diff --git a/.DS_Store b/.DS_Store index de9e8ce..a7068f3 100644 Binary files a/.DS_Store and b/.DS_Store differ diff --git a/README.md b/README.md index 5b29076..c89a159 100644 --- a/README.md +++ b/README.md @@ -36,7 +36,7 @@ Please check the GPT-3 Folder under src/gpt3/ for how to train the model. -### OpenAI Training +### OpenAI GPT-3 Fine Tuning OpenAI uses their own platform for fine tuning. We use command line application called `openai` to train the model. The command line application is available in the link below. @@ -104,6 +104,33 @@ openai api completions.create -m ada:ft-personal- -p ``` +### GPT-NEO Model + +The GPT-Neo training notebook is located at [gpt_neo.ipynb](./src/models/gpt_neo/gpt_neo.ipynb). +The evaluation notebooks are located at [gpt_neo_eval.ipynb](./src/models/gpt_neo/gptNEO_eval.ipynb), [neo_process.ipynb](./src/models/gpt_neo/neo_process.ipynb) and [neo_process_eval.ipynb](./src/models/gpt_neo/neo_process_eval.ipynb). + +The model weights and all related information can be found at: +``` +https://drive.google.com/drive/folders/16A34CAxlvwBXNH733zFESogzx8WeA1ty?usp=sharing +``` + +We have also uploaded all of the code. + +### ReRanker Training + +The ReRanker training notebook is located at [reranker-train.ipynb](./src/models/reranker/reranker-train.ipynb). +The ReRanker evaluation notebook is located at [reranker-show.ipynb](./src/models/reranker/reranker-show.ipynb). + + +# Results + +We started by evaluating MathBERT. In our initial studies it reached a loss of 3.15 and an accuracy of 0.13 on our test set, where accuracy means predicting the correct tactic. The training loss was 3.164 and the accuracy on the validation set was 0.14. + +To better see our process, please take a look at Figure 1 and Figure 2. Given the low accuracy of MathBERT, we selected GPT-3 and GPT-NEO for further fine-tuning on the tactic proof dataset we selected. The training accuracy displayed below shows that the GPT-adaMath model we trained achieved the highest accuracy among the individual models, 36.27%. The GPT-3 results are more refined and contain more similar queries, while GPT-NEO has worse accuracy on average but higher variance in its results. Our intuition was to combine both models to leverage this variance and, at the same time, apply a re-ranker to select the top 6 candidate queries so that a correct tactic appears more often. The combined model achieves the highest overall accuracy of 45.92%, compared to the 32.2% benchmark from GPT-f. + + + + Project Organization ------------ @@ -116,7 +143,6 @@ Project Organization │   ├── processed <- The final, canonical data sets for modeling. │   └── raw <- The original, immutable data dump. │ - ├── docs <- A default Sphinx project; see sphinx-doc.org for details │ ├── models <- Trained and serialized models, model predictions, or model summaries │ @@ -136,19 +162,12 @@ Project Organization ├── src <- Source code for use in this project. 
│   ├── __init__.py <- Makes src a Python module │ │ - │   ├── data <- Scripts to download or generate data - │   │   └── make_dataset.py - │ │ - │   ├── features <- Scripts to turn raw data into features for modeling - │   │   └── build_features.py │ │ │   ├── models <- Scripts to train models and then use trained models to make │ │ │ predictions │   │   ├── predict_model.py │   │   └── train_model.py │ │ - │   └── visualization <- Scripts to create exploratory and results oriented visualizations - │   └── visualize.py │ └── tox.ini <- tox file with settings for running tox; see tox.readthedocs.io diff --git a/data/.DS_Store b/data/.DS_Store index bf200cb..bf98084 100644 Binary files a/data/.DS_Store and b/data/.DS_Store differ diff --git a/mathproof/.gitignore b/mathproof/.gitignore deleted file mode 100644 index d7c9832..0000000 --- a/mathproof/.gitignore +++ /dev/null @@ -1,89 +0,0 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] - -# C extensions -*.so - -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover - -# Translations -*.mo -*.pot - -# Django stuff: -*.log - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -# DotEnv configuration -.env - -# Database -*.db -*.rdb - -# Pycharm -.idea - -# VS Code -.vscode/ - -# Spyder -.spyproject/ - -# Jupyter NB Checkpoints -.ipynb_checkpoints/ - -# exclude data from source control by default -/data/ - -# Mac OS-specific storage files -.DS_Store - -# vim -*.swp -*.swo - -# Mypy cache -.mypy_cache/ diff --git a/models/.gitkeep b/models/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/notebooks/.gitkeep b/notebooks/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/references/.gitkeep b/references/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/reports/deep_learning_final.pdf b/reports/deep_learning_final.pdf new file mode 100644 index 0000000..a9f719e Binary files /dev/null and b/reports/deep_learning_final.pdf differ diff --git a/reports/figures/Screen Shot 2022-04-12 at 9.13.10 PM.png b/reports/figures/Screen Shot 2022-04-12 at 9.13.10 PM.png new file mode 100644 index 0000000..23afdc4 Binary files /dev/null and b/reports/figures/Screen Shot 2022-04-12 at 9.13.10 PM.png differ diff --git a/reports/figures/Screen Shot 2022-04-19 at 9.38.41 PM.png b/reports/figures/Screen Shot 2022-04-19 at 9.38.41 PM.png new file mode 100644 index 0000000..37606ea Binary files /dev/null and b/reports/figures/Screen Shot 2022-04-19 at 9.38.41 PM.png differ diff --git a/reports/figures/Screen Shot 2022-04-19 at 9.39.35 PM.png b/reports/figures/Screen Shot 2022-04-19 at 9.39.35 PM.png new file mode 100644 index 0000000..22f2c5d Binary files /dev/null and b/reports/figures/Screen Shot 2022-04-19 at 9.39.35 PM.png differ diff --git a/reports/figures/Screen Shot 2022-04-19 at 9.40.01 PM.png b/reports/figures/Screen Shot 2022-04-19 at 9.40.01 PM.png new file mode 100644 index 0000000..8bee8cb Binary files /dev/null and b/reports/figures/Screen Shot 2022-04-19 at 9.40.01 PM.png differ diff 
--git a/reports/figures/Screen Shot 2022-04-20 at 10.03.40 PM.png b/reports/figures/Screen Shot 2022-04-20 at 10.03.40 PM.png new file mode 100644 index 0000000..40e7107 Binary files /dev/null and b/reports/figures/Screen Shot 2022-04-20 at 10.03.40 PM.png differ diff --git a/reports/figures/Screen Shot 2022-04-20 at 10.14.54 PM.png b/reports/figures/Screen Shot 2022-04-20 at 10.14.54 PM.png new file mode 100644 index 0000000..58d96b0 Binary files /dev/null and b/reports/figures/Screen Shot 2022-04-20 at 10.14.54 PM.png differ diff --git a/reports/figures/Screen Shot 2022-04-20 at 8.23.31 PM.png b/reports/figures/Screen Shot 2022-04-20 at 8.23.31 PM.png new file mode 100644 index 0000000..af515eb Binary files /dev/null and b/reports/figures/Screen Shot 2022-04-20 at 8.23.31 PM.png differ diff --git a/reports/figures/Screen Shot 2022-04-20 at 8.23.44 PM.png b/reports/figures/Screen Shot 2022-04-20 at 8.23.44 PM.png new file mode 100644 index 0000000..aa939ec Binary files /dev/null and b/reports/figures/Screen Shot 2022-04-20 at 8.23.44 PM.png differ diff --git a/reports/figures/Screen Shot 2022-04-20 at 9.31.58 PM.png b/reports/figures/Screen Shot 2022-04-20 at 9.31.58 PM.png new file mode 100644 index 0000000..53c549f Binary files /dev/null and b/reports/figures/Screen Shot 2022-04-20 at 9.31.58 PM.png differ diff --git a/src/.DS_Store b/src/.DS_Store index 0d2ece9..f48a845 100644 Binary files a/src/.DS_Store and b/src/.DS_Store differ diff --git a/src/data/.gitkeep b/src/data/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/src/data/__init__.py b/src/data/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/data/make_dataset.py b/src/data/make_dataset.py deleted file mode 100644 index 96b377a..0000000 --- a/src/data/make_dataset.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -import click -import logging -from pathlib import Path -from dotenv import find_dotenv, load_dotenv - - -@click.command() -@click.argument('input_filepath', type=click.Path(exists=True)) -@click.argument('output_filepath', type=click.Path()) -def main(input_filepath, output_filepath): - """ Runs data processing scripts to turn raw data from (../raw) into - cleaned data ready to be analyzed (saved in ../processed). 
- """ - logger = logging.getLogger(__name__) - logger.info('making final data set from raw data') - - -if __name__ == '__main__': - log_fmt = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' - logging.basicConfig(level=logging.INFO, format=log_fmt) - - # not used in this stub but often useful for finding various files - project_dir = Path(__file__).resolve().parents[2] - - # find .env automagically by walking up directories until it's found, then - # load up the .env entries as environment variables - load_dotenv(find_dotenv()) - - main() diff --git a/src/features/.gitkeep b/src/features/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/src/features/__init__.py b/src/features/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/features/build_features.py b/src/features/build_features.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/models/.DS_Store b/src/models/.DS_Store new file mode 100644 index 0000000..be59572 Binary files /dev/null and b/src/models/.DS_Store differ diff --git a/src/models/gpt_neo/gptNEO_eval.ipynb b/src/models/gpt_neo/gptNEO_eval.ipynb new file mode 100644 index 0000000..4fbbd1f --- /dev/null +++ b/src/models/gpt_neo/gptNEO_eval.ipynb @@ -0,0 +1,1447 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "id": "37b416dd", + "metadata": {}, + "outputs": [], + "source": [ + "import glob" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "97243d38", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['json_data_0.json', 'json_data__hard_ones.json', 'json_data_1.json']" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "glob.glob(\"*.json\")" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "da4ab5e8", + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "\n", + "with open('json_data_0.json', 'r') as json_file:\n", + " json_list = [json.loads(i) for i in list(json_file)]" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "a1cb89f2", + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "\n", + "with open('json_data_1.json', 'r') as json_file:\n", + " json_list2 = [json.loads(i) for i in list(json_file)]" + ] + }, + { + "cell_type": "code", + "execution_count": 105, + "id": "96c299f2", + "metadata": {}, + "outputs": [], + "source": [ + "final_data = json_list[0] + json_list2[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 106, + "id": "aa8c2a9d", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "7128" + ] + }, + "execution_count": 106, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(final_data)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "4c5055cf", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "7128" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(final_data)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "55f25b6f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'results': [' subst n\\n',\n", + " ' dsimp [cons, vector.cons, list.cons]\\n',\n", + " ' rw [list.cons_append, list.cons_erase n]; by_cases n = 0; [ rw [list.cons_erase n, to_list_cons, list.to_list_cons] ]\\n',\n", + " ' subst n\\n',\n", + " ' subst n\\n',\n", + " ' dsimp [list.cons]\\n',\n", + " ' dsimp [cons]\\n',\n", + " ' dsimp [cons, 
to_list]\\n',\n", + " ' dsimp [to_list]; rw [list.cons_head, cons_nth]\\n',\n", + " ' subst n\\n',\n", + " ' subst n\\n',\n", + " ' dsimp [cons]\\n',\n", + " ' dsimp [list.to_list, list.cons]; simp [list.cons, list.to_list, list.nil_cons]\\n',\n", + " ' subst n\\n',\n", + " ' rw [← list.cons_head]\\n'],\n", + " 'prompt': 'GOAL α : Type u,\\tn : ℕ,\\ta : α,\\tv : vector α n\\t⊢ (vector.cons a v).to_list = a :: v.to_list\\n PROOFSTEP ',\n", + " 'completion': ' cases v\\n'}" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "final_data[10]" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "id": "813bdd55", + "metadata": {}, + "outputs": [], + "source": [ + "each = final_data[2]" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "id": "bb4aae67", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'results': [\" erw [read, array.read', h, array.read', h]\\n\",\n", + " ' conv_rhs { rw ←array.read ⟨b_fst, b_snd⟩ }\\n',\n", + " \" dsimp [read']\\n\",\n", + " \" rw [read', array.read_eq_nil_of_ne b_snd h]\\n\",\n", + " ' rw [read₂, h]\\n',\n", + " ' subst i\\n',\n", + " \" dsimp [read']\\n\",\n", + " \" rw [read', array_read', le_antisymm h]\\n\",\n", + " ' subst h\\n',\n", + " ' dsimp [array.read]\\n',\n", + " ' refl\\n',\n", + " \" simp [read', array, h]\\n\",\n", + " ' subst i\\n',\n", + " ' subst i\\n',\n", + " ' dsimp [read]\\n'],\n", + " 'prompt': \"GOAL α : Type u,\\t_inst_1 : inhabited α,\\ti b_fst : ℕ,\\tb_snd : array b_fst α,\\th : i < buffer.size ⟨b_fst, b_snd⟩\\t⊢ buffer.read ⟨b_fst, b_snd⟩ ⟨i, h⟩ = buffer.read' ⟨b_fst, b_snd⟩ i\\n PROOFSTEP \",\n", + " 'completion': \" unfold read read'\\n\"}" + ] + }, + "execution_count": 52, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "each\n" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "id": "b376ba85", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'best_match': \"erw [read, array.read', h, array.read', h]\",\n", + " 'score': 0.0,\n", + " 'real': \"unfold read read'\",\n", + " 'all': [\"erw [read, array.read', h, array.read', h]\",\n", + " 'refl',\n", + " 'subst i',\n", + " 'subst h',\n", + " \"rw [read', array_read', le_antisymm h]\",\n", + " \"dsimp [read']\",\n", + " 'dsimp [read]',\n", + " 'conv_rhs { rw ←array.read ⟨b_fst, b_snd⟩ }',\n", + " \"rw [read', array.read_eq_nil_of_ne b_snd h]\",\n", + " 'rw [read₂, h]',\n", + " 'dsimp [array.read]',\n", + " \"simp [read', array, h]\"],\n", + " 'prompt': \"GOAL α : Type u,\\t_inst_1 : inhabited α,\\ti b_fst : ℕ,\\tb_snd : array b_fst α,\\th : i < buffer.size ⟨b_fst, b_snd⟩\\t⊢ buffer.read ⟨b_fst, b_snd⟩ ⟨i, h⟩ = buffer.read' ⟨b_fst, b_snd⟩ i\\n PROOFSTEP \"}" + ] + }, + "execution_count": 53, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "each_one(each)" + ] + }, + { + "cell_type": "code", + "execution_count": 72, + "id": "469050dd", + "metadata": {}, + "outputs": [], + "source": [ + "def each_one(each):\n", + " resultiii = each[\"results\"]\n", + " resii = list(set([j.strip() for i in resultiii for j in i.replace(\"PROOFSTEP\",\"\\n\").replace(';', '\\n').split(\"\\n\") if not(j.isupper() or j.strip()==\"\")]))\n", + " result_real = each[\"completion\"].replace(\"PROOFSTEP\",\"\\n\").replace(';', '\\n').split(\"\\n\")[0].split()\n", + " length_result = len(result_real) \n", + " resi_length = [len(set(i.split()).intersection(result_real)) for i in resii]\n", + " maximum = max(resi_length) 
if resi_length!=[] else 0\n", + " return { \"best_match\": resii[resi_length.index(maximum)] if resi_length!=[] else \"\", \"score\":maximum/length_result, \"real\":\" \".join(result_real), \"all\":resii, \"prompt\":each[\"prompt\"]}\n" + ] + }, + { + "cell_type": "code", + "execution_count": 73, + "id": "4d37786f", + "metadata": {}, + "outputs": [], + "source": [ + "# final_data" + ] + }, + { + "cell_type": "code", + "execution_count": 74, + "id": "c5425391", + "metadata": {}, + "outputs": [], + "source": [ + "result_neo= [ each_one(i) for i in final_data]" + ] + }, + { + "cell_type": "code", + "execution_count": 78, + "id": "f9cbc026", + "metadata": {}, + "outputs": [], + "source": [ + "result_neo_c= [ each_one(i)[\"score\"] for i in final_data]" + ] + }, + { + "cell_type": "code", + "execution_count": 79, + "id": "52279afb", + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": 80, + "id": "1198411c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0.30759333988765086" + ] + }, + "execution_count": 80, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "np.mean(result_neo_c)" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "id": "e90a2817", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'best_match': 'simp [hb]',\n", + " 'score': 1.0,\n", + " 'real': 'simp [hb]',\n", + " 'all': ['subst a',\n", + " 'aux h',\n", + " 'rw [mul_comm, mul_left_comm]',\n", + " 'exact mul_zero (mul_eq_zero.mp hab) (mul_left_cancel hcd)',\n", + " 'simp [hb]',\n", + " 'hedral_blur : (b * b / (d * d)) = ((a / b) * c / (c * d))',\n", + " 'subst hb',\n", + " 'have hcd1 : ∃ (y : ℕ), c + (d * y) = b',\n", + " 'rw [mul_div_assoc, hb.eq, mul_zero, int.mul_div_cancel_left]',\n", + " 'conv {to_rhs, rw ← hb}',\n", + " 'rw [mul_comm]',\n", + " 'rw [mul_div_assoc, ← mul_assoc (nat.eq_zero_of_dvd_one hb) b, mul_zero, zero_div]',\n", + " 'rw [mem_div_iff hb, zero_mul, zero_mul, mul_zero, zero_mul, mul_zero, zero_add, zero_add]',\n", + " 'hedral_zero_right',\n", + " 'have h₀ : ite (0 = b * d) 0 = a * c / (b * d)'],\n", + " 'prompt': 'GOAL a b c d : ℕ,\\thab : b ∣ a,\\thcd : d ∣ c,\\texi1 : ∃ (x : ℕ), a = b * x,\\texi2 : ∃ (y : ℕ), c = d * y,\\thb : b = 0\\t⊢ a / b * (c / d) = a * c / (b * d)\\n PROOFSTEP '}" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "{ \"best_match\": resii[resi_length.index(maximum)], \"score\":maximum/length_result, \"real\":\" \".join(result_real), \"all\":resii, \"prompt\":each[\"prompt\"]}" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "9ce8910b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['simp', '[hb]']" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "result_real" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "id": "6b3d8a6e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['subst a',\n", + " 'aux h',\n", + " 'rw [mul_comm, mul_left_comm]',\n", + " 'exact mul_zero (mul_eq_zero.mp hab) (mul_left_cancel hcd)',\n", + " 'simp [hb]',\n", + " 'hedral_blur : (b * b / (d * d)) = ((a / b) * c / (c * d))',\n", + " 'subst hb',\n", + " 'have hcd1 : ∃ (y : ℕ), c + (d * y) = b',\n", + " 'rw [mul_div_assoc, hb.eq, mul_zero, int.mul_div_cancel_left]',\n", + " 'conv {to_rhs, rw ← hb}',\n", + " 'rw [mul_comm]',\n", + " 'rw [mul_div_assoc, ← 
mul_assoc (nat.eq_zero_of_dvd_one hb) b, mul_zero, zero_div]',\n", + " 'rw [mem_div_iff hb, zero_mul, zero_mul, mul_zero, zero_mul, mul_zero, zero_add, zero_add]',\n", + " 'hedral_zero_right',\n", + " 'have h₀ : ite (0 = b * d) 0 = a * c / (b * d)']" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "resii" + ] + }, + { + "cell_type": "code", + "execution_count": 82, + "id": "566c1138", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Reranker\t gdrive\t json_data__hard_ones.json\tonstart.sh\r\n", + "all_gpt-ada.json gptNEO_eval.ipynb neo-Copy1.ipynb\t\treranker.ipynb\r\n", + "class\t\t json_data_0.json neo.ipynb\t\t\tresults\r\n", + "files_upload\t json_data_1.json onstart.log\r\n" + ] + } + ], + "source": [ + "!ls\n" + ] + }, + { + "cell_type": "code", + "execution_count": 88, + "id": "d3ee64fb", + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "\n", + "with open('all_gpt-ada.json', 'r') as json_file:\n", + " json_ada = [json.loads(i) for i in list(json_file)][0]" + ] + }, + { + "cell_type": "code", + "execution_count": 123, + "id": "a3dff8e4", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'best_match': 'cases v',\n", + " 'score': 1.0,\n", + " 'real': 'cases v',\n", + " 'all': ['cases v',\n", + " 'p : list α',\n", + " \"⊢ (vector.to_list v).cases_on' (cons a v) (λ i, id) (λ i\",\n", + " '_match : ∀ (_a _a_1 : list α), _a.length = _a_',\n", + " '_inst_1 : Π (p : Prop), decidable p,\\thv : v.to_list = list.',\n", + " 'append : v.to_list = v.cons a :: v.to_list',\n", + " \"⊢ (vector.to_list v).cases_on' (cons a v) (λ i, by simp)\",\n", + " 'rw to_list_cons'],\n", + " 'prompt': 'GOAL α : Type u,\\tn : ℕ,\\ta : α,\\tv : vector α n\\t⊢ (vector.cons a v).to_list = a :: v.to_list\\n PROOFSTEP '}" + ] + }, + "execution_count": 123, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "json_ada[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 93, + "id": "78cb85d2", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[{'best_match': 'α : Type u,\\t_inst_1 : inhabited α,\\ti b_fst : ℕ,\\tb',\n", + " 'score': 0.0,\n", + " 'real': \"unfold read read'\",\n", + " 'all': ['α : Type u,\\t_inst_1 : inhabited α,\\ti b_fst : ℕ,\\tb',\n", + " 'subst i',\n", + " 'α : Type u,\\t_inst_1 : inhabited α,\\ti b_fst :',\n", + " 'simp',\n", + " 'assumption',\n", + " 'rw [← unfold_coes_eq',\n", + " 'simp [size, h]',\n", + " 'simp [insert_of_mem, h]',\n", + " 'refl',\n", + " 'cases i',\n", + " '⊢ d_vars ⟨',\n", + " 'h'],\n", + " 'prompt': \"GOAL α : Type u,\\t_inst_1 : inhabited α,\\ti b_fst : ℕ,\\tb_snd : array b_fst α,\\th : i < buffer.size ⟨b_fst, b_snd⟩\\t⊢ buffer.read ⟨b_fst, b_snd⟩ ⟨i, h⟩ = buffer.read' ⟨b_fst, b_snd⟩ i\\n PROOFSTEP \"}]" + ] + }, + "execution_count": 93, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "[ i for i in json_ada if each[\"prompt\"]==i[\"prompt\"]]" + ] + }, + { + "cell_type": "code", + "execution_count": 107, + "id": "b3106baf", + "metadata": {}, + "outputs": [], + "source": [ + "modified = []\n", + "for each in final_data:\n", + " f = [ i[\"all\"] for i in json_ada if each[\"prompt\"]==i[\"prompt\"]]\n", + " f_flat = [j for i in f for j in i]\n", + "# print(len(each[\"results\"]))\n", + " each[\"results\"] = list(set(each[\"results\"]+f_flat))\n", + "# print(len(each[\"results\"]))" + ] + }, + { + "cell_type": "code", + "execution_count": 124, + 
"id": "4614e895", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'results': ['cases i',\n", + " \"apply b.reverse_corec'\",\n", + " 'apply b.append_right_',\n", + " \" rw [array.read_write', array.write'_write, buffer.write_mem, array.write_mem, h]\\n\",\n", + " 'apply b',\n", + " ' delta read\\n',\n", + " 'apply fin',\n", + " \" convert b.read'\\n\",\n", + " ' dsimp only [read, dsimp] at h\\n',\n", + " 'apply_instance',\n", + " ' \\n',\n", + " \"set i := b.reverse_core i with h'\",\n", + " 'apply b.append_left_inj',\n", + " ' dsimp [read]\\n',\n", + " ' subst i\\n',\n", + " '⊢ b.read',\n", + " 'rw [size, h]',\n", + " 'apply mem',\n", + " ' convert read_inj b\\n',\n", + " 'simp [append_right]',\n", + " 'simp [size, h]',\n", + " 'rw [←append_right_inj]',\n", + " 'simp',\n", + " '⊢ b.reverse_',\n", + " ' dsimp [read] at h\\n',\n", + " ' simp! * at *\\n',\n", + " 'set_of_eq_eq_succ_of_eq',\n", + " ' contradiction\\n',\n", + " 'rw [←append_right_inverse_iff, ←',\n", + " \"apply b.read'_eq_of_mem\",\n", + " '_match : ∀ (_a _a_1 :append (list α) ⟨i,',\n", + " 'apply b.append_right',\n", + " 'apply b.append_left_injective',\n", + " '_inst : inhabited α :=',\n", + " 'apply b.reverse_core_eq',\n", + " \"apply b.append_right'\"],\n", + " 'prompt': \"GOAL α : Type u,\\t_inst_1 : inhabited α,\\tb : buffer α,\\ti : ℕ,\\th : i < b.size\\t⊢ b.read ⟨i, h⟩ = b.read' i\\n PROOFSTEP \",\n", + " 'completion': \" cases b; unfold read read'; simp [array.read_eq_read']\\n\"}" + ] + }, + "execution_count": 124, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "final_data[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 109, + "id": "9c20b3d8", + "metadata": {}, + "outputs": [], + "source": [ + "result_neo_new= [ each_one(i) for i in final_data]" + ] + }, + { + "cell_type": "code", + "execution_count": 110, + "id": "24f23046", + "metadata": {}, + "outputs": [], + "source": [ + "result_neo_c_new= [ each_one(i)[\"score\"] for i in final_data]" + ] + }, + { + "cell_type": "code", + "execution_count": 112, + "id": "8246cbf0", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0.4592759403677324" + ] + }, + "execution_count": 112, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "np.mean(result_neo_c_new)" + ] + }, + { + "cell_type": "code", + "execution_count": 117, + "id": "5f7e4bf4", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "18.628226711560046" + ] + }, + "execution_count": 117, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "np.mean([len(i[\"all\"]) for i in result_neo_new])" + ] + }, + { + "cell_type": "code", + "execution_count": 118, + "id": "8f35d406", + "metadata": {}, + "outputs": [], + "source": [ + "# import json\n", + "\n", + "# with open('files_upload/data_test.jsonl', 'r') as json_file:\n", + "# json_list = [json.loads(i) for i in list(final_data)]" + ] + }, + { + "cell_type": "code", + "execution_count": 119, + "id": "688deda9", + "metadata": {}, + "outputs": [], + "source": [ + "with open('all_gpt_combined.json', 'w') as fout:\n", + " json.dump(result_neo_new , fout)" + ] + }, + { + "cell_type": "code", + "execution_count": 122, + "id": "62e4917d", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "400K\tReranker\n", + "6.2M\tall_gpt-ada.json\n", + "11M\tall_gpt_combined.json\n", + "538M\tclass\n", + "562M\tfiles_upload\n", + "7.9M\tgdrive\n", + "20K\tgptNEO_eval.ipynb\n", + 
"1012K\tjson_data_0.json\n", + "7.9M\tjson_data_1.json\n", + "4.3M\tjson_data__hard_ones.json\n", + "740K\tneo-Copy1.ipynb\n", + "908K\tneo.ipynb\n", + "0\tonstart.log\n", + "4.0K\tonstart.sh\n", + "12K\treranker.ipynb\n", + "11G\tresults\n" + ] + } + ], + "source": [ + "!du -sh *" + ] + }, + { + "cell_type": "code", + "execution_count": 126, + "id": "c1453a31", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'best_match': 'convert read_inj b',\n", + " 'score': 0.5,\n", + " 'real': 'cases b',\n", + " 'all': ['contradiction',\n", + " 'delta read',\n", + " 'subst i',\n", + " 'dsimp only [read, dsimp] at h',\n", + " \"rw [array.read_write', array.write'_write, buffer.write_mem, array.write_mem, h]\",\n", + " 'simp! * at *',\n", + " 'dsimp [read]',\n", + " 'convert read_inj b',\n", + " \"convert b.read'\",\n", + " 'dsimp [read] at h'],\n", + " 'prompt': \"GOAL α : Type u,\\t_inst_1 : inhabited α,\\tb : buffer α,\\ti : ℕ,\\th : i < b.size\\t⊢ b.read ⟨i, h⟩ = b.read' i\\n PROOFSTEP \"}" + ] + }, + "execution_count": 126, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "result_neo[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 127, + "id": "0d36849b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[0m\u001b[01;34mReranker\u001b[0m/ \u001b[01;32mgdrive\u001b[0m* onstart.sh\r\n", + "all_gpt-ada.json gptNEO_eval.ipynb reranker-show.ipynb\r\n", + "all_gpt_combined.json json_data_0.json reranker-train.ipynb\r\n", + "\u001b[01;34mclass\u001b[0m/ json_data_1.json \u001b[01;34mresults\u001b[0m/\r\n", + "\u001b[01;34mdata\u001b[0m/ json_data__hard_ones.json test.json\r\n", + "data.py neo-Copy1.ipynb \u001b[01;34mtmp_trainer\u001b[0m/\r\n", + "data_and_metadata.csv neo.ipynb\r\n", + "\u001b[01;34mfiles_upload\u001b[0m/ onstart.log\r\n" + ] + } + ], + "source": [ + "ls " + ] + }, + { + "cell_type": "code", + "execution_count": 128, + "id": "c9c06f52", + "metadata": {}, + "outputs": [], + "source": [ + "import pandas as pd" + ] + }, + { + "cell_type": "code", + "execution_count": 130, + "id": "9d26ae59", + "metadata": {}, + "outputs": [], + "source": [ + "df = pd.read_csv(\"data_and_metadata.csv\")" + ] + }, + { + "cell_type": "code", + "execution_count": 141, + "id": "17b8b110", + "metadata": {}, + "outputs": [], + "source": [ + "df2=pd.DataFrame(result_neo_new)" + ] + }, + { + "cell_type": "code", + "execution_count": 142, + "id": "9386b6dd", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
best_matchscorerealallprompt
0cases i0.500000cases b[cases i, apply b.reverse_corec', apply b.appe...GOAL α : Type u,\\t_inst_1 : inhabited α,\\tb : ...
1cases i0.500000cases b[cases i, cases b.read_inj h, apply b.reverse_...GOAL α : Type u,\\t_inst_1 : inhabited α,\\tb : ...
2cases i0.000000unfold read read'[cases i, simp [insert_of_mem, h], assumption,...GOAL α : Type u,\\t_inst_1 : inhabited α,\\ti b_...
3simp [show ((b_fst, i) - 1) = i + 1, from fune...0.500000simp [array.read_eq_read'][try {refl}, exact rfl, cases b_snd, refl, sub...GOAL α : Type u,\\t_inst_1 : inhabited α,\\ti b_...
4cases i0.500000cases b[cases i, apply b.reverse_corec', apply b.appe...GOAL α : Type u,\\t_inst_1 : inhabited α,\\tb : ...
..................
7123simp only [f0.285714simp only [B, finset.card_sdiff this, function...[apply nat, bound, { rw [← C_eq], exact nat.lt...GOAL R : Type u,\\tι : Type u',\\tM₁ : ι → Type ...
7124exact0.166667exact nat.pred_lt (ne_of_gt (lt_trans nat.zero...[ik, bound, exact, conv, { rw [card_singleton,...GOAL R : Type u,\\tι : Type u',\\tM₁ : ι → Type ...
7125refine lt0.083333refine finset.sum_lt_sum (λ i hi, finset.card_...[rw C, exact, ⊢ finset., refine lt, rw [finset...GOAL R : Type u,\\tι : Type u',\\tM₁ : ι → Type ...
7126{ rcases Ai_empty with ⟨i, _⟩, simp only [fins...0.087719{ rcases Ai_empty with ⟨i, hi⟩, have : ∑ j in ...[{, { rcases Ai_empty with ⟨i, _⟩, simp only [...GOAL R : Type u,\\tι : Type u',\\tM₁ : ι → Type ...
7127{ intro i, have : (⊥ : finset (α i)).card = n ...0.181818{ assume i, have pos : finset.card (A i) ≠ 0, ...[{ intro i, have : (⊥ : finset (α i)).card = n...GOAL R : Type u,\\tι : Type u',\\tM₁ : ι → Type ...
\n", + "

7128 rows × 5 columns

\n", + "
" + ], + "text/plain": [ + " best_match score \\\n", + "0 cases i 0.500000 \n", + "1 cases i 0.500000 \n", + "2 cases i 0.000000 \n", + "3 simp [show ((b_fst, i) - 1) = i + 1, from fune... 0.500000 \n", + "4 cases i 0.500000 \n", + "... ... ... \n", + "7123 simp only [f 0.285714 \n", + "7124 exact 0.166667 \n", + "7125 refine lt 0.083333 \n", + "7126 { rcases Ai_empty with ⟨i, _⟩, simp only [fins... 0.087719 \n", + "7127 { intro i, have : (⊥ : finset (α i)).card = n ... 0.181818 \n", + "\n", + " real \\\n", + "0 cases b \n", + "1 cases b \n", + "2 unfold read read' \n", + "3 simp [array.read_eq_read'] \n", + "4 cases b \n", + "... ... \n", + "7123 simp only [B, finset.card_sdiff this, function... \n", + "7124 exact nat.pred_lt (ne_of_gt (lt_trans nat.zero... \n", + "7125 refine finset.sum_lt_sum (λ i hi, finset.card_... \n", + "7126 { rcases Ai_empty with ⟨i, hi⟩, have : ∑ j in ... \n", + "7127 { assume i, have pos : finset.card (A i) ≠ 0, ... \n", + "\n", + " all \\\n", + "0 [cases i, apply b.reverse_corec', apply b.appe... \n", + "1 [cases i, cases b.read_inj h, apply b.reverse_... \n", + "2 [cases i, simp [insert_of_mem, h], assumption,... \n", + "3 [try {refl}, exact rfl, cases b_snd, refl, sub... \n", + "4 [cases i, apply b.reverse_corec', apply b.appe... \n", + "... ... \n", + "7123 [apply nat, bound, { rw [← C_eq], exact nat.lt... \n", + "7124 [ik, bound, exact, conv, { rw [card_singleton,... \n", + "7125 [rw C, exact, ⊢ finset., refine lt, rw [finset... \n", + "7126 [{, { rcases Ai_empty with ⟨i, _⟩, simp only [... \n", + "7127 [{ intro i, have : (⊥ : finset (α i)).card = n... \n", + "\n", + " prompt \n", + "0 GOAL α : Type u,\\t_inst_1 : inhabited α,\\tb : ... \n", + "1 GOAL α : Type u,\\t_inst_1 : inhabited α,\\tb : ... \n", + "2 GOAL α : Type u,\\t_inst_1 : inhabited α,\\ti b_... \n", + "3 GOAL α : Type u,\\t_inst_1 : inhabited α,\\ti b_... \n", + "4 GOAL α : Type u,\\t_inst_1 : inhabited α,\\tb : ... \n", + "... ... \n", + "7123 GOAL R : Type u,\\tι : Type u',\\tM₁ : ι → Type ... \n", + "7124 GOAL R : Type u,\\tι : Type u',\\tM₁ : ι → Type ... \n", + "7125 GOAL R : Type u,\\tι : Type u',\\tM₁ : ι → Type ... \n", + "7126 GOAL R : Type u,\\tι : Type u',\\tM₁ : ι → Type ... \n", + "7127 GOAL R : Type u,\\tι : Type u',\\tM₁ : ι → Type ... \n", + "\n", + "[7128 rows x 5 columns]" + ] + }, + "execution_count": 142, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "df2" + ] + }, + { + "cell_type": "code", + "execution_count": 143, + "id": "2dafc56b", + "metadata": {}, + "outputs": [], + "source": [ + "prompt = [i.replace(\"GOAL\",\"\").replace(\"PROOFSTEP\",\"\").strip() for i in df2[\"prompt\"]]" + ] + }, + { + "cell_type": "code", + "execution_count": 144, + "id": "ec65ee63", + "metadata": {}, + "outputs": [], + "source": [ + "df2[\"prompt\"] = prompt" + ] + }, + { + "cell_type": "code", + "execution_count": 145, + "id": "3dc35b66", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Unnamed: 0goal_ppdecl_nameopen_namespacesfilenamelinecolumnproof_keyhuman_tactic_codetactic_classcleaned_goalsplit
00α : Type u,\\n_inst_1 : inhabited α,\\nb : buffe...buffer.read_eq_read'bufferlean/library/data/buffer.lean4930lean/library/data/buffer.lean:49:1cases b; unfold read read'; simp [array.read_e...semicolonα : Type u,\\t_inst_1 : inhabited α,\\tb : buffe...test
11α : Type u,\\n_inst_1 : inhabited α,\\nb : buffe...buffer.read_eq_read'bufferlean/library/data/buffer.lean4911lean/library/data/buffer.lean:49:1cases b; unfold read read'semicolonα : Type u,\\t_inst_1 : inhabited α,\\tb : buffe...test
22α : Type u,\\n_inst_1 : inhabited α,\\ni b_fst :...buffer.read_eq_read'bufferlean/library/data/buffer.lean4913lean/library/data/buffer.lean:49:1unfold read read'namedα : Type u,\\t_inst_1 : inhabited α,\\ti b_fst :...test
33α : Type u,\\n_inst_1 : inhabited α,\\ni b_fst :...buffer.read_eq_read'bufferlean/library/data/buffer.lean4932lean/library/data/buffer.lean:49:1simp [array.read_eq_read']namedα : Type u,\\t_inst_1 : inhabited α,\\ti b_fst :...test
44α : Type u,\\n_inst_1 : inhabited α,\\nb : buffe...buffer.read_eq_read'bufferlean/library/data/buffer.lean494lean/library/data/buffer.lean:49:1cases bnamedα : Type u,\\t_inst_1 : inhabited α,\\tb : buffe...test
.......................................
200025200025α : Type u_1,\\nβ : Type u_2,\\n_inst_1 : unifor...uniform_embedding_def'classical filter topological_space setmathlib/src/topology/uniform_space/uniform_emb...7861mathlib/src/topology/uniform_space/uniform_emb...simp only [uniform_embedding_def, uniform_cont...semicolonα : Type u_1,\\tβ : Type u_2,\\t_inst_1 : unifor...test
200026200026α : Type u_1,\\nβ : Type u_2,\\n_inst_1 : unifor...uniform_embedding_def'classical filter topological_space setmathlib/src/topology/uniform_space/uniform_emb...7863mathlib/src/topology/uniform_space/uniform_emb...exact ⟨λ ⟨I, H⟩, ⟨I, λ s su, (H _).2 ⟨s, su, λ...namedα : Type u_1,\\tβ : Type u_2,\\t_inst_1 : unifor...test
200195200195R : Type u_1,\\nB : Type u_2,\\nF : Type u_3,\\nE...topological_vector_bundle.mem_source_trivializ...topological_vector_bundle bundle topological_v...mathlib/src/topology/vector_bundle.lean12461mathlib/src/topology/vector_bundle.lean:124:1apply mem_base_set_trivialization_atnamedR : Type u_1,\\tB : Type u_2,\\tF : Type u_3,\\tE...test
200196200196R : Type u_1,\\nB : Type u_2,\\nF : Type u_3,\\nE...topological_vector_bundle.mem_source_trivializ...topological_vector_bundle bundle topological_v...mathlib/src/topology/vector_bundle.lean1246mathlib/src/topology/vector_bundle.lean:124:1rw topological_fiber_bundle.trivialization.mem...namedR : Type u_1,\\tB : Type u_2,\\tF : Type u_3,\\tE...test
200197200197R : Type u_1,\\nB : Type u_2,\\nF : Type u_3,\\nE...topological_vector_bundle.mem_source_trivializ...topological_vector_bundle bundle topological_v...mathlib/src/topology/vector_bundle.lean1244mathlib/src/topology/vector_bundle.lean:124:1{ rw topological_fiber_bundle.trivialization.m...solve1R : Type u_1,\\tB : Type u_2,\\tF : Type u_3,\\tE...test
\n", + "

7902 rows × 12 columns

\n", + "
" + ], + "text/plain": [ + " Unnamed: 0 goal_pp \\\n", + "0 0 α : Type u,\\n_inst_1 : inhabited α,\\nb : buffe... \n", + "1 1 α : Type u,\\n_inst_1 : inhabited α,\\nb : buffe... \n", + "2 2 α : Type u,\\n_inst_1 : inhabited α,\\ni b_fst :... \n", + "3 3 α : Type u,\\n_inst_1 : inhabited α,\\ni b_fst :... \n", + "4 4 α : Type u,\\n_inst_1 : inhabited α,\\nb : buffe... \n", + "... ... ... \n", + "200025 200025 α : Type u_1,\\nβ : Type u_2,\\n_inst_1 : unifor... \n", + "200026 200026 α : Type u_1,\\nβ : Type u_2,\\n_inst_1 : unifor... \n", + "200195 200195 R : Type u_1,\\nB : Type u_2,\\nF : Type u_3,\\nE... \n", + "200196 200196 R : Type u_1,\\nB : Type u_2,\\nF : Type u_3,\\nE... \n", + "200197 200197 R : Type u_1,\\nB : Type u_2,\\nF : Type u_3,\\nE... \n", + "\n", + " decl_name \\\n", + "0 buffer.read_eq_read' \n", + "1 buffer.read_eq_read' \n", + "2 buffer.read_eq_read' \n", + "3 buffer.read_eq_read' \n", + "4 buffer.read_eq_read' \n", + "... ... \n", + "200025 uniform_embedding_def' \n", + "200026 uniform_embedding_def' \n", + "200195 topological_vector_bundle.mem_source_trivializ... \n", + "200196 topological_vector_bundle.mem_source_trivializ... \n", + "200197 topological_vector_bundle.mem_source_trivializ... \n", + "\n", + " open_namespaces \\\n", + "0 buffer \n", + "1 buffer \n", + "2 buffer \n", + "3 buffer \n", + "4 buffer \n", + "... ... \n", + "200025 classical filter topological_space set \n", + "200026 classical filter topological_space set \n", + "200195 topological_vector_bundle bundle topological_v... \n", + "200196 topological_vector_bundle bundle topological_v... \n", + "200197 topological_vector_bundle bundle topological_v... \n", + "\n", + " filename line column \\\n", + "0 lean/library/data/buffer.lean 49 30 \n", + "1 lean/library/data/buffer.lean 49 11 \n", + "2 lean/library/data/buffer.lean 49 13 \n", + "3 lean/library/data/buffer.lean 49 32 \n", + "4 lean/library/data/buffer.lean 49 4 \n", + "... ... ... ... \n", + "200025 mathlib/src/topology/uniform_space/uniform_emb... 78 61 \n", + "200026 mathlib/src/topology/uniform_space/uniform_emb... 78 63 \n", + "200195 mathlib/src/topology/vector_bundle.lean 124 61 \n", + "200196 mathlib/src/topology/vector_bundle.lean 124 6 \n", + "200197 mathlib/src/topology/vector_bundle.lean 124 4 \n", + "\n", + " proof_key \\\n", + "0 lean/library/data/buffer.lean:49:1 \n", + "1 lean/library/data/buffer.lean:49:1 \n", + "2 lean/library/data/buffer.lean:49:1 \n", + "3 lean/library/data/buffer.lean:49:1 \n", + "4 lean/library/data/buffer.lean:49:1 \n", + "... ... \n", + "200025 mathlib/src/topology/uniform_space/uniform_emb... \n", + "200026 mathlib/src/topology/uniform_space/uniform_emb... \n", + "200195 mathlib/src/topology/vector_bundle.lean:124:1 \n", + "200196 mathlib/src/topology/vector_bundle.lean:124:1 \n", + "200197 mathlib/src/topology/vector_bundle.lean:124:1 \n", + "\n", + " human_tactic_code tactic_class \\\n", + "0 cases b; unfold read read'; simp [array.read_e... semicolon \n", + "1 cases b; unfold read read' semicolon \n", + "2 unfold read read' named \n", + "3 simp [array.read_eq_read'] named \n", + "4 cases b named \n", + "... ... ... \n", + "200025 simp only [uniform_embedding_def, uniform_cont... semicolon \n", + "200026 exact ⟨λ ⟨I, H⟩, ⟨I, λ s su, (H _).2 ⟨s, su, λ... named \n", + "200195 apply mem_base_set_trivialization_at named \n", + "200196 rw topological_fiber_bundle.trivialization.mem... named \n", + "200197 { rw topological_fiber_bundle.trivialization.m... 
solve1 \n", + "\n", + " cleaned_goal split \n", + "0 α : Type u,\\t_inst_1 : inhabited α,\\tb : buffe... test \n", + "1 α : Type u,\\t_inst_1 : inhabited α,\\tb : buffe... test \n", + "2 α : Type u,\\t_inst_1 : inhabited α,\\ti b_fst :... test \n", + "3 α : Type u,\\t_inst_1 : inhabited α,\\ti b_fst :... test \n", + "4 α : Type u,\\t_inst_1 : inhabited α,\\tb : buffe... test \n", + "... ... ... \n", + "200025 α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : unifor... test \n", + "200026 α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : unifor... test \n", + "200195 R : Type u_1,\\tB : Type u_2,\\tF : Type u_3,\\tE... test \n", + "200196 R : Type u_1,\\tB : Type u_2,\\tF : Type u_3,\\tE... test \n", + "200197 R : Type u_1,\\tB : Type u_2,\\tF : Type u_3,\\tE... test \n", + "\n", + "[7902 rows x 12 columns]" + ] + }, + "execution_count": 145, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "df = df[df[\"split\"]==\"test\"]\n", + "df" + ] + }, + { + "cell_type": "code", + "execution_count": 146, + "id": "6ee0b5d1", + "metadata": {}, + "outputs": [], + "source": [ + "dfi = df.merge(df2,left_on=\"cleaned_goal\",right_on=\"prompt\")" + ] + }, + { + "cell_type": "code", + "execution_count": 147, + "id": "a8ba1733", + "metadata": {}, + "outputs": [], + "source": [ + "dfi = dfi[dfi[\"filename\"].str.contains(\"mathlib\")]" + ] + }, + { + "cell_type": "code", + "execution_count": 148, + "id": "0c558dda", + "metadata": {}, + "outputs": [], + "source": [ + "dfi[\"topic\"] = [i.split(\"/\")[2] for i in dfi[\"filename\"]]" + ] + }, + { + "cell_type": "code", + "execution_count": 149, + "id": "5f19f7d5", + "metadata": {}, + "outputs": [], + "source": [ + "rsiii =dfi.groupby(\"topic\").mean()[\"score\"].sort_values(ascending=False)" + ] + }, + { + "cell_type": "code", + "execution_count": 150, + "id": "8f156186", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "topic\n", + "control 0.706960\n", + "tactic 0.651040\n", + "computability 0.649710\n", + "testing 0.593750\n", + "algebra 0.573117\n", + "category_theory 0.549169\n", + "algebraic_topology 0.545299\n", + "logic 0.527329\n", + "data 0.501841\n", + "set_theory 0.486266\n", + "algebraic_geometry 0.470193\n", + "deprecated 0.459829\n", + "order 0.435351\n", + "group_theory 0.431248\n", + "number_theory 0.429445\n", + "measure_theory 0.424553\n", + "analysis 0.420037\n", + "linear_algebra 0.406642\n", + "topology 0.406416\n", + "ring_theory 0.397458\n", + "geometry 0.390710\n", + "combinatorics 0.385952\n", + "field_theory 0.381898\n", + "system 0.336364\n", + "probability_theory 0.276866\n", + "dynamics 0.268600\n", + "Name: score, dtype: float64" + ] + }, + "execution_count": 150, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "rsiii" + ] + }, + { + "cell_type": "code", + "execution_count": 151, + "id": "60745841", + "metadata": {}, + "outputs": [], + "source": [ + "rsiii.to_csv(\"gpt_combined.csv\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8cf7a46b", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git 
a/src/models/gpt_neo/gpt_neo.ipynb b/src/models/gpt_neo/gpt_neo.ipynb new file mode 100644 index 0000000..1e4e33f --- /dev/null +++ b/src/models/gpt_neo/gpt_neo.ipynb @@ -0,0 +1,615 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "dee8b62b", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "os.environ['MASTER_ADDR'] = 'localhost'\n", + "os.environ['MASTER_PORT'] = '9994'\n", + "os.environ['RANK'] = \"0\"\n", + "os.environ['LOCAL_RANK'] = \"0\"\n", + "os.environ['WORLD_SIZE'] = \"1\"\n", + "os.environ[\"TOKENIZERS_PARALLELISM\"] = \"false\"" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "da90efac", + "metadata": {}, + "outputs": [], + "source": [ + "from transformers import GPT2Tokenizer, GPTNeoForCausalLM" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "b0824339", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n" + ] + } + ], + "source": [ + "tokenizer = GPT2Tokenizer.from_pretrained(\"EleutherAI/gpt-neo-125M\", \n", + " bos_token=\"<|startoftext|>\",\n", + " eos_token=\"<|endoftext|>\",\n", + " pad_token=\"<|pad|>\")\n", + "# Download the pre-trained GPT-Neo model and transfer it to the GPU\n", + "model = GPTNeoForCausalLM.from_pretrained(\"EleutherAI/gpt-neo-125M\")" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "c6b811e0", + "metadata": {}, + "outputs": [], + "source": [ + "model= model.cuda()" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "0a5c2592", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Embedding(50259, 768)" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.resize_token_embeddings(len(tokenizer))" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "b52d7da2", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'/workspace'" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pwd" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "3eda9f60", + "metadata": {}, + "outputs": [], + "source": [ + "DATA_DIRECTORY = \"workspace/lean_proof_recording/data\"" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "1b1a169e", + "metadata": {}, + "outputs": [], + "source": [ + "with open(\"workspace/lean_proof_recording/data/cleaned_training_data/train.src\") as f:\n", + " train_src = f.readlines()" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "1bee10d2", + "metadata": {}, + "outputs": [], + "source": [ + "with open(\"workspace/lean_proof_recording/data/cleaned_training_data/train.tgt\") as f:\n", + " train_tgt = f.readlines()" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "5b68200e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "data_and_metadata.csv test.src train.names valid.index valid.tgt\r\n", + "test.index test.tgt train.src valid.names\r\n", + "test.names train.index train.tgt valid.src\r\n" + ] + } + ], + "source": [ + "ls workspace/lean_proof_recording/data/cleaned_training_data" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "7962a7cf", + "metadata": {}, + "outputs": [], + "source": [ + "with 
open(\"workspace/lean_proof_recording/data/cleaned_training_data/valid.tgt\") as f:\n", + " val_tgt = f.readlines()" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "04e68a8c", + "metadata": {}, + "outputs": [], + "source": [ + "with open(\"workspace/lean_proof_recording/data/cleaned_training_data/valid.src\") as f:\n", + " val_src = f.readlines()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "fccfc8a2", + "metadata": {}, + "outputs": [], + "source": [ + "with open(\"workspace/lean_proof_recording/data/cleaned_training_data/test.src\") as f:\n", + " test_src = f.readlines()" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "1e061617", + "metadata": {}, + "outputs": [], + "source": [ + "with open(\"workspace/lean_proof_recording/data/cleaned_training_data/test.tgt\") as f:\n", + " test_tgt = f.readlines()" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "1ed37b6e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "7291" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(test_tgt)" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "25015d8e", + "metadata": {}, + "outputs": [], + "source": [ + "import torch" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "b2329888", + "metadata": {}, + "outputs": [], + "source": [ + "from torch.utils.data import Dataset\n", + "class TacticDataset(Dataset):\n", + " def __init__(self, txt_list,tgt_list, tokenizer):\n", + " self.input_ids = []\n", + " self.attn_masks = []\n", + " self.labels = []\n", + " for txt,txt2 in zip(txt_list,tgt_list):\n", + " # Encode the descriptions using the GPT-Neo tokenizer\n", + " encodings_dict = tokenizer('<|startoftext|>'\n", + " + ' ' + txt + ' ' + txt2+ \n", + " '<|endoftext|>',\n", + " truncation=True,\n", + " padding=\"max_length\")\n", + " \n", + " input_ids = torch.tensor(encodings_dict['input_ids']) \n", + " self.input_ids.append(input_ids)\n", + " mask = torch.tensor(encodings_dict['attention_mask'])\n", + " self.attn_masks.append(mask)\n", + "# break\n", + " def __len__(self):\n", + " return len(self.input_ids)\n", + " def __getitem__(self, idx):\n", + " return self.input_ids[idx], self.attn_masks[idx]" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "5d62012c", + "metadata": {}, + "outputs": [], + "source": [ + "# del val_src" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "856a11b4", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "13" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import gc\n", + "gc.collect()" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "11508122", + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "validDataset = TacticDataset(val_src,val_tgt,tokenizer)\n", + "testDataset = TacticDataset(test_src,test_tgt,tokenizer)" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "b0bb272c", + "metadata": {}, + "outputs": [], + "source": [ + "trainDataset = TacticDataset(train_src,train_tgt,tokenizer)" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "5e994dbd", + "metadata": {}, + "outputs": [], + "source": [ + "# x = TacticDataset(test_src,test_tgt,tokenizer)" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "683109ca", + "metadata": {}, + "outputs": [], + "source": [ + "# 
x[0][0].shape" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "ae215c09", + "metadata": {}, + "outputs": [], + "source": [ + "from torch import nn\n", + "from transformers import Trainer\n", + "from transformers import TrainingArguments, Trainer" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "38b57afa", + "metadata": {}, + "outputs": [], + "source": [ + "{\n", + " \"train_batch_size\": 15,\n", + " \"fp16\": {\n", + " \"enabled\": true,\n", + " \"min_loss_scale\": 1,\n", + " \"opt_level\": \"O2\"\n", + " },\n", + " \"zero_optimization\": {\n", + " \"stage\": 2,\n", + " \"offload_param\": {\n", + " \"device\": \"cpu\"\n", + " },\n", + " \"offload_optimizer\": {\n", + " \"device\": \"cpu\"\n", + " },\n", + " \"allgather_partitions\": true,\n", + " \"allgather_bucket_size\": 5e8,\n", + " \"contiguous_gradients\": true\n", + " },\n", + " \"optimizer\": {\n", + " \"type\": \"AdamW\",\n", + " \"params\": {\n", + " \"lr\": 5e-05,\n", + " \"betas\": [\n", + " 0.9,\n", + " 0.999\n", + " ],\n", + " \"eps\": 1e-08\n", + " }\n", + " },\n", + " \"scheduler\": {\n", + " \"type\": \"WarmupLR\",\n", + " \"params\": {\n", + " \"warmup_min_lr\": 0,\n", + " \"warmup_max_lr\": 5e-05,\n", + " \"warmup_num_steps\": 50\n", + " }\n", + " }\n", + "}\n" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "499308c1", + "metadata": {}, + "outputs": [], + "source": [ + "from transformers import AutoTokenizer, TrainingArguments, Trainer, AutoModelForCausalLM, IntervalStrategy" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "a78f9777", + "metadata": {}, + "outputs": [], + "source": [ + "training_args = TrainingArguments(output_dir='./results',\n", + " num_train_epochs=5,\n", + " logging_steps=5000,\n", + " save_steps=20000, \n", + " per_device_train_batch_size=1,\n", + " per_device_eval_batch_size=1,\n", + " warmup_steps=100,\n", + " weight_decay=0.01, \n", + " logging_dir='./logs')" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "55fb65ff", + "metadata": {}, + "outputs": [], + "source": [ + "import mpi4py" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "4adb63c9", + "metadata": {}, + "outputs": [], + "source": [ + "trainer = Trainer(model=model, args=training_args, \n", + " train_dataset=trainDataset,\n", + " eval_dataset=validDataset, \n", + " # This custom collate function is necessary \n", + " # to built batches of data\n", + " data_collator=lambda data: \n", + " {'input_ids': torch.stack([f[0] for f in data]), \n", + " 'attention_mask': torch.stack([f[1] for f in data]),\n", + " 'labels': torch.stack([f[0] for f in data])})" + ] + }, + { + "cell_type": "code", + "execution_count": 58, + "id": "97f1b9cb", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "PyTorch: setting up devices\n", + "The default value for the training argument `--report_to` will change in v5 (from all installed integrations to none). In v5, you will need to use `--report_to all` to get the same behavior as now. 
You should start updating your code and make this info disappear :-).\n" + ] + } + ], + "source": [ + "training_args = TrainingArguments(output_dir='./results',\n", + " num_train_epochs=5,\n", + " logging_steps=5000,\n", + " save_steps=20000, \n", + " per_device_train_batch_size=6,\n", + " per_device_eval_batch_size=6,\n", + " warmup_steps=100,\n", + " weight_decay=0.01, \n", + " logging_dir='./logs',fp16= True)" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "id": "1c465bfc", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using amp half precision backend\n" + ] + } + ], + "source": [ + "trainer = Trainer(model=model, args=training_args, \n", + " train_dataset=trainDataset,\n", + " eval_dataset=validDataset, \n", + " # This custom collate function is necessary \n", + " # to built batches of data\n", + " data_collator=lambda data: \n", + " {'input_ids': torch.stack([f[0] for f in data]), \n", + " 'attention_mask': torch.stack([f[1] for f in data]),\n", + " 'labels': torch.stack([f[0] for f in data])})" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "22f4059a", + "metadata": { + "scrolled": false + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "***** Running training *****\n", + " Num examples = 168649\n", + " Num Epochs = 5\n", + " Instantaneous batch size per device = 6\n", + " Total train batch size (w. parallel, distributed & accumulation) = 6\n", + " Gradient Accumulation steps = 1\n", + " Total optimization steps = 140545\n" + ] + }, + { + "data": { + "text/html": [ + "\n", + "
\n", + " \n", + " \n", + " [ 19/140545 00:14 < 34:01:47, 1.15 it/s, Epoch 0.00/5]\n", + "
\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
StepTraining Loss

" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "trainer.train()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5fc655a2", + "metadata": {}, + "outputs": [], + "source": [ + "1+!" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cf646b05", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "422ad755", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "902240e7", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/models/gpt_neo/neo-refined.ipynb b/src/models/gpt_neo/neo-refined.ipynb new file mode 100644 index 0000000..78ddfb4 --- /dev/null +++ b/src/models/gpt_neo/neo-refined.ipynb @@ -0,0 +1,3350 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "0252fa11", + "metadata": {}, + "outputs": [], + "source": [ + "import glob" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "e42d9ae1", + "metadata": {}, + "outputs": [], + "source": [ + "from transformers import GPT2Tokenizer, GPTNeoForCausalLM,GPTNeoModel" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "d0c3701c", + "metadata": {}, + "outputs": [], + "source": [ + "from transformers import AutoModelForSequenceClassification" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "e05c6e6b", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n" + ] + } + ], + "source": [ + "tokenizer = GPT2Tokenizer.from_pretrained(\"EleutherAI/gpt-neo-125M\", \n", + " bos_token=\"<|startoftext|>\",\n", + " eos_token=\"<|endoftext|>\",\n", + " pad_token=\"<|pad|>\",truncation=True,padding=True)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "011925ac", + "metadata": {}, + "outputs": [], + "source": [ + "# Download the pre-trained GPT-Neo model and transfer it to the GPU\n", + "model = GPTNeoForCausalLM.from_pretrained(\"results/checkpoint-140000/\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "8d3ba692", + "metadata": {}, + "outputs": [], + "source": [ + "# AutoModelForSequenceClassification.from_pretrained(\"results/checkpoint-140000/\")" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "b0846e13", + "metadata": {}, + "outputs": [], + "source": [ + "# model" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "61f73398", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "GPTNeoForCausalLM(\n", + " (transformer): GPTNeoModel(\n", + " (wte): Embedding(50259, 768)\n", + " (wpe): Embedding(2048, 768)\n", + " (drop): Dropout(p=0, inplace=False)\n", + " (h): ModuleList(\n", + " (0): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): 
GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (1): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (2): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (3): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", 
+ " (4): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (5): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (6): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (7): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): 
Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (8): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (9): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (10): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (11): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), 
eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " )\n", + " (ln_f): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " )\n", + " (lm_head): Linear(in_features=768, out_features=50259, bias=False)\n", + ")" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "9e136e2e", + "metadata": {}, + "outputs": [], + "source": [ + "import json" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "241c26ab", + "metadata": {}, + "outputs": [], + "source": [ + "data = open(\"files_upload/data_test_prepared.jsonl\",\"r\").readlines()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "86e2a2ad", + "metadata": {}, + "outputs": [], + "source": [ + "data = [json.loads(i) for i in data]" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "afc9f67f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'prompt': \"GOAL α : Type u,\\t_inst_1 : inhabited α,\\tb : buffer α,\\ti : ℕ,\\th : i < b.size\\t⊢ b.read ⟨i, h⟩ = b.read' i\\n PROOFSTEP \",\n", + " 'completion': \" cases b; unfold read read'; simp [array.read_eq_read']\\n\"}" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "data[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "id": "fe7b00a4", + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'hard' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "Input \u001b[0;32mIn [38]\u001b[0m, in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[38;5;28mlen\u001b[39m(\u001b[43mhard\u001b[49m)\n", + "\u001b[0;31mNameError\u001b[0m: name 'hard' is not defined" + ] + } + ], + "source": [ + "len(hard)" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "8f7051a1", + "metadata": {}, + "outputs": [], + "source": [ + "all_d = ['<|startoftext|>' + ' ' + i[\"prompt\"].replace(\"GOAL\",\"\").replace(\"PROOFSTEP\",\"\") + ' ' for i in data]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "b5829864", + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "import torch" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "id": "519f359e", + "metadata": {}, + "outputs": [], + "source": [ + "results = []\n", + "hard = []\n", + "model.cuda()\n", + "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "b89c575f", + "metadata": {}, + "outputs": [], + "source": [ + "all_d[0]\n", + "\n", + "tr = tokenizer(all_d[0], return_tensors=\"pt\",padding=True).input_ids" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "667fb03b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([1, 74])" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "tr.shape" + ] + }, + { + "cell_type": "code", + 
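The `Embedding(50259, 768)` and `lm_head` sizes above are GPT-2's 50257-token vocabulary plus the two tokens added at fine-tuning time, `<|startoftext|>` and `<|pad|>` (`<|endoftext|>` is already part of GPT-2). On the evaluation side, the prompts are rebuilt by stripping the `GOAL`/`PROOFSTEP` markers from each test record and prepending the start token. A condensed sketch of those cells, with the paths and checkpoint name taken from the notebook:

```python
import json
from transformers import GPT2Tokenizer, GPTNeoForCausalLM

tokenizer = GPT2Tokenizer.from_pretrained(
    "EleutherAI/gpt-neo-125M",
    bos_token="<|startoftext|>",
    eos_token="<|endoftext|>",
    pad_token="<|pad|>",
)

# Fine-tuned weights saved by the training notebook.
model = GPTNeoForCausalLM.from_pretrained("results/checkpoint-140000/")

# Each test record looks like
# {"prompt": "GOAL <tactic state> PROOFSTEP ", "completion": " <tactic>\n"}
with open("files_upload/data_test_prepared.jsonl") as f:
    data = [json.loads(line) for line in f]

# Same construction as the notebook: drop the markers, prepend the start token.
all_d = [
    "<|startoftext|> " + rec["prompt"].replace("GOAL", "").replace("PROOFSTEP", "") + " "
    for rec in data
]

tr = tokenizer(all_d[0], return_tensors="pt").input_ids  # shape (1, prompt_length)
```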
"execution_count": 19, + "id": "2d4aff22", + "metadata": {}, + "outputs": [], + "source": [ + "output = model(tr)" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "5b728419", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "GPTNeoModel(\n", + " (wte): Embedding(50259, 768)\n", + " (wpe): Embedding(2048, 768)\n", + " (drop): Dropout(p=0, inplace=False)\n", + " (h): ModuleList(\n", + " (0): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (1): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (2): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (3): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " 
(v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (4): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (5): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (6): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (7): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, 
inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (8): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (9): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (10): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (11): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), 
eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " )\n", + " (ln_f): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + ")" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "2d7b9ba7", + "metadata": {}, + "outputs": [], + "source": [ + "logits = output[0]\n", + "hidden_states = output[1]" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "id": "4d09a8a0", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[-6.5701e-01, -5.5526e-01, 6.5292e-01, -4.7964e-01, 1.2447e-01,\n", + " -4.4783e-01, -3.2223e-01, 6.3936e-01, -1.1968e+00, 4.0471e-01,\n", + " 2.6583e-01, -6.3809e-01, -1.5580e+00, 4.7649e-02, -7.8752e-01,\n", + " -9.3344e-02, 7.0465e-01, 1.1290e+00, -2.3664e-01, -1.2280e+00,\n", + " 4.5062e-01, 3.9161e-01, -8.2371e-01, 8.9174e-01, -8.8534e-01,\n", + " -1.9812e+00, 4.1720e-01, -1.5243e-01, -6.0353e-02, 6.7547e-01,\n", + " -8.3931e-01, -4.6398e-01, 3.8069e-01, 6.3796e-01, -5.6777e-01,\n", + " 2.2866e-01, 5.9045e-01, 6.4310e-02, -4.0508e-01, -1.2508e-01,\n", + " 1.1214e+00, -4.0432e-01, 8.7023e-01, 2.7850e-01, 1.3989e+00,\n", + " -2.0679e-02, -3.5790e-01, 1.1951e-01, 4.1695e-01, 6.1825e-01,\n", + " 3.7406e-01, 7.5965e-01, -6.4549e-01, 9.9186e-01, 2.6553e-01,\n", + " -4.4531e-02, 8.1131e-01, -6.3210e-01, 2.5568e-01, 1.3212e+00,\n", + " -4.6501e-01, 5.4562e-01, 2.6294e-01, 4.9838e-01, 9.4803e-01,\n", + " 3.5791e+00, -9.8433e-01, 1.7407e+00, 1.4252e-01, 1.0361e+00,\n", + " 2.0368e+00, -2.2072e-01, -1.1897e-02, 3.2045e-02, -1.0798e+00,\n", + " -7.7112e-01, -1.0345e+00, 8.2340e-01, -1.9613e-01, -4.8245e-01,\n", + " 6.8854e-01, 1.4294e-01, 1.9809e-01, 6.1636e-01, -3.9681e-01,\n", + " -9.4599e-01, -3.6114e+00, -7.7438e-01, 6.7198e-01, -3.2263e-01,\n", + " 1.6561e+00, -1.5145e+00, 2.2258e-01, 1.3614e-01, -1.1029e-01,\n", + " 3.4223e-01, -4.1185e-01, -2.1168e-01, -1.4953e-02, 7.8509e-01,\n", + " 6.5173e-01, -8.5843e-01, -2.7685e-01, -1.2903e-02, -5.5519e-02,\n", + " 1.6653e+00, -2.6266e+00, 2.7142e-01, -4.8294e-01, 1.2584e+00,\n", + " 8.8732e-01, 1.2777e-01, 4.2908e-01, 7.3520e-01, -7.2251e-01,\n", + " 2.6671e-01, -7.5993e-01, 3.9686e+00, -1.8805e-01, -1.8968e-01,\n", + " 1.7378e-01, 2.0486e-01, 8.2541e-02, 4.6833e-01, -4.0784e-02,\n", + " 1.6197e+00, -1.6930e+00, 3.4423e-01, -1.5985e-01, 3.4134e-01,\n", + " 2.6094e+00, -1.3786e-01, -3.4967e-01, -9.2543e-01, 7.1359e-01,\n", + " 6.4271e-02, -1.6339e-01, -1.6413e-01, -1.3114e+00, 1.7517e+00,\n", + " -6.9101e-01, -3.6901e-01, -2.5279e-01, 4.6112e-01, -3.7700e-01,\n", + " -6.6868e-01, 1.4174e-01, -1.5963e+00, 5.6456e-01, 2.7206e-01,\n", + " -5.1195e-01, 
8.4535e-01, -2.0141e+00, -1.5964e+00, 7.4845e-01,\n", + " 4.9039e-01, -7.0711e-01, -8.1750e-01, 7.7699e-01, 2.8655e-02,\n", + " -1.3790e+00, -8.1854e-01, -2.0050e+00, 8.0013e-01, 1.3345e-01,\n", + " 7.7979e-01, 1.7734e-01, -1.8613e-01, -5.6572e-02, 2.5177e-01,\n", + " -1.0537e-01, 8.4286e-02, -9.0934e-01, 1.1124e+00, 1.6632e-01,\n", + " 1.1082e+00, -3.0319e-02, -8.6474e-01, 6.4134e-01, 4.6665e-02,\n", + " -2.8866e-01, -1.8509e+00, -3.5197e-01, -6.4529e-01, -4.7109e-01,\n", + " 8.6366e-01, -8.9103e-01, -2.7812e-01, 9.4400e-01, 1.1482e-01,\n", + " -1.5588e+00, 1.7912e+00, -1.2328e+00, -2.6627e-01, -5.2035e-01,\n", + " 1.3610e-01, -6.2723e-01, -1.7563e+00, -2.1655e-01, 1.4863e+00,\n", + " 2.4796e-01, -2.5510e-01, -7.8675e-01, -1.2291e+00, 1.0645e+00,\n", + " 1.4270e-01, -7.4664e-02, 3.6338e-01, -6.2780e-01, -1.4775e+00,\n", + " -3.3132e-01, -5.3231e-01, -1.5406e+00, 4.0470e-01, -9.4024e-01,\n", + " -1.2271e+00, -5.2211e-01, 5.0287e-01, 8.4112e-01, -4.7627e-01,\n", + " -8.5266e-01, 8.8161e-01, 4.9542e-01, -7.1815e-01, -2.7518e-01,\n", + " -6.3465e-01, -3.8776e-01, 3.4735e-01, -3.5354e-01, -7.1639e-01,\n", + " -1.6516e+00, -1.1366e-01, 1.0438e+00, 2.6526e-01, 8.2621e-01,\n", + " 3.7207e-01, -5.8425e-01, 3.7507e-01, -5.7666e-01, 1.4220e-01,\n", + " -1.7574e-01, 1.0300e+00, 1.3847e+00, -9.3166e-01, 2.2898e+00,\n", + " -4.0698e-02, -1.4337e+00, 6.8945e-01, -1.2590e-01, 2.2820e+00,\n", + " -1.2637e+00, 8.6306e-01, 2.5949e-01, -1.6272e+00, -7.4919e-01,\n", + " 7.9700e-02, -1.4580e-01, 1.4702e-01, -4.7805e+00, -1.7447e-01,\n", + " 1.5733e-01, -1.1328e+00, -1.7997e-01, 1.1849e-01, -7.6814e-02,\n", + " 5.7783e-02, 8.5474e-01, 7.1130e-01, -4.8318e-01, -4.6597e-01,\n", + " -2.3761e-01, 6.5967e-01, -2.3719e-01, -5.8491e-01, -4.6444e-01,\n", + " 1.9960e+00, -4.8328e-02, -3.0666e-01, 3.1178e+00, -2.6608e-01,\n", + " 1.5935e+00, -5.9974e-01, 1.2217e+00, -8.8419e-01, 3.1703e-01,\n", + " -9.0153e-01, 3.9764e-01, 2.6823e+00, 1.1637e+00, 5.7270e-01,\n", + " -1.1563e+00, -4.0448e-02, -8.3996e-01, -3.8448e-01, 1.0513e+00,\n", + " 1.1910e+00, -8.6219e-01, -1.0555e+00, -2.9337e-01, 3.2118e-01,\n", + " 8.7482e-01, -8.1878e-01, -1.6606e+00, -1.1992e+00, -5.5086e-01,\n", + " -6.2969e-02, 1.7242e-01, 1.2792e+00, 1.7232e-01, 1.1086e-02,\n", + " 3.4841e-01, 3.9411e-02, -3.0313e-01, -7.6677e-01, -7.8490e-01,\n", + " -9.1679e-01, -8.8015e-01, 7.2944e-01, 6.4357e-01, -1.1834e+00,\n", + " -1.7342e-01, 5.5864e-03, 1.2810e+00, -7.1590e-01, 2.5360e-01,\n", + " 8.0045e-01, 1.5325e-01, 4.9814e-01, -7.0453e-01, -1.7591e-01,\n", + " -4.2090e-01, -1.5402e-01, 8.1987e-01, -2.4300e-01, -7.4993e-01,\n", + " -1.2363e+00, 2.9098e-01, -1.1224e-02, -3.8614e-01, -1.6036e+00,\n", + " 1.3793e+00, -9.9113e-01, -1.2236e+00, 9.9869e-01, -3.1115e-01,\n", + " 7.5197e-02, -7.7634e-01, -1.0849e+00, -1.2853e+00, -7.2456e-02,\n", + " -3.3859e-01, 1.7963e-01, -7.7258e-02, -3.1284e-01, 4.8385e-01,\n", + " -1.8931e-01, -1.2296e-01, 4.8511e-01, -3.6387e+00, 1.2859e-01,\n", + " 6.5065e-01, 5.2783e-01, 1.3326e-02, -6.0778e+00, -8.8962e-01,\n", + " 1.6501e+00, 1.1722e+00, 7.2446e-01, -1.2549e+00, 4.5469e-01,\n", + " -8.5785e-02, -3.7273e-01, -6.0931e-01, -1.2346e+00, 1.7816e-01,\n", + " -6.9163e-01, 3.8728e-01, 1.0446e+00, -3.1714e-01, 4.4183e-01,\n", + " -5.0124e-01, 3.0049e-02, 5.8875e-02, 2.0558e+00, 2.0781e-01,\n", + " -5.1865e-01, -8.9325e-01, 6.6030e-01, 4.6610e-01, -2.2562e-01,\n", + " 1.5435e+00, 1.9012e-01, -1.0826e-01, -1.2049e+00, 5.8945e-01,\n", + " -1.9320e-01, -3.4943e-01, 5.9606e-02, -1.4613e-01, 2.5164e-01,\n", + " -5.5244e-01, -9.7601e-01, 
3.2872e-01, -6.7769e-01, 1.3954e+00,\n", + " -9.0082e-02, 5.0501e-02, 7.4378e-02, 2.5703e-01, 7.5120e-01,\n", + " -5.1075e-01, -1.6856e-01, 7.8392e-01, 3.2094e-01, -5.6037e-01,\n", + " 6.1606e-01, 6.8398e-04, 1.0000e+00, 4.2385e+00, 5.3653e-01,\n", + " 2.0919e-01, -1.3542e+00, 3.1766e-01, -7.0561e-01, -5.8379e-02,\n", + " 2.5936e-01, 2.5020e-01, 4.4425e-01, -6.2398e-01, -1.4404e+00,\n", + " -7.2330e-02, -1.0905e-01, 8.3460e-01, 2.2813e-01, 3.0086e-01,\n", + " 1.0936e+00, -9.4616e-01, 2.5665e-01, -1.7743e-01, -1.3334e+00,\n", + " 6.4375e-01, -1.4355e+00, -6.5327e-01, 3.6492e-01, -7.0356e-02,\n", + " 4.0684e-02, 4.3950e-01, 4.7223e-01, -3.9971e-04, -5.8388e-01,\n", + " 2.6515e-01, 3.3364e-01, 4.2673e-01, 2.1493e-03, -1.1889e+00,\n", + " 5.3956e-01, 8.8198e-01, -8.8949e-02, -5.2723e-01, 1.1455e-01,\n", + " 1.8076e-01, 2.3838e-02, 2.4469e-01, -6.8680e-01, 1.5550e+00,\n", + " 1.0842e+00, -2.1521e+00, 4.5426e-01, 2.6543e-01, -7.3560e-01,\n", + " -1.0515e+00, -4.6559e-01, 1.0901e+00, -2.4138e-01, -3.7686e-01,\n", + " -7.8627e-01, 1.0888e+00, 9.7114e-01, 6.5637e-01, -1.4758e-01,\n", + " -3.8624e-01, 1.7539e-01, 8.2698e-01, 4.4543e-01, 1.2358e+00,\n", + " -1.0063e+00, 3.1053e+00, -1.6710e-01, 3.6936e-01, 1.7222e-01,\n", + " 3.3817e-01, 2.5023e+00, 9.3275e-01, -1.6935e-01, -6.0860e-03,\n", + " 7.9665e-01, -2.9551e-01, -1.4873e+00, 1.1630e-01, -2.5062e-01,\n", + " 9.5673e-01, -1.5985e+00, -8.0909e-01, -4.1604e-01, 4.9216e-01,\n", + " 2.6708e-02, 4.7558e-02, -1.0335e+00, -6.3715e-01, -3.9305e-01,\n", + " -1.6873e+00, -1.4530e-01, -1.2510e+00, -1.1041e+00, -1.3027e+00,\n", + " 1.0975e-01, -1.4295e+00, 3.3724e-01, 6.7257e-01, -1.3012e-01,\n", + " 5.1997e-02, -1.5490e+00, -1.7612e+00, 1.5916e-01, 2.9130e-01,\n", + " -3.2242e-01, -1.3686e+00, -1.5555e+00, -1.8884e-01, -1.6161e+00,\n", + " 8.1470e-01, 1.1737e+00, -5.6447e-01, 2.9640e+00, 1.6170e+00,\n", + " 1.1311e+00, -8.8295e-01, 1.1148e+00, 1.0092e+00, 1.2691e-01,\n", + " 6.8562e+00, -6.0829e-01, 4.1997e-01, -3.8029e-01, -1.8360e-02,\n", + " 3.6805e-01, 2.5688e-01, 5.3263e-01, -4.0681e-01, 2.0300e+00,\n", + " -2.5101e+00, -1.2686e-01, -1.4400e+00, 6.3667e-02, -6.0642e-01,\n", + " 1.0279e-01, -1.4474e-01, -1.0112e+00, -5.2810e-01, -1.8737e-01,\n", + " 2.7003e-01, 1.6971e+00, 1.0340e-01, -3.3243e-01, 1.7826e-01,\n", + " -1.3094e+00, -1.9179e+00, -9.5765e-02, 6.7344e-01, -2.5450e-01,\n", + " 1.4722e+00, 1.3464e-01, -4.5146e-02, 1.8374e+00, -4.7156e-02,\n", + " -8.8361e-01, 3.3692e-01, 1.0993e+00, 1.1693e-02, -6.3684e-01,\n", + " 1.1004e-01, 8.1219e-02, -2.6482e-01, 3.4436e-01, -1.1046e+00,\n", + " 1.9981e+00, -1.5174e-01, -1.2494e+00, -6.1701e-01, -7.8737e-01,\n", + " -7.7434e-01, 7.1746e-01, 1.3383e-01, 5.0151e-01, -7.2617e-01,\n", + " 2.8029e-01, 2.9190e-01, -3.2804e-01, 3.5449e-01, 1.0341e+00,\n", + " -1.3220e-01, -1.9574e-01, -1.3481e+00, -4.2592e-01, 4.5257e-01,\n", + " 3.6219e-01, 3.8969e-02, -2.7785e+00, 9.7504e-01, 7.8570e-02,\n", + " -8.3187e-01, -2.1373e-01, 5.1211e-01, 7.2179e-01, -3.5053e-01,\n", + " -4.0530e-01, -1.0231e-02, 1.8640e-01, -7.7031e-01, -2.1166e-01,\n", + " -5.3105e-02, -3.7754e-01, -7.1673e-01, 7.1881e-01, 3.1602e-01,\n", + " 1.1775e+00, -4.8429e-01, 1.0751e-01, -1.3432e+00, 1.0650e+00,\n", + " 1.4206e-01, 6.2686e-01, 6.5756e-01, -1.7967e-01, -3.6516e-01,\n", + " 4.6328e-02, -2.6320e-01, -1.5158e-01, 4.8855e-01, -4.3280e-01,\n", + " 4.4565e-03, 1.7619e+00, 6.1102e-01, -4.6537e-01, -1.4941e-01,\n", + " 9.1688e-01, -7.5126e-01, 4.7119e-01, -4.7130e-02, 2.6428e-01,\n", + " 2.8655e-01, -4.5528e-01, 1.7627e+00, -2.0353e-01, 
1.6043e+00,\n", + " 6.1926e-01, -1.4069e+00, 2.6919e+00, -5.2989e-01, 3.1456e+00,\n", + " 5.6604e-01, -7.5212e-01, 6.2512e-01, -6.9199e-01, -5.0610e-02,\n", + " 1.2574e-01, 1.1247e+00, 7.2623e-01, 1.4213e+00, -1.0503e+00,\n", + " -1.1006e+00, 5.9191e-01, -6.8493e-02, 7.0727e-01, 1.9768e-01,\n", + " -1.3564e+00, -9.7928e-02, 1.0823e+00, 7.1868e-01, -2.0407e-01,\n", + " -4.5663e-01, 2.3876e-01, -7.0750e-01, 6.8472e-01, -3.5027e-01,\n", + " 4.0879e-01, 5.1511e-02, -2.3052e-01, 1.7554e-01, -6.4398e-01,\n", + " -5.8293e-01, 1.0289e+00, -1.1623e+00, -8.3321e-01, 8.1716e-01,\n", + " -5.8036e-01, -1.4840e+00, -1.7825e-01, -4.4530e-01, 7.1821e-02,\n", + " 1.0437e+00, 1.1869e+00, 3.2286e-01, 2.1705e-01, -1.0956e+00,\n", + " 4.3658e-01, -1.2708e+00, 6.1088e-01, 7.8037e-01, 7.6636e-01,\n", + " 3.7340e-01, -3.0718e-01, -4.7959e-01, 2.4528e+00, -6.1872e-01,\n", + " 6.8037e-02, -1.7061e-01, 3.8657e-01, 4.1846e-01, -1.8799e+00,\n", + " 3.8377e-01, 2.6193e-01, 1.3883e+00, 7.2463e-01, -1.0595e-01,\n", + " 1.5683e-02, 4.2094e-02, -1.5529e-01, 7.2770e-01, -1.1587e-01,\n", + " -1.7248e+00, 2.9029e-01, -7.9939e-01, -5.6164e-01, -9.2776e-01,\n", + " 7.0453e-01, -7.9448e-01, 8.2995e-02, -7.9815e-02, 6.3741e-01,\n", + " -2.6026e-01, -4.5850e-01, 8.8943e-02, 8.3316e-01, -1.0701e+00,\n", + " 8.8479e-01, 1.8555e-01, -1.0572e+00, 6.3512e-01, 1.7233e-01,\n", + " 4.0035e-01, -2.0063e-01, 1.4709e+00, -4.4721e-01, -4.6141e-01,\n", + " 7.1513e-02, 1.6342e+00, 3.9355e-01, 1.2577e-01, -9.6842e-02,\n", + " -8.0375e-01, -1.1370e+00, -1.3411e+00, -2.6858e-01, -3.9891e-01,\n", + " 5.3725e-01, -3.0091e-01, 8.7687e-01]], grad_fn=)" + ] + }, + "execution_count": 37, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "torch.mean(logits,axis=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "id": "0b99367c", + "metadata": {}, + "outputs": [], + "source": [ + "# tensor.mean(logits)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "91659d82", + "metadata": {}, + "outputs": [], + "source": [ + "gen_tokens = model.generate(\n", + " tr,\n", + " do_sample=True,\n", + " temperature=0.9,\n", + " max_length=400, \n", + " num_return_sequences=15, \n", + " )\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "26f11621", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[]" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "results" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "56691d90", + "metadata": {}, + "outputs": [], + "source": [ + "hard = []" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "f059f362", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + 
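The `generate` call above samples 15 candidate proof steps per goal, and the run of warnings that follows has two separate causes: no explicit `pad_token_id` was passed, and `max_length=400` counts the prompt tokens, so any goal whose tokenised state is longer than 400 tokens trips the "Input length ... but `max_length`" message. Below is a sketch of the evaluation loop with both addressed via `pad_token_id` and `max_new_tokens`; the 128-token completion budget and the exact-match check against the reference tactic are assumptions, since the notebook's `results`/`hard` bookkeeping is not shown at this point.

```python
import torch

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)
model.eval()

results, hard = [], []

for rec, prompt in zip(data, all_d):
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids.to(device)

    with torch.no_grad():
        gen = model.generate(
            input_ids,
            do_sample=True,
            temperature=0.9,
            max_new_tokens=128,                   # budget for the completion only,
                                                  # so long goals no longer warn
            num_return_sequences=15,
            pad_token_id=tokenizer.pad_token_id,  # silences the repeated warning
        )

    # Keep only the generated continuation, not the echoed prompt.
    candidates = [
        tokenizer.decode(seq[input_ids.shape[1]:], skip_special_tokens=True).strip()
        for seq in gen
    ]
    reference = rec["completion"].strip()

    if reference in candidates:
        results.append((prompt, reference))
    else:
        hard.append((prompt, candidates))
```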
"Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + 
"Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + 
"Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 494, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 529, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + } + ], + "source": [ + "for i,j in zip(all_d,data):\n", + " tr = tokenizer(i,
return_tensors=\"pt\",padding=True).input_ids\n", + " try:\n", + " tr = tr.to(device)\n", + " gen_tokens = model.generate(\n", + " tr,\n", + " do_sample=True,\n", + " temperature=0.9,\n", + " max_length=400, \n", + " num_return_sequences=15, \n", + " )\n", + " gen_text = tokenizer.batch_decode(gen_tokens)\n", + " results.append((gen_text,j))\n", + " except:\n", + " hard.append(j)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "92d5fefe", + "metadata": {}, + "outputs": [], + "source": [ + "len(hard),len(results)" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "fe6fda67", + "metadata": {}, + "outputs": [], + "source": [ + "i = hard[99]" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "d65ce087", + "metadata": {}, + "outputs": [ + { + "ename": "RuntimeError", + "evalue": "CUDA error: device-side assert triggered\nCUDA kernel errors might be asynchronously reported at some other API call,so the stacktrace below might be incorrect.\nFor debugging consider passing CUDA_LAUNCH_BLOCKING=1.", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", + "Input \u001b[0;32mIn [27]\u001b[0m, in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mmodel\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdevice\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcpu\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/nn/modules/module.py:907\u001b[0m, in \u001b[0;36mModule.to\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 903\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m t\u001b[38;5;241m.\u001b[39mto(device, dtype \u001b[38;5;28;01mif\u001b[39;00m t\u001b[38;5;241m.\u001b[39mis_floating_point() \u001b[38;5;129;01mor\u001b[39;00m t\u001b[38;5;241m.\u001b[39mis_complex() \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 904\u001b[0m non_blocking, memory_format\u001b[38;5;241m=\u001b[39mconvert_to_format)\n\u001b[1;32m 905\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m t\u001b[38;5;241m.\u001b[39mto(device, dtype \u001b[38;5;28;01mif\u001b[39;00m t\u001b[38;5;241m.\u001b[39mis_floating_point() \u001b[38;5;129;01mor\u001b[39;00m t\u001b[38;5;241m.\u001b[39mis_complex() \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m, non_blocking)\n\u001b[0;32m--> 907\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_apply\u001b[49m\u001b[43m(\u001b[49m\u001b[43mconvert\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/nn/modules/module.py:578\u001b[0m, in \u001b[0;36mModule._apply\u001b[0;34m(self, fn)\u001b[0m\n\u001b[1;32m 576\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_apply\u001b[39m(\u001b[38;5;28mself\u001b[39m, fn):\n\u001b[1;32m 577\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m module \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mchildren():\n\u001b[0;32m--> 578\u001b[0m 
\u001b[43mmodule\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_apply\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfn\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 580\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcompute_should_use_set_data\u001b[39m(tensor, tensor_applied):\n\u001b[1;32m 581\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m torch\u001b[38;5;241m.\u001b[39m_has_compatible_shallow_copy_type(tensor, tensor_applied):\n\u001b[1;32m 582\u001b[0m \u001b[38;5;66;03m# If the new tensor has compatible tensor type as the existing tensor,\u001b[39;00m\n\u001b[1;32m 583\u001b[0m \u001b[38;5;66;03m# the current behavior is to change the tensor in-place using `.data =`,\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 588\u001b[0m \u001b[38;5;66;03m# global flag to let the user control whether they want the future\u001b[39;00m\n\u001b[1;32m 589\u001b[0m \u001b[38;5;66;03m# behavior of overwriting the existing tensor or not.\u001b[39;00m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/nn/modules/module.py:578\u001b[0m, in \u001b[0;36mModule._apply\u001b[0;34m(self, fn)\u001b[0m\n\u001b[1;32m 576\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_apply\u001b[39m(\u001b[38;5;28mself\u001b[39m, fn):\n\u001b[1;32m 577\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m module \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mchildren():\n\u001b[0;32m--> 578\u001b[0m \u001b[43mmodule\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_apply\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfn\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 580\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcompute_should_use_set_data\u001b[39m(tensor, tensor_applied):\n\u001b[1;32m 581\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m torch\u001b[38;5;241m.\u001b[39m_has_compatible_shallow_copy_type(tensor, tensor_applied):\n\u001b[1;32m 582\u001b[0m \u001b[38;5;66;03m# If the new tensor has compatible tensor type as the existing tensor,\u001b[39;00m\n\u001b[1;32m 583\u001b[0m \u001b[38;5;66;03m# the current behavior is to change the tensor in-place using `.data =`,\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 588\u001b[0m \u001b[38;5;66;03m# global flag to let the user control whether they want the future\u001b[39;00m\n\u001b[1;32m 589\u001b[0m \u001b[38;5;66;03m# behavior of overwriting the existing tensor or not.\u001b[39;00m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/nn/modules/module.py:601\u001b[0m, in \u001b[0;36mModule._apply\u001b[0;34m(self, fn)\u001b[0m\n\u001b[1;32m 597\u001b[0m \u001b[38;5;66;03m# Tensors stored in modules are graph leaves, and we don't want to\u001b[39;00m\n\u001b[1;32m 598\u001b[0m \u001b[38;5;66;03m# track autograd history of `param_applied`, so we have to use\u001b[39;00m\n\u001b[1;32m 599\u001b[0m \u001b[38;5;66;03m# `with torch.no_grad():`\u001b[39;00m\n\u001b[1;32m 600\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mno_grad():\n\u001b[0;32m--> 601\u001b[0m param_applied \u001b[38;5;241m=\u001b[39m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[43mparam\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 602\u001b[0m should_use_set_data \u001b[38;5;241m=\u001b[39m compute_should_use_set_data(param, param_applied)\n\u001b[1;32m 603\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m should_use_set_data:\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/nn/modules/module.py:905\u001b[0m, in 
\u001b[0;36mModule.to..convert\u001b[0;34m(t)\u001b[0m\n\u001b[1;32m 902\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m convert_to_format \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m t\u001b[38;5;241m.\u001b[39mdim() \u001b[38;5;129;01min\u001b[39;00m (\u001b[38;5;241m4\u001b[39m, \u001b[38;5;241m5\u001b[39m):\n\u001b[1;32m 903\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m t\u001b[38;5;241m.\u001b[39mto(device, dtype \u001b[38;5;28;01mif\u001b[39;00m t\u001b[38;5;241m.\u001b[39mis_floating_point() \u001b[38;5;129;01mor\u001b[39;00m t\u001b[38;5;241m.\u001b[39mis_complex() \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 904\u001b[0m non_blocking, memory_format\u001b[38;5;241m=\u001b[39mconvert_to_format)\n\u001b[0;32m--> 905\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mt\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdtype\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mt\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mis_floating_point\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mt\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mis_complex\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01melse\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnon_blocking\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[0;31mRuntimeError\u001b[0m: CUDA error: device-side assert triggered\nCUDA kernel errors might be asynchronously reported at some other API call,so the stacktrace below might be incorrect.\nFor debugging consider passing CUDA_LAUNCH_BLOCKING=1." 
+ ] + } + ], + "source": [ + "model.to(torch.device(\"cpu\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "6f658073", + "metadata": {}, + "outputs": [], + "source": [ + "tr = '<|startoftext|>' + ' ' + i[\"prompt\"].replace(\"GOAL\",\"\").replace(\"PROOFSTEP\",\"\") + ' '" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "4271c93c", + "metadata": {}, + "outputs": [], + "source": [ + "tr = tokenizer(tr, return_tensors=\"pt\",padding=True).input_ids" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "02638133", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "ename": "RuntimeError", + "evalue": "CUDA error: device-side assert triggered\nCUDA kernel errors might be asynchronously reported at some other API call,so the stacktrace below might be incorrect.\nFor debugging consider passing CUDA_LAUNCH_BLOCKING=1.", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", + "Input \u001b[0;32mIn [24]\u001b[0m, in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0m gen_tokens \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgenerate\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 2\u001b[0m \u001b[43m \u001b[49m\u001b[43mtr\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3\u001b[0m \u001b[43m \u001b[49m\u001b[43mdo_sample\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 4\u001b[0m \u001b[43m \u001b[49m\u001b[43mtemperature\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m0.9\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 5\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_length\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m500\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\n\u001b[1;32m 6\u001b[0m \u001b[43m \u001b[49m\u001b[43mnum_return_sequences\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m15\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\n\u001b[1;32m 7\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/autograd/grad_mode.py:27\u001b[0m, in \u001b[0;36m_DecoratorContextManager.__call__..decorate_context\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 24\u001b[0m \u001b[38;5;129m@functools\u001b[39m\u001b[38;5;241m.\u001b[39mwraps(func)\n\u001b[1;32m 25\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mdecorate_context\u001b[39m(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[1;32m 26\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mclone():\n\u001b[0;32m---> 27\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/transformers/generation_utils.py:1149\u001b[0m, in \u001b[0;36mGenerationMixin.generate\u001b[0;34m(self, inputs, max_length, min_length, do_sample, 
early_stopping, num_beams, temperature, top_k, top_p, typical_p, repetition_penalty, bad_words_ids, force_words_ids, bos_token_id, pad_token_id, eos_token_id, length_penalty, no_repeat_ngram_size, encoder_no_repeat_ngram_size, num_return_sequences, max_time, max_new_tokens, decoder_start_token_id, use_cache, num_beam_groups, diversity_penalty, prefix_allowed_tokens_fn, logits_processor, stopping_criteria, constraints, output_attentions, output_hidden_states, output_scores, return_dict_in_generate, forced_bos_token_id, forced_eos_token_id, remove_invalid_values, synced_gpus, exponential_decay_length_penalty, **model_kwargs)\u001b[0m\n\u001b[1;32m 1146\u001b[0m requires_attention_mask \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mencoder_outputs\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;129;01min\u001b[39;00m model_kwargs\n\u001b[1;32m 1148\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m model_kwargs\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mattention_mask\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28;01mNone\u001b[39;00m) \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m requires_attention_mask \u001b[38;5;129;01mand\u001b[39;00m accepts_attention_mask:\n\u001b[0;32m-> 1149\u001b[0m model_kwargs[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mattention_mask\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_prepare_attention_mask_for_generation\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1150\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs_tensor\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpad_token_id\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43meos_token_id\u001b[49m\n\u001b[1;32m 1151\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1153\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconfig\u001b[38;5;241m.\u001b[39mis_encoder_decoder \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mencoder_outputs\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;129;01min\u001b[39;00m model_kwargs:\n\u001b[1;32m 1154\u001b[0m \u001b[38;5;66;03m# if model is encoder decoder encoder_outputs are created\u001b[39;00m\n\u001b[1;32m 1155\u001b[0m \u001b[38;5;66;03m# and added to `model_kwargs`\u001b[39;00m\n\u001b[1;32m 1156\u001b[0m model_kwargs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_prepare_encoder_decoder_kwargs_for_generation(\n\u001b[1;32m 1157\u001b[0m inputs_tensor, model_kwargs, model_input_name\n\u001b[1;32m 1158\u001b[0m )\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/transformers/generation_utils.py:504\u001b[0m, in \u001b[0;36mGenerationMixin._prepare_attention_mask_for_generation\u001b[0;34m(self, inputs, pad_token_id, eos_token_id)\u001b[0m\n\u001b[1;32m 502\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m inputs\u001b[38;5;241m.\u001b[39mne(pad_token_id)\u001b[38;5;241m.\u001b[39mlong()\n\u001b[1;32m 503\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 504\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mones\u001b[49m\u001b[43m(\u001b[49m\u001b[43minputs\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mshape\u001b[49m\u001b[43m[\u001b[49m\u001b[43m:\u001b[49m\u001b[38;5;241;43m2\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdtype\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlong\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[0;31mRuntimeError\u001b[0m: CUDA error: device-side assert triggered\nCUDA kernel errors might be asynchronously reported at some other API call,so the stacktrace below might be incorrect.\nFor debugging consider passing CUDA_LAUNCH_BLOCKING=1." + ] + } + ], + "source": [ + "gen_tokens = model.generate(\n", + " tr,\n", + " do_sample=True,\n", + " temperature=0.9,\n", + " max_length=500, \n", + " num_return_sequences=15, \n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "id": "8462b9eb", + "metadata": {}, + "outputs": [], + "source": [ + "gen_text = tokenizer.batch_decode(gen_tokens)" + ] + }, + { + "cell_type": "code", + "execution_count": 57, + "id": "ea1158a1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ y + (z - y) ∈ t\\n bound',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ y + (z - y) ∈ t\\n xt',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ y + (z - y) ∈ t\\n let',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : 
Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ y + (z - y) ∈ t\\n calc',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ y + (z - y) ∈ t\\n xt',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ y + (z - y) ∈ t\\n calc',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ y + (z - y) ∈ t\\n ',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ y + (z - y) ∈ t\\n exact',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d 
n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ y + (z - y) ∈ t\\n xt',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ y + (z - y) ∈ t\\n xt',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ y + (z - y) ∈ t\\n sim',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ y + (z - y) ∈ t\\n exact',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ y + (z - y) ∈ t\\n xt',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ y + (z - y) ∈ t\\n ',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type 
u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ y + (z - y) ∈ t\\n xt']" + ] + }, + "execution_count": 57, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "gen_text" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "id": "0f36b2b7", + "metadata": {}, + "outputs": [], + "source": [ + "hard2 = []" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "id": "029dc2a4", + "metadata": {}, + "outputs": [], + "source": [ + "device = torch.device(\"cpu\" if torch.cuda.is_available() else \"cpu\")" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "id": "8a08ac13", + "metadata": {}, + "outputs": [ + { + "ename": "RuntimeError", + "evalue": "CUDA error: device-side assert triggered\nCUDA kernel errors might be asynchronously reported at some other API call,so the stacktrace below might be incorrect.\nFor debugging consider passing CUDA_LAUNCH_BLOCKING=1.", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", + "Input \u001b[0;32mIn [41]\u001b[0m, in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0m model \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/nn/modules/module.py:907\u001b[0m, in \u001b[0;36mModule.to\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 903\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m t\u001b[38;5;241m.\u001b[39mto(device, dtype \u001b[38;5;28;01mif\u001b[39;00m t\u001b[38;5;241m.\u001b[39mis_floating_point() \u001b[38;5;129;01mor\u001b[39;00m t\u001b[38;5;241m.\u001b[39mis_complex() \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 904\u001b[0m non_blocking, memory_format\u001b[38;5;241m=\u001b[39mconvert_to_format)\n\u001b[1;32m 905\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m t\u001b[38;5;241m.\u001b[39mto(device, dtype \u001b[38;5;28;01mif\u001b[39;00m t\u001b[38;5;241m.\u001b[39mis_floating_point() \u001b[38;5;129;01mor\u001b[39;00m t\u001b[38;5;241m.\u001b[39mis_complex() \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m, non_blocking)\n\u001b[0;32m--> 907\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_apply\u001b[49m\u001b[43m(\u001b[49m\u001b[43mconvert\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/nn/modules/module.py:578\u001b[0m, in \u001b[0;36mModule._apply\u001b[0;34m(self, fn)\u001b[0m\n\u001b[1;32m 576\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_apply\u001b[39m(\u001b[38;5;28mself\u001b[39m, fn):\n\u001b[1;32m 577\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m module \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mchildren():\n\u001b[0;32m--> 578\u001b[0m 
\u001b[43mmodule\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_apply\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfn\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 580\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcompute_should_use_set_data\u001b[39m(tensor, tensor_applied):\n\u001b[1;32m 581\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m torch\u001b[38;5;241m.\u001b[39m_has_compatible_shallow_copy_type(tensor, tensor_applied):\n\u001b[1;32m 582\u001b[0m \u001b[38;5;66;03m# If the new tensor has compatible tensor type as the existing tensor,\u001b[39;00m\n\u001b[1;32m 583\u001b[0m \u001b[38;5;66;03m# the current behavior is to change the tensor in-place using `.data =`,\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 588\u001b[0m \u001b[38;5;66;03m# global flag to let the user control whether they want the future\u001b[39;00m\n\u001b[1;32m 589\u001b[0m \u001b[38;5;66;03m# behavior of overwriting the existing tensor or not.\u001b[39;00m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/nn/modules/module.py:578\u001b[0m, in \u001b[0;36mModule._apply\u001b[0;34m(self, fn)\u001b[0m\n\u001b[1;32m 576\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_apply\u001b[39m(\u001b[38;5;28mself\u001b[39m, fn):\n\u001b[1;32m 577\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m module \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mchildren():\n\u001b[0;32m--> 578\u001b[0m \u001b[43mmodule\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_apply\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfn\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 580\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcompute_should_use_set_data\u001b[39m(tensor, tensor_applied):\n\u001b[1;32m 581\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m torch\u001b[38;5;241m.\u001b[39m_has_compatible_shallow_copy_type(tensor, tensor_applied):\n\u001b[1;32m 582\u001b[0m \u001b[38;5;66;03m# If the new tensor has compatible tensor type as the existing tensor,\u001b[39;00m\n\u001b[1;32m 583\u001b[0m \u001b[38;5;66;03m# the current behavior is to change the tensor in-place using `.data =`,\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 588\u001b[0m \u001b[38;5;66;03m# global flag to let the user control whether they want the future\u001b[39;00m\n\u001b[1;32m 589\u001b[0m \u001b[38;5;66;03m# behavior of overwriting the existing tensor or not.\u001b[39;00m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/nn/modules/module.py:601\u001b[0m, in \u001b[0;36mModule._apply\u001b[0;34m(self, fn)\u001b[0m\n\u001b[1;32m 597\u001b[0m \u001b[38;5;66;03m# Tensors stored in modules are graph leaves, and we don't want to\u001b[39;00m\n\u001b[1;32m 598\u001b[0m \u001b[38;5;66;03m# track autograd history of `param_applied`, so we have to use\u001b[39;00m\n\u001b[1;32m 599\u001b[0m \u001b[38;5;66;03m# `with torch.no_grad():`\u001b[39;00m\n\u001b[1;32m 600\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mno_grad():\n\u001b[0;32m--> 601\u001b[0m param_applied \u001b[38;5;241m=\u001b[39m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[43mparam\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 602\u001b[0m should_use_set_data \u001b[38;5;241m=\u001b[39m compute_should_use_set_data(param, param_applied)\n\u001b[1;32m 603\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m should_use_set_data:\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/nn/modules/module.py:905\u001b[0m, in 
\u001b[0;36mModule.to..convert\u001b[0;34m(t)\u001b[0m\n\u001b[1;32m 902\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m convert_to_format \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m t\u001b[38;5;241m.\u001b[39mdim() \u001b[38;5;129;01min\u001b[39;00m (\u001b[38;5;241m4\u001b[39m, \u001b[38;5;241m5\u001b[39m):\n\u001b[1;32m 903\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m t\u001b[38;5;241m.\u001b[39mto(device, dtype \u001b[38;5;28;01mif\u001b[39;00m t\u001b[38;5;241m.\u001b[39mis_floating_point() \u001b[38;5;129;01mor\u001b[39;00m t\u001b[38;5;241m.\u001b[39mis_complex() \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 904\u001b[0m non_blocking, memory_format\u001b[38;5;241m=\u001b[39mconvert_to_format)\n\u001b[0;32m--> 905\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mt\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdtype\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mt\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mis_floating_point\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mt\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mis_complex\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01melse\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnon_blocking\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[0;31mRuntimeError\u001b[0m: CUDA error: device-side assert triggered\nCUDA kernel errors might be asynchronously reported at some other API call,so the stacktrace below might be incorrect.\nFor debugging consider passing CUDA_LAUNCH_BLOCKING=1." + ] + } + ], + "source": [ + "model = model.to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": 55, + "id": "0787ba8f", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 2792, but ``max_length`` is set to 100. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n" + ] + }, + { + "ename": "IndexError", + "evalue": "index out of range in self", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)", + "Input \u001b[0;32mIn [55]\u001b[0m, in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 5\u001b[0m tr \u001b[38;5;241m=\u001b[39m tokenizer(tr, return_tensors\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mpt\u001b[39m\u001b[38;5;124m\"\u001b[39m,padding\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\u001b[38;5;241m.\u001b[39minput_ids\n\u001b[1;32m 6\u001b[0m \u001b[38;5;66;03m# tr = tr.to(device)\u001b[39;00m\n\u001b[0;32m----> 7\u001b[0m gen_tokens \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgenerate\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 8\u001b[0m \u001b[43m \u001b[49m\u001b[43mtr\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 9\u001b[0m \u001b[43m \u001b[49m\u001b[43mdo_sample\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 10\u001b[0m \u001b[43m \u001b[49m\u001b[43mtemperature\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m0.9\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 11\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_length\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m100\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\n\u001b[1;32m 12\u001b[0m \u001b[43m \u001b[49m\u001b[43mnum_return_sequences\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m10\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\n\u001b[1;32m 13\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 14\u001b[0m gen_text \u001b[38;5;241m=\u001b[39m tokenizer\u001b[38;5;241m.\u001b[39mbatch_decode(gen_tokens)\n\u001b[1;32m 15\u001b[0m results\u001b[38;5;241m.\u001b[39mappend((gen_text,i))\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/autograd/grad_mode.py:27\u001b[0m, in \u001b[0;36m_DecoratorContextManager.__call__..decorate_context\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 24\u001b[0m \u001b[38;5;129m@functools\u001b[39m\u001b[38;5;241m.\u001b[39mwraps(func)\n\u001b[1;32m 25\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mdecorate_context\u001b[39m(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[1;32m 26\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mclone():\n\u001b[0;32m---> 27\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/transformers/generation_utils.py:1281\u001b[0m, in \u001b[0;36mGenerationMixin.generate\u001b[0;34m(self, inputs, max_length, min_length, do_sample, early_stopping, num_beams, temperature, top_k, top_p, typical_p, repetition_penalty, bad_words_ids, force_words_ids, bos_token_id, pad_token_id, eos_token_id, length_penalty, no_repeat_ngram_size, encoder_no_repeat_ngram_size, num_return_sequences, max_time, max_new_tokens, 
decoder_start_token_id, use_cache, num_beam_groups, diversity_penalty, prefix_allowed_tokens_fn, logits_processor, stopping_criteria, constraints, output_attentions, output_hidden_states, output_scores, return_dict_in_generate, forced_bos_token_id, forced_eos_token_id, remove_invalid_values, synced_gpus, exponential_decay_length_penalty, **model_kwargs)\u001b[0m\n\u001b[1;32m 1273\u001b[0m input_ids, model_kwargs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_expand_inputs_for_generation(\n\u001b[1;32m 1274\u001b[0m input_ids,\n\u001b[1;32m 1275\u001b[0m expand_size\u001b[38;5;241m=\u001b[39mnum_return_sequences,\n\u001b[1;32m 1276\u001b[0m is_encoder_decoder\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconfig\u001b[38;5;241m.\u001b[39mis_encoder_decoder,\n\u001b[1;32m 1277\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mmodel_kwargs,\n\u001b[1;32m 1278\u001b[0m )\n\u001b[1;32m 1280\u001b[0m \u001b[38;5;66;03m# 12. run sample\u001b[39;00m\n\u001b[0;32m-> 1281\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msample\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1282\u001b[0m \u001b[43m \u001b[49m\u001b[43minput_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1283\u001b[0m \u001b[43m \u001b[49m\u001b[43mlogits_processor\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlogits_processor\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1284\u001b[0m \u001b[43m \u001b[49m\u001b[43mlogits_warper\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mlogits_warper\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1285\u001b[0m \u001b[43m \u001b[49m\u001b[43mstopping_criteria\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstopping_criteria\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1286\u001b[0m \u001b[43m \u001b[49m\u001b[43mpad_token_id\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpad_token_id\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1287\u001b[0m \u001b[43m \u001b[49m\u001b[43meos_token_id\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43meos_token_id\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1288\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_scores\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moutput_scores\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1289\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_dict_in_generate\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_dict_in_generate\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1290\u001b[0m \u001b[43m \u001b[49m\u001b[43msynced_gpus\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43msynced_gpus\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1291\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mmodel_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1292\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1294\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m is_beam_gen_mode:\n\u001b[1;32m 1295\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m num_return_sequences \u001b[38;5;241m>\u001b[39m num_beams:\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/transformers/generation_utils.py:1892\u001b[0m, in \u001b[0;36mGenerationMixin.sample\u001b[0;34m(self, input_ids, logits_processor, stopping_criteria, logits_warper, max_length, pad_token_id, eos_token_id, output_attentions, output_hidden_states, output_scores, return_dict_in_generate, synced_gpus, 
**model_kwargs)\u001b[0m\n\u001b[1;32m 1889\u001b[0m model_inputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mprepare_inputs_for_generation(input_ids, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mmodel_kwargs)\n\u001b[1;32m 1891\u001b[0m \u001b[38;5;66;03m# forward pass to get next token\u001b[39;00m\n\u001b[0;32m-> 1892\u001b[0m outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1893\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mmodel_inputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1894\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_dict\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 1895\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_attentions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moutput_attentions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1896\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_hidden_states\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moutput_hidden_states\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1897\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1899\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m synced_gpus \u001b[38;5;129;01mand\u001b[39;00m this_peer_finished:\n\u001b[1;32m 1900\u001b[0m cur_len \u001b[38;5;241m=\u001b[39m cur_len \u001b[38;5;241m+\u001b[39m \u001b[38;5;241m1\u001b[39m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/nn/modules/module.py:1110\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 1106\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1107\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1108\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1109\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1110\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1111\u001b[0m \u001b[38;5;66;03m# Do not call functions when jit is used\u001b[39;00m\n\u001b[1;32m 1112\u001b[0m full_backward_hooks, non_full_backward_hooks \u001b[38;5;241m=\u001b[39m [], []\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/transformers/models/gpt_neo/modeling_gpt_neo.py:742\u001b[0m, in \u001b[0;36mGPTNeoForCausalLM.forward\u001b[0;34m(self, input_ids, past_key_values, attention_mask, token_type_ids, position_ids, head_mask, inputs_embeds, labels, use_cache, output_attentions, output_hidden_states, return_dict)\u001b[0m\n\u001b[1;32m 734\u001b[0m \u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 735\u001b[0m 
\u001b[38;5;124;03mlabels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):\u001b[39;00m\n\u001b[1;32m 736\u001b[0m \u001b[38;5;124;03m Labels for language modeling. Note that the labels **are shifted** inside the model, i.e. you can set\u001b[39;00m\n\u001b[1;32m 737\u001b[0m \u001b[38;5;124;03m `labels = input_ids` Indices are selected in `[-100, 0, ..., config.vocab_size]` All labels set to `-100`\u001b[39;00m\n\u001b[1;32m 738\u001b[0m \u001b[38;5;124;03m are ignored (masked), the loss is only computed for labels in `[0, ..., config.vocab_size]`\u001b[39;00m\n\u001b[1;32m 739\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 740\u001b[0m return_dict \u001b[38;5;241m=\u001b[39m return_dict \u001b[38;5;28;01mif\u001b[39;00m return_dict \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconfig\u001b[38;5;241m.\u001b[39muse_return_dict\n\u001b[0;32m--> 742\u001b[0m transformer_outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtransformer\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 743\u001b[0m \u001b[43m \u001b[49m\u001b[43minput_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 744\u001b[0m \u001b[43m \u001b[49m\u001b[43mpast_key_values\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpast_key_values\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 745\u001b[0m \u001b[43m \u001b[49m\u001b[43mattention_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mattention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 746\u001b[0m \u001b[43m \u001b[49m\u001b[43mtoken_type_ids\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtoken_type_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 747\u001b[0m \u001b[43m \u001b[49m\u001b[43mposition_ids\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mposition_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 748\u001b[0m \u001b[43m \u001b[49m\u001b[43mhead_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mhead_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 749\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs_embeds\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minputs_embeds\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 750\u001b[0m \u001b[43m \u001b[49m\u001b[43muse_cache\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43muse_cache\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 751\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_attentions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moutput_attentions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 752\u001b[0m \u001b[43m \u001b[49m\u001b[43moutput_hidden_states\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moutput_hidden_states\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 753\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_dict\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_dict\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 754\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 755\u001b[0m hidden_states \u001b[38;5;241m=\u001b[39m transformer_outputs[\u001b[38;5;241m0\u001b[39m]\n\u001b[1;32m 757\u001b[0m lm_logits \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mlm_head(hidden_states)\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/nn/modules/module.py:1110\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *input, 
**kwargs)\u001b[0m\n\u001b[1;32m 1106\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1107\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1108\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1109\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1110\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1111\u001b[0m \u001b[38;5;66;03m# Do not call functions when jit is used\u001b[39;00m\n\u001b[1;32m 1112\u001b[0m full_backward_hooks, non_full_backward_hooks \u001b[38;5;241m=\u001b[39m [], []\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/transformers/models/gpt_neo/modeling_gpt_neo.py:580\u001b[0m, in \u001b[0;36mGPTNeoModel.forward\u001b[0;34m(self, input_ids, past_key_values, attention_mask, token_type_ids, position_ids, head_mask, inputs_embeds, use_cache, output_attentions, output_hidden_states, return_dict)\u001b[0m\n\u001b[1;32m 578\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m inputs_embeds \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 579\u001b[0m inputs_embeds \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mwte(input_ids)\n\u001b[0;32m--> 580\u001b[0m position_embeds \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mwpe\u001b[49m\u001b[43m(\u001b[49m\u001b[43mposition_ids\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 581\u001b[0m hidden_states \u001b[38;5;241m=\u001b[39m inputs_embeds \u001b[38;5;241m+\u001b[39m position_embeds\n\u001b[1;32m 583\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m token_type_ids \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/nn/modules/module.py:1110\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *input, **kwargs)\u001b[0m\n\u001b[1;32m 1106\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1107\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1108\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1109\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks 
\u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1110\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1111\u001b[0m \u001b[38;5;66;03m# Do not call functions when jit is used\u001b[39;00m\n\u001b[1;32m 1112\u001b[0m full_backward_hooks, non_full_backward_hooks \u001b[38;5;241m=\u001b[39m [], []\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/nn/modules/sparse.py:158\u001b[0m, in \u001b[0;36mEmbedding.forward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 157\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m: Tensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[0;32m--> 158\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43membedding\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 159\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mweight\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpadding_idx\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmax_norm\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 160\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnorm_type\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mscale_grad_by_freq\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msparse\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/nn/functional.py:2183\u001b[0m, in \u001b[0;36membedding\u001b[0;34m(input, weight, padding_idx, max_norm, norm_type, scale_grad_by_freq, sparse)\u001b[0m\n\u001b[1;32m 2177\u001b[0m \u001b[38;5;66;03m# Note [embedding_renorm set_grad_enabled]\u001b[39;00m\n\u001b[1;32m 2178\u001b[0m \u001b[38;5;66;03m# XXX: equivalent to\u001b[39;00m\n\u001b[1;32m 2179\u001b[0m \u001b[38;5;66;03m# with torch.no_grad():\u001b[39;00m\n\u001b[1;32m 2180\u001b[0m \u001b[38;5;66;03m# torch.embedding_renorm_\u001b[39;00m\n\u001b[1;32m 2181\u001b[0m \u001b[38;5;66;03m# remove once script supports set_grad_enabled\u001b[39;00m\n\u001b[1;32m 2182\u001b[0m _no_grad_embedding_renorm_(weight, \u001b[38;5;28minput\u001b[39m, max_norm, norm_type)\n\u001b[0;32m-> 2183\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43membedding\u001b[49m\u001b[43m(\u001b[49m\u001b[43mweight\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpadding_idx\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mscale_grad_by_freq\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43msparse\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[0;31mIndexError\u001b[0m: index out of range in self" + ] + 
} + ], + "source": [ + "for i in hard:\n", + "# try:\n", + " \n", + " tr = '<|startoftext|>' + ' ' + i[\"prompt\"].replace(\"GOAL\",\"\").replace(\"PROOFSTEP\",\"\") + ' '\n", + " tr = tokenizer(tr, return_tensors=\"pt\",padding=True).input_ids\n", + "# tr = tr.to(device)\n", + " gen_tokens = model.generate(\n", + " tr,\n", + " do_sample=True,\n", + " temperature=0.9,\n", + " max_length=100, \n", + " num_return_sequences=10, \n", + " )\n", + " gen_text = tokenizer.batch_decode(gen_tokens)\n", + " results.append((gen_text,i))\n", + "# except:\n", + "# hard2.append(i)" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "id": "af347e5a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "6369" + ] + }, + "execution_count": 39, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(hard)" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "id": "78371dc8", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[\"<|startoftext|> α : Type u,\\t_inst_1 : inhabited α,\\tb : buffer α,\\ti : ℕ,\\th : i < b.size\\t⊢ b.read ⟨i, h⟩ = b.read' i\\n \",\n", + " \"<|startoftext|> α : Type u,\\t_inst_1 : inhabited α,\\tb : buffer α,\\ti : ℕ,\\th : i < b.size\\t⊢ b.read ⟨i, h⟩ = b.read' i\\n \",\n", + " \"<|startoftext|> α : Type u,\\t_inst_1 : inhabited α,\\ti b_fst : ℕ,\\tb_snd : array b_fst α,\\th : i < buffer.size ⟨b_fst, b_snd⟩\\t⊢ buffer.read ⟨b_fst, b_snd⟩ ⟨i, h⟩ = buffer.read' ⟨b_fst, b_snd⟩ i\\n \",\n", + " \"<|startoftext|> α : Type u,\\t_inst_1 : inhabited α,\\ti b_fst : ℕ,\\tb_snd : array b_fst α,\\th : i < buffer.size ⟨b_fst, b_snd⟩\\t⊢ b_snd.read ⟨i, h⟩ = b_snd.read' i\\n \",\n", + " \"<|startoftext|> α : Type u,\\t_inst_1 : inhabited α,\\tb : buffer α,\\ti : ℕ,\\th : i < b.size\\t⊢ b.read ⟨i, h⟩ = b.read' i\\n \",\n", + " '<|startoftext|> α : Type u,\\tf : α → α,\\ta : α\\t⊢ stream.iterate f a = a::stream.iterate f (f a)\\n ',\n", + " '<|startoftext|> α : Type u,\\tf : α → α,\\ta : α\\t⊢ (stream.iterate f a).head::stream.iterate f (f a) = a::stream.iterate f (f a)\\n ',\n", + " '<|startoftext|> α : Type u,\\tf : α → α,\\ta : α\\t⊢ (stream.iterate f a).head::(stream.iterate f a).tail = a::stream.iterate f (f a)\\n ',\n", + " '<|startoftext|> α : Type u,\\ta : α,\\ts : stream α,\\t_x : a ∈ s.even,\\t_fun_match : a ∈ s.even → a ∈ s,\\tn : ℕ,\\th : (λ (b : α), a = b) (stream.nth n s.even)\\t⊢ a = stream.nth (2 * n) s\\n ',\n", + " '<|startoftext|> α : Type u,\\tn : ℕ,\\ta : α,\\tv_val : list α,\\tv_property : v_val.length = n\\t⊢ (vector.cons a ⟨v_val, v_property⟩).to_list = a :: vector.to_list ⟨v_val, v_property⟩\\n ',\n", + " '<|startoftext|> α : Type u,\\tn : ℕ,\\ta : α,\\tv : vector α n\\t⊢ (vector.cons a v).to_list = a :: v.to_list\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : linear_order α,\\ta b c : α,\\th₁ : a ≤ c,\\th₂ : b ≤ c\\t⊢ linear_order.max a b ≤ c\\n ',\n", + " '<|startoftext|> α : Type u,\\tb : bool,\\ta : α\\t⊢ cond b a a = a\\n ',\n", + " '<|startoftext|> α : Type u,\\ta : α\\t⊢ cond bool.ff a a = a\\n ',\n", + " '<|startoftext|> α : Type u,\\ta : α\\t⊢ cond bool.tt a a = a\\n ',\n", + " '<|startoftext|> α : Type u,\\tb : bool,\\ta : α\\t⊢ cond b a a = a\\n ',\n", + " '<|startoftext|> b : bool\\t⊢ bool.ff && b = bool.ff\\n ',\n", + " '<|startoftext|> ⊢ bool.ff && bool.ff = bool.ff\\n ',\n", + " '<|startoftext|> ⊢ bool.ff && bool.tt = bool.ff\\n ',\n", + " '<|startoftext|> b : bool\\t⊢ bool.ff && b = bool.ff\\n ',\n", + " '<|startoftext|> b : bool\\t⊢ (¬b = bool.tt) = (b = bool.ff)\\n 
',\n", + " '<|startoftext|> ⊢ (¬bool.ff = bool.tt) = (bool.ff = bool.ff)\\n ',\n", + " '<|startoftext|> ⊢ (¬bool.tt = bool.tt) = (bool.tt = bool.ff)\\n ',\n", + " '<|startoftext|> b : bool\\t⊢ (¬b = bool.tt) = (b = bool.ff)\\n ',\n", + " '<|startoftext|> ⊢ bool.ff = bool.tt = false\\n ',\n", + " '<|startoftext|> c : char\\t⊢ decidable c.is_whitespace\\n ',\n", + " \"<|startoftext|> c : char\\t⊢ decidable (c ∈ [' ', '\\\\t', '\\\\n'])\\n \",\n", + " '<|startoftext|> ⊢ decidable_pred char.is_whitespace\\n ',\n", + " '<|startoftext|> c : char\\t⊢ decidable c.is_digit\\n ',\n", + " '<|startoftext|> c : char\\t⊢ decidable (c.val ≥ 48 ∧ c.val ≤ 57)\\n ',\n", + " '<|startoftext|> ⊢ decidable_pred char.is_digit\\n ',\n", + " '<|startoftext|> n : ℕ\\t⊢ is_valid_char n → (char.of_nat n).val = n\\n ',\n", + " '<|startoftext|> n : ℕ\\t⊢ is_valid_char n → (char.of_nat n).val = n\\n ',\n", + " '<|startoftext|> n : ℕ,\\th : is_valid_char n\\t⊢ (char.of_nat n).val = n\\n ',\n", + " \"<|startoftext|> n : ℕ,\\th : is_valid_char n\\t⊢ (dite (is_valid_char n) (λ (h : is_valid_char n), {val := n, valid := h}) (λ (h : ¬is_valid_char n), '\\\\x00')).val = n\\n \",\n", + " '<|startoftext|> n : ℕ\\t⊢ is_valid_char n → (char.of_nat n).val = n\\n ',\n", + " '<|startoftext|> add_left_neg : ∀ (a : ℤ), -a + a = 0,\\tm : ℕ\\t⊢ -int.of_nat m.succ + int.of_nat m.succ = 0\\n ',\n", + " '<|startoftext|> add_left_neg : ∀ (a : ℤ), -a + a = 0,\\tm : ℕ\\t⊢ - -[1+ m] + -[1+ m] = 0\\n ',\n", + " '<|startoftext|> m n k : ℕ\\t⊢ -[1+ m] * int.sub_nat_nat n k = int.sub_nat_nat (m.succ * k) (m.succ * n)\\n ',\n", + " '<|startoftext|> m n k : ℕ,\\th : n < k ∨ k ≤ n\\t⊢ -[1+ m] * int.sub_nat_nat n k = int.sub_nat_nat (m.succ * k) (m.succ * n)\\n ',\n", + " '<|startoftext|> m n k : ℕ,\\th : n < k\\t⊢ -[1+ m] * int.sub_nat_nat n k = int.sub_nat_nat (m.succ * k) (m.succ * n)\\n ',\n", + " \"<|startoftext|> m n k : ℕ,\\th : n < k\\t⊢ m.succ * n < m.succ * k\\t\\tm n k : ℕ,\\th : n < k,\\th' : m.succ * n < m.succ * k\\t⊢ -[1+ m] * int.sub_nat_nat n k = int.sub_nat_nat (m.succ * k) (m.succ * n)\\n \",\n", + " \"<|startoftext|> m n k : ℕ,\\th : n < k,\\th' : m.succ * n < m.succ * k\\t⊢ -[1+ m] * int.sub_nat_nat n k = int.sub_nat_nat (m.succ * k) (m.succ * n)\\n \",\n", + " \"<|startoftext|> m n k : ℕ,\\th : n < k,\\th' : m.succ * n < m.succ * k\\t⊢ -[1+ m] * -[1+ (k - n).pred] = int.of_nat (m.succ * k - m.succ * n)\\n \",\n", + " '<|startoftext|> m n k : ℕ,\\th : n < k\\t⊢ -[1+ m] * int.sub_nat_nat n k = int.sub_nat_nat (m.succ * k) (m.succ * n)\\t\\tcase or.inr\\tm n k : ℕ,\\th : k ≤ n\\t⊢ -[1+ m] * int.sub_nat_nat n k = int.sub_nat_nat (m.succ * k) (m.succ * n)\\n ',\n", + " '<|startoftext|> m n k : ℕ,\\th : k ≤ n\\t⊢ -[1+ m] * int.sub_nat_nat n k = int.sub_nat_nat (m.succ * k) (m.succ * n)\\n ',\n", + " \"<|startoftext|> m n k : ℕ,\\th : k ≤ n\\t⊢ n > k ∨ k = n\\t\\tm n k : ℕ,\\th : k ≤ n,\\th' : n > k ∨ k = n\\t⊢ -[1+ m] * int.sub_nat_nat n k = int.sub_nat_nat (m.succ * k) (m.succ * n)\\n \",\n", + " \"<|startoftext|> m n k : ℕ,\\th : k ≤ n,\\th' : n > k ∨ k = n\\t⊢ -[1+ m] * int.sub_nat_nat n k = int.sub_nat_nat (m.succ * k) (m.succ * n)\\n \",\n", + " \"<|startoftext|> m n k : ℕ,\\th : k ≤ n,\\th' : n > k\\t⊢ -[1+ m] * int.sub_nat_nat n k = int.sub_nat_nat (m.succ * k) (m.succ * n)\\n \",\n", + " \"<|startoftext|> m n k : ℕ,\\th : k ≤ n,\\th' : n > k\\t⊢ m.succ * n > m.succ * k\\t\\tm n k : ℕ,\\th : k ≤ n,\\th' : n > k,\\th₁ : m.succ * n > m.succ * k\\t⊢ -[1+ m] * int.sub_nat_nat n k = int.sub_nat_nat (m.succ * k) (m.succ * n)\\n \",\n", 
+ " \"<|startoftext|> m n k : ℕ,\\th : k ≤ n,\\th' : n > k,\\th₁ : m.succ * n > m.succ * k\\t⊢ -[1+ m] * int.of_nat (n - k) = -[1+ (m.succ * n - m.succ * k).pred]\\n \",\n", + " \"<|startoftext|> m n k : ℕ,\\th : k ≤ n,\\th' : n > k,\\th₁ : m.succ * n > m.succ * k\\t⊢ -[1+ m] * int.sub_nat_nat n k = int.sub_nat_nat (m.succ * k) (m.succ * n)\\n \",\n", + " \"<|startoftext|> m n k : ℕ,\\th : k ≤ n,\\th' : n > k,\\th₁ : m.succ * n > m.succ * k\\t⊢ int.neg_of_nat (n * m.succ - k * m.succ) = -[1+ (n * m.succ - k * m.succ).pred]\\n \",\n", + " \"<|startoftext|> m n k : ℕ,\\th : k ≤ n,\\th' : n > k\\t⊢ -[1+ m] * int.sub_nat_nat n k = int.sub_nat_nat (m.succ * k) (m.succ * n)\\t\\tcase or.inr\\tm n k : ℕ,\\th : k ≤ n,\\th' : k = n\\t⊢ -[1+ m] * int.sub_nat_nat n k = int.sub_nat_nat (m.succ * k) (m.succ * n)\\n \",\n", + " \"<|startoftext|> m n k : ℕ,\\th : k ≤ n,\\th' : n > k,\\th₁ : m.succ * n > m.succ * k\\t⊢ int.neg_of_nat (m.succ * n - m.succ * k).pred.succ = -int.of_nat (m.succ * n - m.succ * k).pred.succ.pred.succ\\n \",\n", + " '<|startoftext|> m k : ℕ,\\th : k ≤ k\\t⊢ -[1+ m] * int.sub_nat_nat k k = int.sub_nat_nat (m.succ * k) (m.succ * k)\\n ',\n", + " '<|startoftext|> m k : ℕ,\\th : k ≤ k\\t⊢ -[1+ m] * 0 = 0\\n ',\n", + " \"<|startoftext|> m n k : ℕ,\\th : k ≤ n,\\th' : k = n\\t⊢ -[1+ m] * int.sub_nat_nat n k = int.sub_nat_nat (m.succ * k) (m.succ * n)\\n \",\n", + " '<|startoftext|> a b : ℤ,\\tn m : ℕ,\\tha : 0 ≤ a,\\thb : 0 ≤ b,\\te1 : a.nat_abs = n,\\te2 : b.nat_abs = m,\\th : n ≠ m\\t⊢ a.nat_abs ≠ b.nat_abs\\n ',\n", + " '<|startoftext|> a b : ℤ,\\th : a < -b\\t⊢ b < -a\\n ',\n", + " '<|startoftext|> a b : ℤ,\\th : a < -b,\\th : - -b < -a\\t⊢ b < -a\\n ',\n", + " '<|startoftext|> a b c : ℤ,\\th : a ≤ b + c\\t⊢ a - b ≤ c\\n ',\n", + " '<|startoftext|> a b c : ℤ,\\th : a ≤ b + c,\\th : a + -b ≤ b + c + -b\\t⊢ a - b ≤ c\\n ',\n", + " '<|startoftext|> a b c : ℤ,\\th : -c + a ≤ b\\t⊢ a ≤ b + c\\n ',\n", + " '<|startoftext|> a b c : ℤ,\\th : a + -c ≤ b\\t⊢ a ≤ b + c\\n ',\n", + " '<|startoftext|> a b c : ℤ,\\th : a - b < c\\t⊢ a < b + c\\n ',\n", + " '<|startoftext|> a b c : ℤ,\\th : a - b < c,\\th : a - b + b < c + b\\t⊢ a < b + c\\n ',\n", + " '<|startoftext|> a b : ℤ,\\tha : a < 0,\\thb : 0 < b,\\th : a * b < 0 * b\\t⊢ a * b < 0\\n ',\n", + " '<|startoftext|> α : Type u,\\tn : ℕ,\\tl : list α\\t⊢ (list.take n l).length ≤ n\\n ',\n", + " '<|startoftext|> α : Type u,\\ta : α,\\tn : ℕ\\t⊢ (list.repeat a n).length = n\\n ',\n", + " '<|startoftext|> α : Type u,\\ta : α,\\tn : ℕ\\t⊢ (list.repeat a n).length = n\\n ',\n", + " '<|startoftext|> α : Type u,\\ta : α\\t⊢ (list.repeat a 0).length = 0\\n ',\n", + " '<|startoftext|> α : Type u,\\ta : α,\\tn_n : ℕ,\\tn_ih : (list.repeat a n_n).length = n_n\\t⊢ (list.repeat a n_n.succ).length = n_n.succ\\n ',\n", + " '<|startoftext|> α : Type u,\\ta : α,\\tn : ℕ\\t⊢ (list.repeat a n).length = n\\n ',\n", + " '<|startoftext|> C : ℕ → Sort u,\\tz : C 0,\\tf : Π (b : bool) (n : ℕ), C n → C (nat.bit b n),\\tbinary_rec : Π (n : ℕ), C n,\\tn : ℕ,\\tn0 : n = 0\\t⊢ C n\\n ',\n", + " '<|startoftext|> C : ℕ → Sort u,\\tz : C 0,\\tf : Π (b : bool) (n : ℕ), C n → C (nat.bit b n),\\tbinary_rec : Π (n : ℕ), C n,\\tn : ℕ,\\tn0 : n = 0\\t⊢ C n\\n ',\n", + " '<|startoftext|> C : ℕ → Sort u,\\tz : C 0,\\tf : Π (b : bool) (n : ℕ), C n → C (nat.bit b n),\\tbinary_rec : Π (n : ℕ), C n,\\tn : ℕ,\\tn0 : n = 0\\t⊢ C 0\\n ',\n", + " \"<|startoftext|> C : ℕ → Sort u,\\tz : C 0,\\tf : Π (b : bool) (n : ℕ), C n → C (nat.bit b n),\\tbinary_rec : Π (n : ℕ), C n,\\tn : ℕ,\\tn0 : ¬n 
= 0,\\tn' : ℕ := n.div2\\t⊢ n.div2 < n\\n \",\n", + " \"<|startoftext|> C : ℕ → Sort u,\\tz : C 0,\\tf : Π (b : bool) (n : ℕ), C n → C (nat.bit b n),\\tbinary_rec : Π (n : ℕ), C n,\\tn : ℕ,\\tn0 : ¬n = 0,\\tn' : ℕ := n.div2\\t⊢ n' < n\\n \",\n", + " \"<|startoftext|> C : ℕ → Sort u,\\tz : C 0,\\tf : Π (b : bool) (n : ℕ), C n → C (nat.bit b n),\\tbinary_rec : Π (n : ℕ), C n,\\tn : ℕ,\\tn0 : ¬n = 0,\\tn' : ℕ := n.div2\\t⊢ n / 2 < n\\n \",\n", + " \"<|startoftext|> C : ℕ → Sort u,\\tz : C 0,\\tf : Π (b : bool) (n : ℕ), C n → C (nat.bit b n),\\tbinary_rec : Π (n : ℕ), C n,\\tn : ℕ,\\tn0 : ¬n = 0,\\tn' : ℕ := n.div2\\t⊢ n < n * 1.succ\\n \",\n", + " \"<|startoftext|> C : ℕ → Sort u,\\tz : C 0,\\tf : Π (b : bool) (n : ℕ), C n → C (nat.bit b n),\\tbinary_rec : Π (n : ℕ), C n,\\tn : ℕ,\\tn0 : ¬n = 0,\\tn' : ℕ := n.div2,\\tthis : n * 1 < n * 1.succ\\t⊢ n < n * 1.succ\\n \",\n", + " \"<|startoftext|> C : ℕ → Sort u,\\tz : C 0,\\tf : Π (b : bool) (n : ℕ), C n → C (nat.bit b n),\\tbinary_rec : Π (n : ℕ), C n,\\tn : ℕ,\\tn0 : ¬n = 0,\\tn' : ℕ := n.div2,\\tthis : n' < n\\t⊢ C n\\n \",\n", + " \"<|startoftext|> C : ℕ → Sort u,\\tz : C 0,\\tf : Π (b : bool) (n : ℕ), C n → C (nat.bit b n),\\tbinary_rec : Π (n : ℕ), C n,\\tn : ℕ,\\tn0 : ¬n = 0,\\tn' : ℕ := n.div2,\\tthis : n' < n\\t⊢ C (nat.bit n.bodd n')\\n \",\n", + " \"<|startoftext|> C : ℕ → Sort u,\\tz : C 0,\\tf : Π (b : bool) (n : ℕ), C n → C (nat.bit b n),\\tbinary_rec : Π (n : ℕ), C n,\\tn : ℕ,\\tn0 : ¬n = 0,\\tn' : ℕ := n.div2,\\tthis : n' < n\\t⊢ C n\\n \",\n", + " '<|startoftext|> f : bool → bool → bool,\\th : f bool.ff bool.ff = bool.ff,\\tm : ℕ\\t⊢ nat.bitwise f m 0 = cond (f bool.tt bool.ff) m 0\\n ',\n", + " '<|startoftext|> f : bool → bool → bool,\\th : f bool.ff bool.ff = bool.ff,\\tm : ℕ\\t⊢ nat.bitwise f m 0 = cond (f bool.tt bool.ff) m 0\\n ',\n", + " '<|startoftext|> f : bool → bool → bool,\\th : f bool.ff bool.ff = bool.ff,\\tm : ℕ\\t⊢ nat.bitwise f m 0 = cond (f bool.tt bool.ff) m 0\\n ',\n", + " '<|startoftext|> f : bool → bool → bool,\\th : f bool.ff bool.ff = bool.ff,\\tm : ℕ\\t⊢ nat.bitwise f m 0 = cond (f bool.tt bool.ff) m 0\\n ',\n", + " '<|startoftext|> f : bool → bool → bool,\\th : f bool.ff bool.ff = bool.ff,\\tm : ℕ\\t⊢ nat.binary_rec (λ (n : ℕ), cond (f bool.ff bool.tt) n 0) (λ (a : bool) (m : ℕ) (Ia : ℕ → ℕ), nat.binary_rec (cond (f bool.tt bool.ff) (nat.bit a m) 0) (λ (b : bool) (n _x : ℕ), nat.bit (f a b) (Ia n))) m 0 = cond (f bool.tt bool.ff) m 0\\n ',\n", + " '<|startoftext|> f : bool → bool → bool,\\th : f bool.ff bool.ff = bool.ff,\\tm : ℕ\\t⊢ ∀ (b : bool) (n : ℕ), nat.binary_rec (λ (n : ℕ), cond (f bool.ff bool.tt) n 0) (λ (a : bool) (m : ℕ) (Ia : ℕ → ℕ), nat.binary_rec (cond (f bool.tt bool.ff) (nat.bit a m) 0) (λ (b : bool) (n _x : ℕ), nat.bit (f a b) (Ia n))) (nat.bit b n) 0 = cond (f bool.tt bool.ff) (nat.bit b n) 0\\n ',\n", + " '<|startoftext|> f : bool → bool → bool,\\th : f bool.ff bool.ff = bool.ff,\\tm : ℕ\\t⊢ nat.bitwise f m 0 = cond (f bool.tt bool.ff) m 0\\n ',\n", + " '<|startoftext|> f : bool → bool → bool,\\th : f bool.ff bool.ff = bool.ff,\\tm : ℕ,\\tb : bool,\\tn : ℕ\\t⊢ nat.binary_rec (cond (f bool.tt bool.ff) (nat.bit bool.ff 0) 0) (λ (b : bool) (n _x : ℕ), nat.bit (f bool.ff b) (cond (f bool.ff bool.tt) n 0)) = λ (n : ℕ), cond (f bool.ff bool.tt) n 0\\n ',\n", + " '<|startoftext|> f : bool → bool → bool,\\th : f bool.ff bool.ff = bool.ff,\\tm : ℕ,\\tb : bool,\\tn : ℕ\\t⊢ nat.binary_rec (λ (n : ℕ), cond (f bool.ff bool.tt) n 0) (λ (a : bool) (m : ℕ) (Ia : ℕ → ℕ), nat.binary_rec (cond (f 
bool.tt bool.ff) (nat.bit a m) 0) (λ (b : bool) (n _x : ℕ), nat.bit (f a b) (Ia n))) (nat.bit b n) 0 = cond (f bool.tt bool.ff) (nat.bit b n) 0\\n ',\n", + " '<|startoftext|> n : ℕ\\t⊢ 1.gcd n = 1\\n ',\n", + " '<|startoftext|> add_assoc : ∀ (n m k : ℕ), n + m + k = n + (m + k),\\tn m k : ℕ\\t⊢ n + m + k.succ = n + (m + k.succ)\\n ',\n", + " '<|startoftext|> add_assoc : ∀ (n m k : ℕ), n + m + k = n + (m + k),\\tn m k : ℕ\\t⊢ n + m + k.succ = n + (m + k.succ)\\n ',\n", + " '<|startoftext|> add_assoc : ∀ (n m k : ℕ), n + m + k = n + (m + k),\\tn m k : ℕ\\t⊢ (n + (m + k)).succ = n + (m + k).succ\\n ',\n", + " '<|startoftext|> a : ℕ\\t⊢ a % 0 = a\\n ',\n", + " '<|startoftext|> a : ℕ\\t⊢ ite (0 < 0 ∧ 0 ≤ a) ((a - 0) % 0) a = a\\n ',\n", + " '<|startoftext|> a : ℕ\\t⊢ ¬(0 < 0 ∧ 0 ≤ a)\\t\\ta : ℕ,\\th : ¬(0 < 0 ∧ 0 ≤ a)\\t⊢ ite (0 < 0 ∧ 0 ≤ a) ((a - 0) % 0) a = a\\n ',\n", + " '<|startoftext|> a : ℕ,\\th : ¬(0 < 0 ∧ 0 ≤ a)\\t⊢ ite (0 < 0 ∧ 0 ≤ a) ((a - 0) % 0) a = a\\n ',\n", + " '<|startoftext|> b : ℕ\\t⊢ 0 % b = 0\\n ',\n", + " '<|startoftext|> b : ℕ\\t⊢ ite (0 < b ∧ b ≤ 0) ((0 - b) % b) 0 = 0\\n ',\n", + " '<|startoftext|> b : ℕ,\\thn : 0 < b ∧ b ≤ 0\\t⊢ false\\n ',\n", + " '<|startoftext|> b : ℕ,\\tl : 0 < b,\\tr : b ≤ 0\\t⊢ false\\n ',\n", + " '<|startoftext|> b : ℕ\\t⊢ ¬(0 < b ∧ b ≤ 0)\\n ',\n", + " '<|startoftext|> b : ℕ\\t⊢ ¬(0 < b ∧ b ≤ 0)\\t\\tb : ℕ,\\th : ¬(0 < b ∧ b ≤ 0)\\t⊢ ite (0 < b ∧ b ≤ 0) ((0 - b) % b) 0 = 0\\n ',\n", + " '<|startoftext|> b : ℕ,\\th : ¬(0 < b ∧ b ≤ 0)\\t⊢ ite (0 < b ∧ b ≤ 0) ((0 - b) % b) 0 = 0\\n ',\n", + " '<|startoftext|> smt_tactic.monad : monad smt_tactic\\t⊢ monad smt_tactic\\n ',\n", + " '<|startoftext|> R : Type u,\\tA₁ : Type v,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A₁,\\t_inst_5 : algebra R A₁,\\tx : A₁ ≃ₐ[R] A₁,\\ta : A₁\\t⊢ ⇑(⇑(alg_equiv.refl.aut_congr) x) a = ⇑(⇑(mul_equiv.refl (A₁ ≃ₐ[R] A₁)) x) a\\n ',\n", + " '<|startoftext|> R : Type u,\\tA₁ : Type v,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A₁,\\t_inst_5 : algebra R A₁\\t⊢ alg_equiv.refl.aut_congr = mul_equiv.refl (A₁ ≃ₐ[R] A₁)\\n ',\n", + " \"<|startoftext|> I : Type u,\\tR : Type u_1,\\tf : I → Type v,\\tr : comm_semiring R,\\ts : Π (i : I), semiring (f i),\\t_inst_1 : Π (i : I), algebra R (f i),\\ta : R,\\tf : Π (i : I), f i\\t⊢ {to_fun := (pi.ring_hom (λ (i : I), algebra_map R (f i))).to_fun, map_one' := _, map_mul' := _, map_zero' := _, map_add' := _}.to_fun a * f = f * {to_fun := (pi.ring_hom (λ (i : I), algebra_map R (f i))).to_fun, map_one' := _, map_mul' := _, map_zero' := _, map_add' := _}.to_fun a\\n \",\n", + " \"<|startoftext|> I : Type u,\\tR : Type u_1,\\tf : I → Type v,\\tr : comm_semiring R,\\ts : Π (i : I), semiring (f i),\\t_inst_1 : Π (i : I), algebra R (f i),\\ta : R,\\tf : Π (i : I), f i,\\tx : I\\t⊢ ({to_fun := (pi.ring_hom (λ (i : I), algebra_map R (f i))).to_fun, map_one' := _, map_mul' := _, map_zero' := _, map_add' := _}.to_fun a * f) x = (f * {to_fun := (pi.ring_hom (λ (i : I), algebra_map R (f i))).to_fun, map_one' := _, map_mul' := _, map_zero' := _, map_add' := _}.to_fun a) x\\n \",\n", + " \"<|startoftext|> I : Type u,\\tR : Type u_1,\\tf : I → Type v,\\tr : comm_semiring R,\\ts : Π (i : I), semiring (f i),\\t_inst_1 : Π (i : I), algebra R (f i),\\ta : R,\\tf : Π (i : I), f i\\t⊢ a • f = {to_fun := (pi.ring_hom (λ (i : I), algebra_map R (f i))).to_fun, map_one' := _, map_mul' := _, map_zero' := _, map_add' := _}.to_fun a * f\\n \",\n", + " \"<|startoftext|> I : Type u,\\tR : Type u_1,\\tf : I → Type v,\\tr : comm_semiring R,\\ts : Π (i : I), 
semiring (f i),\\t_inst_1 : Π (i : I), algebra R (f i),\\ta : R,\\tf : Π (i : I), f i,\\tx : I\\t⊢ (a • f) x = ({to_fun := (pi.ring_hom (λ (i : I), algebra_map R (f i))).to_fun, map_one' := _, map_mul' := _, map_zero' := _, map_add' := _}.to_fun a * f) x\\n \",\n", + " '<|startoftext|> R : Type u,\\tA : Type v,\\tB : Type w,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A,\\t_inst_3 : semiring B,\\t_inst_6 : algebra R A,\\t_inst_7 : algebra R B,\\tf : A →ₗ[R] B,\\tmap_one : ⇑f 1 = 1,\\tmap_mul : ∀ (x y : A), ⇑f (x * y) = ⇑f x * ⇑f y,\\tx : A\\t⊢ ⇑((alg_hom.of_linear_map f map_one map_mul).to_linear_map) x = ⇑f x\\n ',\n", + " '<|startoftext|> R : Type u,\\tA : Type v,\\tB : Type w,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A,\\t_inst_3 : semiring B,\\t_inst_6 : algebra R A,\\t_inst_7 : algebra R B,\\tf : A →ₗ[R] B,\\tmap_one : ⇑f 1 = 1,\\tmap_mul : ∀ (x y : A), ⇑f (x * y) = ⇑f x * ⇑f y\\t⊢ (alg_hom.of_linear_map f map_one map_mul).to_linear_map = f\\n ',\n", + " '<|startoftext|> R : Type u,\\tA₁ : Type v,\\tA₂ : Type w,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A₁,\\t_inst_3 : semiring A₂,\\t_inst_5 : algebra R A₁,\\t_inst_6 : algebra R A₂,\\te : A₁ ≃ₐ[R] A₂,\\tx : A₁\\t⊢ ⇑(↑(e.symm).comp ↑e) x = ⇑(alg_hom.id R A₁) x\\n ',\n", + " '<|startoftext|> R : Type u,\\tA₁ : Type v,\\tA₂ : Type w,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A₁,\\t_inst_3 : semiring A₂,\\t_inst_5 : algebra R A₁,\\t_inst_6 : algebra R A₂,\\te : A₁ ≃ₐ[R] A₂\\t⊢ ↑(e.symm).comp ↑e = alg_hom.id R A₁\\n ',\n", + " '<|startoftext|> R : Type u_1,\\tA : Type u_2,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A,\\t_inst_3 : algebra R A,\\ta b x : A\\t⊢ ⇑(algebra.lmul_left R (a * b)) x = ⇑((algebra.lmul_left R a).comp (algebra.lmul_left R b)) x\\n ',\n", + " '<|startoftext|> R : Type u_1,\\tA : Type u_2,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A,\\t_inst_3 : algebra R A,\\ta b : A\\t⊢ algebra.lmul_left R (a * b) = (algebra.lmul_left R a).comp (algebra.lmul_left R b)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\ta : A,\\tM : submodule R A\\t⊢ {a}.up • M = ?m_1\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\ta : A,\\tM : submodule R A\\t⊢ {a}.up • M = submodule.map (algebra.lmul_left R a) M\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\ta : A,\\tM : submodule R A\\t⊢ {a}.up • submodule.span R ↑M = submodule.map (algebra.lmul_left R a) M\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\ta : A,\\tM : submodule R A\\t⊢ submodule.span R {a}.up * submodule.span R ↑M = submodule.map (algebra.lmul_left R a) M\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\ta : A,\\tM : submodule R A\\t⊢ submodule.span R ({a}.up * ↑M) = submodule.map (algebra.lmul_left R a) M\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\ta : A,\\tM : submodule R A\\t⊢ submodule.span R ({a}.up * ↑M) ≤ submodule.map (algebra.lmul_left R a) M\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R 
A,\\ta : A,\\tM : submodule R A\\t⊢ {a}.up * ↑M ⊆ ↑(submodule.map (algebra.lmul_left R a) M)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\ta : A,\\tM : submodule R A,\\tb m : A,\\thb : b ∈ {a}.up,\\thm : m ∈ ↑M\\t⊢ b * m ∈ ↑(submodule.map (algebra.lmul_left R a) M)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\ta : A,\\tM : submodule R A\\t⊢ submodule.span R ({a}.up * ↑M) ≤ submodule.map (algebra.lmul_left R a) M\\t\\tR : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\ta : A,\\tM : submodule R A\\t⊢ submodule.map (algebra.lmul_left R a) M ≤ submodule.span R ({a}.up * ↑M)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\ta : A,\\tM : submodule R A,\\tb m : A,\\thb : b ∈ {a}.up,\\thm : m ∈ ↑M\\t⊢ ∃ (y : A), y ∈ M ∧ ⇑(algebra.lmul_left R a) y = a * m\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\ta : A,\\tM : submodule R A,\\tm : A,\\thm : m ∈ ↑M\\t⊢ ⇑(algebra.lmul_left R a) m ∈ submodule.span R ({a}.up * ↑M)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\ta : A,\\tM : submodule R A\\t⊢ submodule.map (algebra.lmul_left R a) M ≤ submodule.span R ({a}.up * ↑M)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\tI : submodule R A,\\thI : I ≤ 1\\t⊢ I ≤ I * (1 / I)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\tI : submodule R A,\\thI : I ≤ 1\\t⊢ I * 1 ≤ I * (1 / I)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\tB : Type u_1,\\t_inst_4 : comm_ring B,\\t_inst_5 : algebra R B,\\tI J : submodule R A,\\th : A ≃ₐ[R] B\\t⊢ submodule.map h.to_linear_map (I / J) = submodule.map h.to_linear_map I / submodule.map h.to_linear_map J\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\tB : Type u_1,\\t_inst_4 : comm_ring B,\\t_inst_5 : algebra R B,\\tI J : submodule R A,\\th : A ≃ₐ[R] B,\\tx : B\\t⊢ x ∈ submodule.map h.to_linear_map (I / J) ↔ x ∈ submodule.map h.to_linear_map I / submodule.map h.to_linear_map J\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\tB : Type u_1,\\t_inst_4 : comm_ring B,\\t_inst_5 : algebra R B,\\tI J : submodule R A,\\th : A ≃ₐ[R] B,\\tx : B\\t⊢ (∃ (y : A), (∀ (y_1 : A), y_1 ∈ J → y * y_1 ∈ I) ∧ ⇑(h.to_linear_map) y = x) ↔ ∀ (y : B), (∃ (y_1 : A), y_1 ∈ J ∧ ⇑(h.to_linear_map) y_1 = y) → (∃ (y_1 : A), y_1 ∈ I ∧ ⇑(h.to_linear_map) y_1 = x * y)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\tB : Type u_1,\\t_inst_4 : comm_ring B,\\t_inst_5 : algebra R B,\\tI J : submodule R A,\\th : A ≃ₐ[R] B,\\tx : B\\t⊢ (∃ (y : A), (∀ (y_1 : A), y_1 ∈ J → y * y_1 ∈ I) ∧ ⇑(h.to_linear_map) y = x) → ∀ (y : B), (∃ (y_1 : A), y_1 ∈ J ∧ 
⇑(h.to_linear_map) y_1 = y) → (∃ (y_1 : A), y_1 ∈ I ∧ ⇑(h.to_linear_map) y_1 = x * y)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\tB : Type u_1,\\t_inst_4 : comm_ring B,\\t_inst_5 : algebra R B,\\tI J : submodule R A,\\th : A ≃ₐ[R] B,\\tx : B\\t⊢ (∃ (y : A), (∀ (y_1 : A), y_1 ∈ J → y * y_1 ∈ I) ∧ ⇑(h.to_linear_map) y = x) → ∀ (y : B), (∃ (y_1 : A), y_1 ∈ J ∧ ⇑(h.to_linear_map) y_1 = y) → (∃ (y_1 : A), y_1 ∈ I ∧ ⇑(h.to_linear_map) y_1 = x * y)\\t\\tR : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\tB : Type u_1,\\t_inst_4 : comm_ring B,\\t_inst_5 : algebra R B,\\tI J : submodule R A,\\th : A ≃ₐ[R] B,\\tx : B\\t⊢ (∀ (y : B), (∃ (y_1 : A), y_1 ∈ J ∧ ⇑(h.to_linear_map) y_1 = y) → (∃ (y_1 : A), y_1 ∈ I ∧ ⇑(h.to_linear_map) y_1 = x * y)) → (∃ (y : A), (∀ (y_1 : A), y_1 ∈ J → y * y_1 ∈ I) ∧ ⇑(h.to_linear_map) y = x)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\tB : Type u_1,\\t_inst_4 : comm_ring B,\\t_inst_5 : algebra R B,\\tI J : submodule R A,\\th : A ≃ₐ[R] B,\\tx : A,\\thx : ∀ (y : A), y ∈ J → x * y ∈ I,\\ty : A,\\thy : y ∈ J\\t⊢ ∃ (y_1 : A), y_1 ∈ I ∧ ⇑(h.to_linear_map) y_1 = ⇑(h.to_linear_map) x * ⇑(h.to_linear_map) y\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\tB : Type u_1,\\t_inst_4 : comm_ring B,\\t_inst_5 : algebra R B,\\tI J : submodule R A,\\th : A ≃ₐ[R] B,\\tx : B\\t⊢ (∀ (y : B), (∃ (y_1 : A), y_1 ∈ J ∧ ⇑(h.to_linear_map) y_1 = y) → (∃ (y_1 : A), y_1 ∈ I ∧ ⇑(h.to_linear_map) y_1 = x * y)) → (∃ (y : A), (∀ (y_1 : A), y_1 ∈ J → y * y_1 ∈ I) ∧ ⇑(h.to_linear_map) y = x)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\tB : Type u_1,\\t_inst_4 : comm_ring B,\\t_inst_5 : algebra R B,\\tI J : submodule R A,\\th : A ≃ₐ[R] B,\\tx : B,\\thx : ∀ (y : B), (∃ (y_1 : A), y_1 ∈ J ∧ ⇑(h.to_linear_map) y_1 = y) → (∃ (y_1 : A), y_1 ∈ I ∧ ⇑(h.to_linear_map) y_1 = x * y)\\t⊢ ∃ (y : A), (∀ (y_1 : A), y_1 ∈ J → y * y_1 ∈ I) ∧ ⇑(h.to_linear_map) y = x\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\tB : Type u_1,\\t_inst_4 : comm_ring B,\\t_inst_5 : algebra R B,\\tI J : submodule R A,\\th : A ≃ₐ[R] B,\\tx : B,\\thx : ∀ (y : B), (∃ (y_1 : A), y_1 ∈ J ∧ ⇑(h.to_linear_map) y_1 = y) → (∃ (y_1 : A), y_1 ∈ I ∧ ⇑(h.to_linear_map) y_1 = x * y),\\tz : A,\\thz : z ∈ J\\t⊢ ⇑(h.symm) x * z ∈ I\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\tB : Type u_1,\\t_inst_4 : comm_ring B,\\t_inst_5 : algebra R B,\\tI J : submodule R A,\\th : A ≃ₐ[R] B,\\tx : B,\\thx : ∀ (y : B), (∃ (y_1 : A), y_1 ∈ J ∧ ⇑(h.to_linear_map) y_1 = y) → (∃ (y_1 : A), y_1 ∈ I ∧ ⇑(h.to_linear_map) y_1 = x * y),\\tz : A,\\thz : z ∈ J,\\txz : A,\\txz_mem : xz ∈ I,\\thxz : ⇑(h.to_linear_map) xz = x * ⇑h z\\t⊢ ⇑(h.symm) x * z ∈ I\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\tB : Type u_1,\\t_inst_4 : comm_ring B,\\t_inst_5 : algebra R B,\\tI J : submodule R A,\\th : A ≃ₐ[R] B,\\tx : B,\\thx : ∀ (y : B), (∃ (y_1 : A), y_1 ∈ J ∧ 
⇑(h.to_linear_map) y_1 = y) → (∃ (y_1 : A), y_1 ∈ I ∧ ⇑(h.to_linear_map) y_1 = x * y),\\tz : A,\\thz : z ∈ J,\\txz : A,\\txz_mem : xz ∈ I,\\thxz : ⇑(h.to_linear_map) xz = x * ⇑h z\\t⊢ ⇑(h.symm) x * z = xz\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_semiring R,\\tA : Type v,\\t_inst_2 : comm_semiring A,\\t_inst_3 : algebra R A,\\tB : Type u_1,\\t_inst_4 : comm_ring B,\\t_inst_5 : algebra R B,\\tI J : submodule R A,\\th : A ≃ₐ[R] B,\\tx : B,\\thx : ∀ (y : B), (∃ (y_1 : A), y_1 ∈ J ∧ ⇑(h.to_linear_map) y_1 = y) → (∃ (y_1 : A), y_1 ∈ I ∧ ⇑(h.to_linear_map) y_1 = x * y),\\tz : A,\\thz : z ∈ J,\\txz : A,\\txz_mem : xz ∈ I,\\thxz : ⇑(h.to_linear_map) xz = x * ⇑h z\\t⊢ ⇑h (⇑(h.symm) x * z) = ⇑h xz\\n ',\n", + " '<|startoftext|> R : Type u_1,\\tS : Type u_2,\\tM : Type u_3,\\t_inst_1 : semiring S,\\t_inst_2 : add_comm_monoid M,\\t_inst_3 : comm_semiring R,\\t_inst_4 : algebra R S,\\t_inst_5 : module S M,\\tr : R,\\tS : S,\\tM : restrict_scalars R S M\\t⊢ (r • S) • M = r • S • M\\n ',\n", + " '<|startoftext|> R : Type u_1,\\tS : Type u_2,\\tM : Type u_3,\\t_inst_1 : semiring S,\\t_inst_2 : add_comm_monoid M,\\t_inst_3 : comm_semiring R,\\t_inst_4 : algebra R S,\\t_inst_5 : module S M,\\tr : R,\\tS : S,\\tM : restrict_scalars R S M\\t⊢ ⇑(algebra_map R S) r • S • M = r • S • M\\n ',\n", + " '<|startoftext|> R : Type u,\\tA : Type v,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A,\\t_inst_3 : algebra R A,\\tS : subalgebra R A\\t⊢ S.to_submodule * S.to_submodule = S.to_submodule\\n ',\n", + " '<|startoftext|> R : Type u,\\tA : Type v,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A,\\t_inst_3 : algebra R A,\\tS : subalgebra R A\\t⊢ S.to_submodule * S.to_submodule ≤ S.to_submodule\\n ',\n", + " '<|startoftext|> R : Type u,\\tA : Type v,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A,\\t_inst_3 : algebra R A,\\tS : subalgebra R A\\t⊢ ∀ (m : A), m ∈ S.to_submodule → ∀ (n : A), n ∈ S.to_submodule → m * n ∈ S.to_submodule\\n ',\n", + " '<|startoftext|> R : Type u,\\tA : Type v,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A,\\t_inst_3 : algebra R A,\\tS : subalgebra R A\\t⊢ S.to_submodule * S.to_submodule ≤ S.to_submodule\\t\\tR : Type u,\\tA : Type v,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A,\\t_inst_3 : algebra R A,\\tS : subalgebra R A\\t⊢ S.to_submodule ≤ S.to_submodule * S.to_submodule\\n ',\n", + " '<|startoftext|> R : Type u,\\tA : Type v,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A,\\t_inst_3 : algebra R A,\\tS : subalgebra R A,\\ty : A,\\thy : y ∈ S.to_submodule,\\tz : A,\\thz : z ∈ S.to_submodule\\t⊢ y * z ∈ S.to_submodule\\n ',\n", + " '<|startoftext|> R : Type u,\\tA : Type v,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A,\\t_inst_3 : algebra R A,\\tS : subalgebra R A\\t⊢ S.to_submodule ≤ S.to_submodule * S.to_submodule\\n ',\n", + " '<|startoftext|> R : Type u,\\tA : Type v,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A,\\t_inst_3 : algebra R A,\\tS : subalgebra R A,\\tx : A,\\thx1 : x ∈ S.to_submodule\\t⊢ x ∈ S.to_submodule * S.to_submodule\\n ',\n", + " '<|startoftext|> R : Type u,\\tA : Type v,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A,\\t_inst_3 : algebra R A,\\tS : subalgebra R A,\\tx : A,\\thx1 : x ∈ S.to_submodule\\t⊢ x * 1 ∈ S.to_submodule * S.to_submodule\\n ',\n", + " '<|startoftext|> R : Type u,\\tA : Type v,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring A,\\t_inst_3 : algebra R A\\t⊢ ↑⊥ = set.range ⇑(algebra_map R A)\\n ',\n", + " '<|startoftext|> R : Type u,\\tS : Type v,\\tA : Type w,\\t_inst_1 : 
comm_semiring R,\\t_inst_2 : comm_semiring S,\\t_inst_3 : semiring A,\\t_inst_4 : algebra R S,\\t_inst_5 : algebra S A,\\t_inst_6 : algebra R A,\\t_inst_7 : is_scalar_tower R S A,\\tU V : subalgebra S A,\\tH : subalgebra.restrict_scalars R U = subalgebra.restrict_scalars R V,\\tx : A\\t⊢ x ∈ U ↔ x ∈ V\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : linear_ordered_field α,\\t_inst_2 : archimedean α,\\tx y : α,\\txpos : 0 < x,\\thx : x ≤ 1,\\typos : 0 < y,\\thy : y < 1\\t⊢ ∃ (n : ℕ), y ^ (n + 1) < x ∧ x ≤ y ^ n\\n ',\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : linear_ordered_field α,\\t_inst_2 : archimedean α,\\tx y : α,\\txpos : 0 < x,\\thx : x ≤ 1,\\typos : 0 < y,\\thy : y < 1,\\tn : ℕ,\\thn : y⁻¹ ^ n ≤ x⁻¹,\\th'n : x⁻¹ < y⁻¹ ^ (n + 1)\\t⊢ ∃ (n : ℕ), y ^ (n + 1) < x ∧ x ≤ y ^ n\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : linear_ordered_field α,\\t_inst_2 : archimedean α,\\tx y : α,\\txpos : 0 < x,\\thx : x ≤ 1,\\typos : 0 < y,\\thy : y < 1,\\tn : ℕ,\\thn : y⁻¹ ^ n ≤ x⁻¹,\\th'n : x⁻¹ < y⁻¹ ^ (n + 1)\\t⊢ y ^ (n + 1) < x\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : linear_ordered_field α,\\t_inst_2 : archimedean α,\\tx y : α,\\txpos : 0 < x,\\thx : x ≤ 1,\\typos : 0 < y,\\thy : y < 1,\\tn : ℕ,\\thn : y⁻¹ ^ n ≤ x⁻¹,\\th'n : x⁻¹ < y⁻¹ ^ (n + 1)\\t⊢ y ^ (n + 1) < x\\t\\tα : Type u_1,\\t_inst_1 : linear_ordered_field α,\\t_inst_2 : archimedean α,\\tx y : α,\\txpos : 0 < x,\\thx : x ≤ 1,\\typos : 0 < y,\\thy : y < 1,\\tn : ℕ,\\thn : y⁻¹ ^ n ≤ x⁻¹,\\th'n : x⁻¹ < y⁻¹ ^ (n + 1)\\t⊢ x ≤ y ^ n\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : linear_ordered_field α,\\t_inst_2 : archimedean α,\\tx y : α,\\txpos : 0 < x,\\thx : x ≤ 1,\\typos : 0 < y,\\thy : y < 1,\\tn : ℕ,\\thn : y⁻¹ ^ n ≤ x⁻¹,\\th'n : x⁻¹ < y⁻¹ ^ (n + 1)\\t⊢ x ≤ y ^ n\\n \",\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : linear_ordered_field α,\\tH : ∀ (x : α), ∃ (n : ℕ), x < ↑n,\\tx y : α,\\ty0 : 0 < y,\\tn : ℕ,\\th : x / y < ↑n\\t⊢ x < n • y\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : comm_monoid_with_zero α,\\tp : α,\\thp : prime p,\\ts : multiset β,\\tf : β → α,\\th : p ∣ (multiset.map f s).prod\\t⊢ ∃ (a : β) (H : a ∈ s), p ∣ f a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : monoid α,\\tx : α,\\th : ¬is_unit x\\t⊢ irreducible x ∨ ∃ (a b : α), ¬is_unit a ∧ ¬is_unit b ∧ a * b = x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : monoid α,\\tx : α,\\th : ¬is_unit x,\\t_inst : Π (p : Prop), decidable p\\t⊢ irreducible x ∨ ∃ (a b : α), ¬is_unit a ∧ ¬is_unit b ∧ a * b = x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : monoid α,\\tx : α,\\th : ¬is_unit x,\\t_inst : Π (p : Prop), decidable p,\\tH : ¬∃ (a b : α), ¬is_unit a ∧ ¬is_unit b ∧ a * b = x\\t⊢ irreducible x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : monoid α,\\tx : α,\\th : ¬is_unit x,\\t_inst : Π (p : Prop), decidable p,\\tH : ∀ (x_1 : α), ¬is_unit x_1 → ∀ (x_2 : α), ¬is_unit x_2 → ¬x_1 * x_2 = x\\t⊢ ∀ (a b : α), x = a * b → is_unit a ∨ is_unit b\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : monoid α,\\tx : α,\\th : ¬is_unit x,\\t_inst : Π (p : Prop), decidable p,\\tH : ∀ (x_1 : α), ¬is_unit x_1 → ∀ (x_2 : α), ¬is_unit x_2 → ¬x_1 * x_2 = x,\\ta b : α,\\th : x = a * b,\\to : ¬(is_unit a ∨ is_unit b)\\t⊢ false\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : monoid α,\\tx : α,\\th : ¬is_unit x,\\t_inst : Π (p : Prop), decidable p,\\tH : ∀ (x_1 : α), ¬is_unit x_1 → ∀ (x_2 : α), ¬is_unit x_2 → ¬x_1 * x_2 = x,\\ta b : α,\\th : x = a * 
b,\\to : ¬is_unit a ∧ ¬is_unit b\\t⊢ false\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tM : Type u_4,\\tN : Type u_5,\\t_inst_1 : comm_monoid M,\\t_inst_2 : comm_monoid N,\\ts : set α,\\tf : α → M,\\tg : M →* N,\\th₀ : (s ∩ function.mul_support f).finite\\t⊢ ⇑g (∏ᶠ (j : α) (H : j ∈ s), f j) = ∏ᶠ (i : α) (H : i ∈ s), ⇑g (f i)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tM : Type u_4,\\tN : Type u_5,\\t_inst_1 : comm_monoid M,\\t_inst_2 : comm_monoid N,\\ts : set α,\\tf : α → M,\\tg : M →* N,\\th₀ : (s ∩ function.mul_support f).finite\\t⊢ ∏ᶠ (i : α), ⇑g (∏ᶠ (H : i ∈ s), f i) = ∏ᶠ (i : α) (H : i ∈ s), ⇑g (f i)\\t\\tα : Type u_1,\\tM : Type u_4,\\tN : Type u_5,\\t_inst_1 : comm_monoid M,\\t_inst_2 : comm_monoid N,\\ts : set α,\\tf : α → M,\\tg : M →* N,\\th₀ : (s ∩ function.mul_support f).finite\\t⊢ (function.mul_support (λ (i : α), ∏ᶠ (H : i ∈ s), f i)).finite\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tM : Type u_4,\\tN : Type u_5,\\t_inst_1 : comm_monoid M,\\t_inst_2 : comm_monoid N,\\ts : set α,\\tf : α → M,\\tg : M →* N,\\th₀ : (s ∩ function.mul_support f).finite\\t⊢ ∏ᶠ (i : α), ⇑g (∏ᶠ (H : i ∈ s), f i) = ∏ᶠ (i : α) (H : i ∈ s), ⇑g (f i)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tM : Type u_4,\\tN : Type u_5,\\t_inst_1 : comm_monoid M,\\t_inst_2 : comm_monoid N,\\ts : set α,\\tf : α → M,\\tg : M →* N,\\th₀ : (s ∩ function.mul_support f).finite\\t⊢ (function.mul_support (λ (i : α), ∏ᶠ (H : i ∈ s), f i)).finite\\n ',\n", + " '<|startoftext|> δ : Type u_1,\\t_inst_2 : add_comm_monoid δ,\\ta b : ℕ,\\thab : a < b,\\tf : ℕ → δ\\t⊢ a ∉ finset.Ico (a + 1) b\\n ',\n", + " '<|startoftext|> δ : Type u_1,\\t_inst_2 : add_comm_monoid δ,\\ta b : ℕ,\\thab : a < b,\\tf : ℕ → δ,\\tha : a ∉ finset.Ico (a + 1) b\\t⊢ ∑ (k : ℕ) in finset.Ico a b, f k = f a + ∑ (k : ℕ) in finset.Ico (a + 1) b, f k\\n ',\n", + " '<|startoftext|> J : Type u,\\t𝒥 : category_theory.small_category J\\t⊢ category_theory.limits.has_limits_of_shape J CommSemiRing\\n ',\n", + " '<|startoftext|> J : Type ?,\\t𝒥 : category_theory.small_category J,\\tF : J ⥤ CommSemiRing\\t⊢ category_theory.limits.preserves_limit F (category_theory.forget₂ CommSemiRing SemiRing)\\n ',\n", + " '<|startoftext|> G H : Group,\\tf₁ f₂ : G ⟶ H,\\tw : ∀ (x : ↥G), ⇑f₁ x = ⇑f₂ x,\\tx : ↥G\\t⊢ ⇑f₁ x = ⇑f₂ x\\n ',\n", + " '<|startoftext|> G H : Group,\\tf₁ f₂ : G ⟶ H,\\tw : ∀ (x : ↥G), ⇑f₁ x = ⇑f₂ x\\t⊢ f₁ = f₂\\n ',\n", + " '<|startoftext|> J : Type ?,\\t_x : decidable_eq J,\\t_x : fintype J\\t⊢ category_theory.limits.has_biproducts_of_shape J AddCommGroup\\n ',\n", + " '<|startoftext|> J : Type ?,\\t_x : decidable_eq J,\\t_x : fintype J,\\tf : J → AddCommGroup\\t⊢ category_theory.limits.has_biproduct f\\n ',\n", + " '<|startoftext|> G H : AddCommGroup,\\tf : G ⟶ H\\t⊢ (AddCommGroup.kernel_iso_ker f).hom ≫ (add_monoid_hom.ker f).subtype = category_theory.limits.kernel.ι f\\n ',\n", + " '<|startoftext|> G H : AddCommGroup,\\tf : G ⟶ H\\t⊢ (AddCommGroup.kernel_iso_ker f).hom ≫ (add_monoid_hom.ker f).subtype = category_theory.limits.kernel.ι f\\n ',\n", + " '<|startoftext|> G H : AddCommGroup,\\tf : G ⟶ H,\\tx : ↥(category_theory.limits.kernel f)\\t⊢ ⇑((AddCommGroup.kernel_iso_ker f).hom ≫ (add_monoid_hom.ker f).subtype) x = ⇑(category_theory.limits.kernel.ι f) x\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\tC : Type u,\\t_inst_2 : category_theory.category C,\\tD : Type u,\\t_inst_3 : category_theory.category D,\\t_inst_4 : category_theory.preadditive D,\\t_inst_5 : category_theory.linear R D,\\tF : C ⥤ D,\\tX Y : C,\\tf : X ⟶ 
Y,\\tr : R\\t⊢ (category_theory.Free.lift R F).map (finsupp.single f r) = r • F.map f\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\tC : Type u,\\t_inst_2 : category_theory.category C,\\tD : Type u,\\t_inst_3 : category_theory.category D,\\t_inst_4 : category_theory.preadditive D,\\t_inst_5 : category_theory.linear R D,\\tF : C ⥤ D\\t⊢ ∀ {X Y : C} (f : X ⟶ Y), (category_theory.Free.embedding R C ⋙ category_theory.Free.lift R F).map f ≫ (category_theory.iso.refl ((category_theory.Free.embedding R C ⋙ category_theory.Free.lift R F).obj Y)).hom = (category_theory.iso.refl ((category_theory.Free.embedding R C ⋙ category_theory.Free.lift R F).obj X)).hom ≫ F.map f\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : ring R,\\tX Y : Module R,\\ti : X ≅ Y\\t⊢ function.left_inverse ⇑(i.inv) ⇑(i.hom)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : ring R,\\tX Y : Module R,\\ti : X ≅ Y\\t⊢ function.right_inverse ⇑(i.inv) ⇑(i.hom)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : ring R,\\tX Y : Module R,\\ti : X ≅ Y\\t⊢ ∀ (x y : ↥X), ⇑(i.hom) (x + y) = ⇑(i.hom) x + ⇑(i.hom) y\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : ring R,\\tX Y : Module R,\\ti : X ≅ Y\\t⊢ ∀ (r : R) (x : ↥X), ⇑(i.hom) (r • x) = ⇑(ring_hom.id R) r • ⇑(i.hom) x\\n ',\n", + " '<|startoftext|> R : Type v,\\t_inst_1 : ring R,\\tJ : Type v,\\t_inst_2 : category_theory.small_category J,\\tF : J ⥤ Module R,\\tj : J\\t⊢ ∀ (r : R) (x : ↥(F.obj j)), Module.colimits.cocone_fun F j (r • x) = ⇑(ring_hom.id R) r • Module.colimits.cocone_fun F j x\\n ',\n", + " '<|startoftext|> R : Type v,\\t_inst_1 : ring R,\\tJ : Type v,\\t_inst_2 : category_theory.small_category J,\\tF : J ⥤ Module R,\\tj : J,\\tr : R,\\tx : ↥(F.obj j)\\t⊢ Module.colimits.cocone_fun F j (r • x) = ⇑(ring_hom.id R) r • Module.colimits.cocone_fun F j x\\n ',\n", + " '<|startoftext|> R : Type v,\\t_inst_1 : ring R,\\tJ : Type v,\\t_inst_2 : category_theory.small_category J,\\tF : J ⥤ Module R,\\tj : J,\\tr : R,\\tx : ↥(F.obj j)\\t⊢ setoid.r (Module.colimits.prequotient.of j (r • x)) (Module.colimits.prequotient.smul (⇑(ring_hom.id R) r) (Module.colimits.prequotient.of j x))\\n ',\n", + " '<|startoftext|> R : Type v,\\t_inst_1 : ring R,\\tJ : Type v,\\t_inst_2 : category_theory.small_category J,\\tF : J ⥤ Module R,\\tj : J\\t⊢ ∀ (x y : ↥(F.obj j)), Module.colimits.cocone_fun F j (x + y) = Module.colimits.cocone_fun F j x + Module.colimits.cocone_fun F j y\\n ',\n", + " '<|startoftext|> R : Type v,\\t_inst_1 : ring R,\\tJ : Type v,\\t_inst_2 : category_theory.small_category J,\\tF : J ⥤ Module R,\\tj : J\\t⊢ ∀ (x y : ↥(F.obj j)), Module.colimits.cocone_fun F j (x + y) = Module.colimits.cocone_fun F j x + Module.colimits.cocone_fun F j y\\n ',\n", + " '<|startoftext|> R : Type v,\\t_inst_1 : ring R,\\tJ : Type v,\\t_inst_2 : category_theory.small_category J,\\tF : J ⥤ Module R,\\tj : J\\t⊢ ∀ (x y : ↥(F.obj j)), Module.colimits.cocone_fun F j (x + y) = Module.colimits.cocone_fun F j x + Module.colimits.cocone_fun F j y\\n ',\n", + " '<|startoftext|> R : Type v,\\t_inst_1 : ring R,\\tJ : Type v,\\t_inst_2 : category_theory.small_category J,\\tF : J ⥤ Module R,\\tj : J,\\tx y : ↥(F.obj j)\\t⊢ Module.colimits.cocone_fun F j (x + y) = Module.colimits.cocone_fun F j x + Module.colimits.cocone_fun F j y\\n ',\n", + " '<|startoftext|> R : Type v,\\t_inst_1 : ring R,\\tJ : Type v,\\t_inst_2 : category_theory.small_category J,\\tF : J ⥤ Module R,\\tj : J,\\tx y : ↥(F.obj j)\\t⊢ setoid.r (Module.colimits.prequotient.of j (x + y)) 
((Module.colimits.prequotient.of j x).add (Module.colimits.prequotient.of j y))\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : ring R,\\tM : Module R,\\tm : ↥M\\t⊢ ⇑(⇑(finsupp.basis_single_one.constr ℕ) id) (finsupp.single m 1) = m\\n ',\n", + " '<|startoftext|> J : Type ?,\\t_x : category_theory.small_category J,\\t_x : category_theory.is_filtered J\\t⊢ category_theory.limits.preserves_colimits_of_shape J (category_theory.forget Mon)\\n ',\n", + " '<|startoftext|> J : Type ?,\\t𝒥 : category_theory.small_category J,\\tF : J ⥤ CommMon\\t⊢ category_theory.limits.preserves_limit F (category_theory.forget₂ CommMon Mon)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : non_assoc_semiring R,\\t_inst_2 : char_p R 1,\\tthis : ∀ (r : R), r = 0,\\ta b : R\\t⊢ a = b\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : non_assoc_semiring R,\\t_inst_2 : char_p R 1,\\tr : R\\t⊢ r = 1 * r\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : non_assoc_semiring R,\\t_inst_2 : char_p R 1,\\tr : R\\t⊢ 1 * r = ↑1 * r\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : non_assoc_semiring R,\\t_inst_2 : char_p R 1,\\tr : R\\t⊢ ↑1 * r = 0 * r\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : non_assoc_semiring R,\\t_inst_2 : char_p R 1,\\tr : R\\t⊢ 0 * r = 0\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\tn : ℕ,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\thyp : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1)\\t⊢ ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\thyp : 0 = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (0 - 1)\\t⊢ ((generalized_continued_fraction.of v).continuants_aux 0).a * ((generalized_continued_fraction.of v).continuants_aux (0 + 1)).b - ((generalized_continued_fraction.of v).continuants_aux 0).b * ((generalized_continued_fraction.of v).continuants_aux (0 + 1)).a = (-1) ^ 0\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\thyp : 0 = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (0 - 1)\\t⊢ ((generalized_continued_fraction.of v).continuants_aux 0).a * ((generalized_continued_fraction.of v).continuants_aux (0 + 1)).b - ((generalized_continued_fraction.of v).continuants_aux 0).b * ((generalized_continued_fraction.of v).continuants_aux (0 + 1)).a = (-1) ^ 0\\t\\tcase nat.succ\\tK : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1)\\t⊢ ((generalized_continued_fraction.of v).continuants_aux n.succ).a * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n.succ).b * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).a = (-1) ^ n.succ\\n ',\n", + " 
'<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1)\\t⊢ ((generalized_continued_fraction.of v).continuants_aux n.succ).a * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n.succ).b * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).a = (-1) ^ n.succ\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1)\\t⊢ ((generalized_continued_fraction.of v).continuants_aux n.succ).a * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n.succ).b * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).a = (-1) ^ n.succ\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v\\t⊢ ((generalized_continued_fraction.of v).continuants_aux n.succ).a * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n.succ).b * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).a = (-1) ^ n.succ\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2)\\t⊢ ((generalized_continued_fraction.of v).continuants_aux n.succ).a * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).b - ((generalized_continued_fraction.of v).continuants_aux 
n.succ).b * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).a = (-1) ^ n.succ\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1)\\t⊢ ((generalized_continued_fraction.of v).continuants_aux n.succ).a * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n.succ).b * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).a = (-1) ^ n.succ\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n\\t⊢ ((generalized_continued_fraction.of v).continuants_aux n.succ).a * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n.succ).b * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).a = (-1) ^ n.succ\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := 
pred_conts.a\\t⊢ ((generalized_continued_fraction.of v).continuants_aux n.succ).a * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n.succ).b * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).a = (-1) ^ n.succ\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b\\t⊢ ((generalized_continued_fraction.of v).continuants_aux n.succ).a * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n.succ).b * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).a = (-1) ^ n.succ\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a\\t⊢ ((generalized_continued_fraction.of v).continuants_aux n.succ).a * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n.succ).b * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).a = (-1) ^ n.succ\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of 
v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b\\t⊢ ((generalized_continued_fraction.of v).continuants_aux n.succ).a * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n.succ).b * ((generalized_continued_fraction.of v).continuants_aux (n.succ + 1)).a = (-1) ^ n.succ\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b\\t⊢ ¬g.terminated_at n\\t\\tK : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n\\t⊢ pA * conts.b - pB * conts.a = (-1) ^ (n + 1)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * 
((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b\\t⊢ pA * conts.b - pB * conts.a = (-1) ^ (n + 1)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n\\t⊢ ∃ (gp : generalized_continued_fraction.pair K), g.s.nth n = option.some gp\\t\\tK : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp\\t⊢ pA * conts.b - pB * conts.a = (-1) ^ (n + 1)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 
1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n\\t⊢ pA * conts.b - pB * conts.a = (-1) ^ (n + 1)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp\\t⊢ pA * conts.b - pB * conts.a = (-1) ^ (n + 1)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp,\\tthis : pA * (ppB + gp.b * pB) - pB * (ppA + gp.b * pA) = (-1) ^ (n + 1)\\t⊢ pA * conts.b - pB * conts.a = (-1) ^ (n + 1)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : 
K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp,\\tthis : pA * (ppB + gp.b * pB) - pB * (ppA + gp.b * pA) = (-1) ^ (n + 1)\\t⊢ gp.a = 1\\t\\tK : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp,\\tthis : pA * (ppB + gp.b * pB) - pB * (ppA + gp.b * pA) = (-1) ^ (n + 1),\\tgp_a_eq_one : gp.a = 1\\t⊢ pA * (gp.b * (g.continuants_aux (n + 1)).b + gp.a * (g.continuants_aux n).b) - pB * (gp.b * (g.continuants_aux (n + 1)).a + gp.a * (g.continuants_aux n).a) = (-1) ^ (n + 1)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 
1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp,\\tthis : pA * (ppB + gp.b * pB) - pB * (ppA + gp.b * pA) = (-1) ^ (n + 1)\\t⊢ pA * (gp.b * (g.continuants_aux (n + 1)).b + gp.a * (g.continuants_aux n).b) - pB * (gp.b * (g.continuants_aux (n + 1)).a + gp.a * (g.continuants_aux n).a) = (-1) ^ (n + 1)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp,\\tthis : pA * (ppB + gp.b * pB) - pB * (ppA + gp.b * pA) = (-1) ^ (n + 1),\\tgp_a_eq_one : gp.a = 1\\t⊢ pA * (gp.b * (g.continuants_aux (n + 1)).b + gp.a * (g.continuants_aux n).b) - pB * (gp.b * (g.continuants_aux (n + 1)).a + gp.a * (g.continuants_aux n).a) = (-1) ^ (n + 1)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp,\\tthis : pA * (ppB + gp.b * pB) - pB * (ppA + gp.b * pA) = (-1) ^ (n + 1),\\tgp_a_eq_one : gp.a = 1\\t⊢ pA * (gp.b * 
(g.continuants_aux (n + 1)).b + 1 * (g.continuants_aux n).b) - pB * (gp.b * (g.continuants_aux (n + 1)).a + 1 * (g.continuants_aux n).a) = pA * (ppB + gp.b * pB) - pB * (ppA + gp.b * pA)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp,\\tthis : pA * ppB - pB * ppA = (-1) ^ (n + 1)\\t⊢ pA * (ppB + gp.b * pB) - pB * (ppA + gp.b * pA) = (-1) ^ (n + 1)\\t\\tK : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp\\t⊢ pA * ppB - pB * ppA = (-1) ^ (n + 1)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := 
g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp\\t⊢ pA * (ppB + gp.b * pB) - pB * (ppA + gp.b * pA) = (-1) ^ (n + 1)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp,\\tthis : pA * ppB - pB * ppA = (-1) ^ (n + 1)\\t⊢ pA * (ppB + gp.b * pB) - pB * (ppA + gp.b * pA) = pA * ppB + pA * gp.b * pB - pB * ppA - pB * gp.b * pA\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp,\\tthis : pA * ppB - pB * ppA = (-1) ^ (n + 1)\\t⊢ pA * ppB + pA * gp.b * pB - pB * ppA - pB * gp.b * pA = pA * ppB - pB * ppA\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * 
((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp,\\tthis : pA * ppB - pB * ppA = (-1) ^ (n + 1)\\t⊢ pA * ppB - pB * ppA = (-1) ^ (n + 1)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp\\t⊢ pA * ppB - pB * ppA = (-1) ^ (n + 1)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some 
gp,\\tthis : ppA * pB - ppB * pA = (-1) ^ n\\t⊢ (-1) ^ (n + 1) = (-1) * (-1) ^ n\\t\\tK : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp,\\tthis : ppA * pB - ppB * pA = (-1) ^ n,\\tpow_succ_n : (-1) ^ (n + 1) = (-1) * (-1) ^ n\\t⊢ pA * ppB - pB * ppA = (-1) ^ (n + 1)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp,\\tthis : ppA * pB - ppB * pA = (-1) ^ n\\t⊢ pA * ppB - pB * ppA = (-1) ^ (n + 1)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : 
pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp,\\tthis : ppA * pB - ppB * pA = (-1) ^ n,\\tpow_succ_n : (-1) ^ (n + 1) = (-1) * (-1) ^ n\\t⊢ pA * ppB - pB * ppA = (-1) ^ (n + 1)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp,\\tthis : ppA * pB - ppB * pA = (-1) ^ n,\\tpow_succ_n : (-1) ^ (n + 1) = (-1) * (-1) ^ n\\t⊢ pA * ppB - pB * ppA = (-1) * (ppA * pB - ppB * pA)\\n ',\n", + " '<|startoftext|> K : Type u_1,\\tv : K,\\t_inst_1 : linear_ordered_field K,\\t_inst_2 : floor_ring K,\\tn : ℕ,\\tIH : n = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n - 1) → ((generalized_continued_fraction.of v).continuants_aux n).a * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).b - ((generalized_continued_fraction.of v).continuants_aux n).b * ((generalized_continued_fraction.of v).continuants_aux (n + 1)).a = (-1) ^ n,\\thyp : n.succ = 0 ∨ ¬(generalized_continued_fraction.of v).terminated_at (n.succ - 1),\\tg : generalized_continued_fraction K := generalized_continued_fraction.of v,\\tconts : generalized_continued_fraction.pair K := g.continuants_aux (n + 2),\\tpred_conts : generalized_continued_fraction.pair K := g.continuants_aux (n + 1),\\tpred_conts_eq : pred_conts = g.continuants_aux (n + 1),\\tppred_conts : generalized_continued_fraction.pair K := g.continuants_aux n,\\tppred_conts_eq : ppred_conts = g.continuants_aux n,\\tpA : K := pred_conts.a,\\tpB : K := pred_conts.b,\\tppA : K := ppred_conts.a,\\tppB : K := ppred_conts.b,\\tnot_terminated_at_n : ¬g.terminated_at n,\\tgp : generalized_continued_fraction.pair K,\\ts_nth_eq : g.s.nth n = option.some gp\\t⊢ ppA * pB - ppB * pA = (-1) ^ n\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tN : Type u_2,\\tμ : M → N → N,\\t_inst_1 : partial_order N\\t⊢ covariant M N μ has_lt.lt → covariant M N μ has_le.le\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tN : Type u_2,\\tμ : M → N → N,\\t_inst_1 : partial_order N,\\th : covariant M N μ has_lt.lt,\\ta : M,\\tb c : N,\\tbc : b ≤ c\\t⊢ μ a b ≤ μ a c\\n ',\n", + " 
'<|startoftext|> M : Type u_1,\\tN : Type u_2,\\tμ : M → N → N,\\t_inst_1 : partial_order N,\\th : covariant M N μ has_lt.lt,\\ta : M,\\tb : N,\\tbc : b ≤ b\\t⊢ μ a b ≤ μ a b\\t\\tM : Type u_1,\\tN : Type u_2,\\tμ : M → N → N,\\t_inst_1 : partial_order N,\\th : covariant M N μ has_lt.lt,\\ta : M,\\tb c : N,\\tbc : b ≤ c,\\tbc : b < c\\t⊢ μ a b ≤ μ a c\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tN : Type u_2,\\tμ : M → N → N,\\t_inst_1 : partial_order N,\\th : covariant M N μ has_lt.lt,\\ta : M,\\tb : N,\\tbc : b ≤ b\\t⊢ μ a b ≤ μ a b\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tN : Type u_2,\\tμ : M → N → N,\\t_inst_1 : partial_order N,\\th : covariant M N μ has_lt.lt,\\ta : M,\\tb c : N,\\tbc : b ≤ c,\\tbc : b < c\\t⊢ μ a b ≤ μ a c\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : ring R,\\tι : Type v,\\t_inst_2 : directed_order ι,\\tG : ι → Type w,\\t_inst_3 : Π (i : ι), add_comm_group (G i),\\t_inst_4 : Π (i : ι), module R (G i),\\tf : Π (i j : ι), i ≤ j → (G i →ₗ[R] G j),\\ti j : ι,\\tx : G i,\\th : i ≤ j\\t⊢ ⇑(module.direct_limit.totalize G f i j) x = dite (i ≤ j) (λ (h : i ≤ j), ⇑(f i j h) x) (λ (h : ¬i ≤ j), 0)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : ring R,\\tι : Type v,\\t_inst_2 : directed_order ι,\\tG : ι → Type w,\\t_inst_3 : Π (i : ι), add_comm_group (G i),\\t_inst_4 : Π (i : ι), module R (G i),\\tf : Π (i j : ι), i ≤ j → (G i →ₗ[R] G j),\\ti j : ι,\\tx : G i,\\th : i ≤ j\\t⊢ ⇑(module.direct_limit.totalize G f i j) x = dite (i ≤ j) (λ (h : i ≤ j), ⇑(f i j h) x) (λ (h : ¬i ≤ j), 0)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : ring R,\\tι : Type v,\\t_inst_2 : directed_order ι,\\tG : ι → Type w,\\t_inst_3 : Π (i : ι), add_comm_group (G i),\\t_inst_4 : Π (i : ι), module R (G i),\\tf : Π (i j : ι), i ≤ j → (G i →ₗ[R] G j),\\ti j : ι,\\tx : G i,\\th : i ≤ j\\t⊢ ⇑(dite (i ≤ j) (f i j) (λ (h : ¬i ≤ j), 0)) x = dite (i ≤ j) (λ (h : i ≤ j), ⇑(f i j h) x) (λ (h : ¬i ≤ j), 0)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : ring R,\\tι : Type v,\\t_inst_2 : directed_order ι,\\tG : ι → Type w,\\t_inst_3 : Π (i : ι), add_comm_group (G i),\\t_inst_4 : Π (i : ι), module R (G i),\\tf : Π (i j : ι), i ≤ j → (G i →ₗ[R] G j),\\ti j : ι,\\tx : G i,\\th : ¬i ≤ j\\t⊢ ⇑(module.direct_limit.totalize G f i j) x = dite (i ≤ j) (λ (h : i ≤ j), ⇑(f i j h) x) (λ (h : ¬i ≤ j), 0)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : ring R,\\tι : Type v,\\t_inst_2 : directed_order ι,\\tG : ι → Type w,\\t_inst_3 : Π (i : ι), add_comm_group (G i),\\t_inst_4 : Π (i : ι), module R (G i),\\tf : Π (i j : ι), i ≤ j → (G i →ₗ[R] G j),\\ti j : ι,\\tx : G i,\\th : ¬i ≤ j\\t⊢ ⇑(module.direct_limit.totalize G f i j) x = dite (i ≤ j) (λ (h : i ≤ j), ⇑(f i j h) x) (λ (h : ¬i ≤ j), 0)\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : ring R,\\tι : Type v,\\t_inst_2 : directed_order ι,\\tG : ι → Type w,\\t_inst_3 : Π (i : ι), add_comm_group (G i),\\t_inst_4 : Π (i : ι), module R (G i),\\tf : Π (i j : ι), i ≤ j → (G i →ₗ[R] G j),\\ti j : ι,\\tx : G i,\\th : ¬i ≤ j\\t⊢ ⇑(dite (i ≤ j) (f i j) (λ (h : ¬i ≤ j), 0)) x = dite (i ≤ j) (λ (h : i ≤ j), ⇑(f i j h) x) (λ (h : ¬i ≤ j), 0)\\n ',\n", + " '<|startoftext|> ι : Type v,\\t_inst_2 : directed_order ι,\\tG : ι → Type w,\\t_inst_3 : nonempty ι,\\t_inst_4 : Π (i : ι), field (G i),\\tf : Π (i j : ι), i ≤ j → G i → G j,\\tp : ring.direct_limit G f,\\ti : ι,\\tx : G i,\\tH : ⇑(ring.direct_limit.of G f i) x ≠ 0\\t⊢ ⇑(ring.direct_limit.of G f i) x * ⇑(ring.direct_limit.of G f i) x⁻¹ = 1\\n ',\n", + " '<|startoftext|> ι : Type 
v,\\t_inst_2 : directed_order ι,\\tG : ι → Type w,\\t_inst_3 : nonempty ι,\\t_inst_4 : Π (i : ι), field (G i),\\tf : Π (i j : ι), i ≤ j → G i → G j,\\tp : ring.direct_limit G f,\\ti : ι,\\tx : G i,\\tH : ⇑(ring.direct_limit.of G f i) x ≠ 0,\\th : x = 0\\t⊢ ⇑(ring.direct_limit.of G f i) x = 0\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_monoid α,\\ta b : α,\\th : a ∣ b,\\tc : α\\t⊢ a ∣ b * c\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_monoid α,\\ta b : α,\\th : a ∣ b,\\tc : α\\t⊢ a ∣ c * b\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_monoid_with_zero α,\\ta b : α,\\thd : a ∣ b,\\thnd : ¬b ∣ a\\t⊢ dvd_not_unit a b\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_monoid_with_zero α,\\tb : α,\\thd : 0 ∣ b,\\thnd : ¬b ∣ 0\\t⊢ false\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_monoid_with_zero α,\\ta b : α,\\thd : a ∣ b,\\thnd : ¬b ∣ a\\t⊢ a ≠ 0\\t\\tα : Type u_1,\\t_inst_1 : comm_monoid_with_zero α,\\ta b : α,\\thd : a ∣ b,\\thnd : ¬b ∣ a\\t⊢ ∃ (x : α), ¬is_unit x ∧ b = a * x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_monoid_with_zero α,\\ta b : α,\\thd : a ∣ b,\\thnd : ¬b ∣ a\\t⊢ a ≠ 0\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_monoid_with_zero α,\\ta b : α,\\thd : a ∣ b,\\thnd : ¬b ∣ a\\t⊢ ∃ (x : α), ¬is_unit x ∧ b = a * x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_monoid_with_zero α,\\ta c : α,\\thnd : ¬a * c ∣ a\\t⊢ ∃ (x : α), ¬is_unit x ∧ a * c = a * x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_monoid_with_zero α,\\ta c : α,\\thnd : ¬a * c ∣ a\\t⊢ ¬is_unit c\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_monoid_with_zero α,\\ta : α,\\tu : units α,\\thnd : ¬a * ↑u ∣ a\\t⊢ false\\n ',\n", + " \"<|startoftext|> R : Type u,\\t_inst_1 : euclidean_domain R,\\t_inst_2 : decidable_eq R,\\ts t r' s' t' : R\\t⊢ ite (0 = 0) (r', s', t') (let q : R := r' / 0 in euclidean_domain.xgcd_aux (r' % 0) (s' - q * s) (t' - q * t) 0 s t) = (r', s', t')\\n \",\n", + " \"<|startoftext|> R : Type u,\\t_inst_1 : euclidean_domain R,\\t_inst_2 : decidable_eq R,\\ts t r' s' t' : R\\t⊢ euclidean_domain.xgcd_aux 0 s t r' s' t' = (r', s', t')\\n \",\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : euclidean_domain R,\\t_inst_2 : decidable_eq R,\\tx : R\\t⊢ euclidean_domain.lcm 0 x = 0\\n ',\n", + " '<|startoftext|> K : Type u,\\t_inst_1 : division_ring K,\\ta : K\\t⊢ (-a)⁻¹ = -a⁻¹\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : linear_ordered_ring α,\\t_inst_2 : floor_ring α,\\ta : α\\t⊢ a < ↑⌊a⌋₊ + 1\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : linear_ordered_ring α,\\t_inst_2 : floor_ring α,\\ta : α\\t⊢ ↑⌊a⌋ ≤ ↑⌊a⌋₊\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : linear_ordered_ring α,\\t_inst_2 : floor_ring α,\\ta : α\\t⊢ ⌊a⌋ ≤ ↑⌊a⌋₊\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : linear_ordered_ring α,\\t_inst_2 : floor_ring α,\\tx : α\\t⊢ x < ↑⌊x⌋ + 1\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\tm n k : α,\\tH : k ∣ m * n\\t⊢ k ∣ gcd_monoid.gcd k m * n\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\tm n k : α,\\tH : k ∣ m * n\\t⊢ k ∣ gcd_monoid.gcd k m * ⇑normalize n\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\tm n k : 
α,\\tH : k ∣ m * n\\t⊢ k ∣ gcd_monoid.gcd k m * ⇑normalize n\\t\\tα : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\tm n k : α,\\tH : k ∣ m * n\\t⊢ gcd_monoid.gcd k m * ⇑normalize n ∣ gcd_monoid.gcd k m * n\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\tm n k : α,\\tH : k ∣ m * n\\t⊢ k ∣ gcd_monoid.gcd (k * n) (m * n)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\tm n k : α,\\tH : k ∣ m * n\\t⊢ gcd_monoid.gcd k m * ⇑normalize n ∣ gcd_monoid.gcd k m * n\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\tm n k : α,\\tH : k ∣ m * n\\t⊢ gcd_monoid.gcd k m * ⇑normalize n * ↑(normalization_monoid.norm_unit n)⁻¹ = gcd_monoid.gcd k m * n\\t\\tα : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\tm n k : α,\\tH : k ∣ m * n\\t⊢ has_lift_t (units α) α\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\tm n k : α,\\tH : k ∣ m * n\\t⊢ gcd_monoid.gcd k m * (n * ↑(normalization_monoid.norm_unit n * (normalization_monoid.norm_unit n)⁻¹)) = gcd_monoid.gcd k m * n\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\ta b : α,\\tthis : gcd_monoid.lcm a b = 0\\t⊢ ⇑normalize (gcd_monoid.lcm a b) = gcd_monoid.lcm a b\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\ta b : α,\\th_lcm : gcd_monoid.lcm a b ≠ 0\\t⊢ gcd_monoid.gcd a b = 0 → gcd_monoid.lcm a b = 0\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\ta b : α,\\th_lcm : gcd_monoid.lcm a b ≠ 0\\t⊢ gcd_monoid.gcd a b = 0 → gcd_monoid.lcm a b = 0\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\ta b : α,\\th_lcm : gcd_monoid.lcm a b ≠ 0\\t⊢ gcd_monoid.gcd a b = 0 → gcd_monoid.lcm a b = 0\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\ta b : α,\\th_lcm : gcd_monoid.lcm a b ≠ 0\\t⊢ gcd_monoid.gcd a b = 0 → gcd_monoid.lcm a b = 0\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\th_lcm : gcd_monoid.lcm 0 0 ≠ 0\\t⊢ 0 = 0 ∨ 0 = 0\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\th_lcm : gcd_monoid.lcm 0 0 ≠ 0\\t⊢ 0 = 0\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\ta b : α,\\th_lcm : gcd_monoid.lcm a b ≠ 0\\t⊢ a = 0 ∧ b = 0 → a = 0 ∨ b = 0\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\ta b : α,\\th_lcm : gcd_monoid.lcm a b ≠ 0,\\th1 : gcd_monoid.gcd a b ≠ 0\\t⊢ ⇑normalize (gcd_monoid.gcd a b * gcd_monoid.lcm a b) = gcd_monoid.gcd a b * gcd_monoid.lcm a 
b\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\ta b : α,\\th_lcm : gcd_monoid.lcm a b ≠ 0,\\th1 : gcd_monoid.gcd a b ≠ 0,\\th2 : ⇑normalize (gcd_monoid.gcd a b * gcd_monoid.lcm a b) = gcd_monoid.gcd a b * gcd_monoid.lcm a b\\t⊢ ⇑normalize (gcd_monoid.lcm a b) = gcd_monoid.lcm a b\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : decidable_eq α,\\tf : associates α →* α,\\thinv : function.right_inverse ⇑f associates.mk,\\ta b : α,\\tha : a ≠ 0,\\thb : b ≠ 0\\t⊢ ite (a * b = 0) 1 (classical.some _) = ite (a = 0) 1 (classical.some _) * ite (b = 0) 1 (classical.some _)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : decidable_eq α,\\tf : associates α →* α,\\thinv : function.right_inverse ⇑f associates.mk,\\ta b : α,\\tha : a ≠ 0,\\thb : b ≠ 0\\t⊢ ↑(classical.some _) = ↑(classical.some _) * ↑(classical.some _)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : decidable_eq α,\\tf : associates α →* α,\\thinv : function.right_inverse ⇑f associates.mk,\\ta b : α,\\tha : a ≠ 0,\\thb : b ≠ 0,\\tthis : a * b * ↑(classical.some _) = a * ↑(classical.some _) * (b * ↑(classical.some _))\\t⊢ ↑(classical.some _) = ↑(classical.some _) * ↑(classical.some _)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : decidable_eq α,\\tf : associates α →* α,\\thinv : function.right_inverse ⇑f associates.mk,\\ta b : α,\\tha : a ≠ 0,\\thb : b ≠ 0,\\tthis : a * b * ↑(classical.some _) = a * ↑(classical.some _) * (b * ↑(classical.some _))\\t⊢ ↑(classical.some _) = ↑(classical.some _) * ↑(classical.some _)\\t\\tα : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : decidable_eq α,\\tf : associates α →* α,\\thinv : function.right_inverse ⇑f associates.mk,\\ta b : α,\\tha : a ≠ 0,\\thb : b ≠ 0\\t⊢ a * b * ↑(classical.some _) = a * ↑(classical.some _) * (b * ↑(classical.some _))\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : decidable_eq α,\\tf : associates α →* α,\\thinv : function.right_inverse ⇑f associates.mk,\\ta b : α,\\tha : a ≠ 0,\\thb : b ≠ 0,\\tthis : a * b * ↑(classical.some _) = a * ↑(classical.some _) * (b * ↑(classical.some _))\\t⊢ a * b * ↑(classical.some _) = a * b * (↑(classical.some _) * ↑(classical.some _))\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : decidable_eq α,\\tf : associates α →* α,\\thinv : function.right_inverse ⇑f associates.mk,\\ta b : α,\\tha : a ≠ 0,\\thb : b ≠ 0\\t⊢ a * b * ↑(classical.some _) = a * ↑(classical.some _) * (b * ↑(classical.some _))\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : decidable_eq α,\\tf : associates α →* α,\\thinv : function.right_inverse ⇑f associates.mk,\\tu : units α\\t⊢ ite (↑u = 0) 1 (classical.some _) = u⁻¹\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : decidable_eq α,\\tf : associates α →* α,\\thinv : function.right_inverse ⇑f associates.mk,\\tu : units α\\t⊢ ↑(classical.some _) = ↑u⁻¹\\n ',\n", + " 
'<|startoftext|> α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : decidable_eq α,\\tf : associates α →* α,\\thinv : function.right_inverse ⇑f associates.mk,\\tu : units α\\t⊢ ↑u * ↑(classical.some _) = ↑u * ↑u⁻¹\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\ts : finset β,\\tf : β → α,\\ta : α\\t⊢ s.gcd (λ (x : β), f x * a) = s.gcd f * ⇑normalize a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\ts : finset β,\\tf : β → α,\\ta : α,\\t_inst : Π (a : Prop), decidable a\\t⊢ s.gcd (λ (x : β), f x * a) = s.gcd f * ⇑normalize a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\ts : finset β,\\tf : β → α,\\ta : α,\\t_inst : Π (a : Prop), decidable a\\t⊢ ∅.gcd (λ (x : β), f x * a) = ∅.gcd f * ⇑normalize a\\t\\tα : Type u_1,\\tβ : Type u_2,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\ts : finset β,\\tf : β → α,\\ta : α,\\t_inst : Π (a : Prop), decidable a\\t⊢ ∀ ⦃a_1 : β⦄ {s : finset β}, a_1 ∉ s → s.gcd (λ (x : β), f x * a) = s.gcd f * ⇑normalize a → (has_insert.insert a_1 s).gcd (λ (x : β), f x * a) = (has_insert.insert a_1 s).gcd f * ⇑normalize a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\ts : finset β,\\tf : β → α,\\ta : α,\\t_inst : Π (a : Prop), decidable a\\t⊢ ∅.gcd (λ (x : β), f x * a) = ∅.gcd f * ⇑normalize a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\ts : finset β,\\tf : β → α,\\ta : α,\\t_inst : Π (a : Prop), decidable a\\t⊢ ∀ ⦃a_1 : β⦄ {s : finset β}, a_1 ∉ s → s.gcd (λ (x : β), f x * a) = s.gcd f * ⇑normalize a → (has_insert.insert a_1 s).gcd (λ (x : β), f x * a) = (has_insert.insert a_1 s).gcd f * ⇑normalize a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\ts : finset β,\\tf : β → α,\\ta : α,\\t_inst : Π (a : Prop), decidable a,\\tb : β,\\tt : finset β,\\thbt : b ∉ t,\\th : t.gcd (λ (x : β), f x * a) = t.gcd f * ⇑normalize a\\t⊢ (has_insert.insert b t).gcd (λ (x : β), f x * a) = (has_insert.insert b t).gcd f * ⇑normalize a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : comm_cancel_monoid_with_zero α,\\t_inst_2 : nontrivial α,\\t_inst_3 : gcd_monoid α,\\ts : finset β,\\tf : β → α,\\ta : α,\\t_inst : Π (a : Prop), decidable a,\\tb : β,\\tt : finset β,\\thbt : b ∉ t,\\th : t.gcd (λ (x : β), f x * a) = t.gcd f * ⇑normalize a\\t⊢ gcd_monoid.gcd (f b * a) (t.gcd f * ⇑normalize a) = gcd_monoid.gcd (f b * a) (t.gcd f * a)\\n ',\n", + " '<|startoftext|> G : Type u,\\t_inst_1 : add_comm_group G,\\ta b c : G\\t⊢ c + a - (c + b) = a - b\\n ',\n", + " '<|startoftext|> G : Type u_1,\\t_inst_1 : group G,\\ta b : G,\\th : commute a b\\t⊢ a * b * a⁻¹ = b\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tm₁ m₂ : monoid M,\\th_mul : monoid.mul = monoid.mul\\t⊢ m₁ = m₂\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tm₁ m₂ : monoid M,\\th_mul : monoid.mul = monoid.mul\\t⊢ m₁ = m₂\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tm₂ : 
monoid M,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\th_mul : monoid.mul = monoid.mul\\t⊢ {mul := mul₁, mul_assoc := m₁_mul_assoc, one := one₁, one_mul := one_mul₁, mul_one := mul_one₁, npow := npow₁, npow_zero\\' := npow_zero₁, npow_succ\\' := npow_succ₁} = m₂\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tmul₂ : M → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₂ : M,\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow₂ : ℕ → M → M,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\th_mul : monoid.mul = monoid.mul\\t⊢ {mul := mul₁, mul_assoc := m₁_mul_assoc, one := one₁, one_mul := one_mul₁, mul_one := mul_one₁, npow := npow₁, npow_zero\\' := npow_zero₁, npow_succ\\' := npow_succ₁} = {mul := mul₂, mul_assoc := m₂_mul_assoc, one := one₂, one_mul := one_mul₂, mul_one := mul_one₂, npow := npow₂, npow_zero\\' := npow_zero₂, npow_succ\\' := npow_succ₂}\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tmul₂ : M → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₂ : M,\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow₂ : ℕ → M → M,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\th_mul : mul₁ = mul₂\\t⊢ {mul := mul₁, mul_assoc := m₁_mul_assoc, one := one₁, one_mul := one_mul₁, mul_one := mul_one₁, npow := npow₁, npow_zero\\' := npow_zero₁, npow_succ\\' := npow_succ₁} = {mul := mul₂, mul_assoc := m₂_mul_assoc, one := one₂, one_mul := one_mul₂, mul_one := mul_one₂, npow := npow₂, npow_zero\\' := npow_zero₂, npow_succ\\' := npow_succ₂}\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" 
name.anonymous),\\tone₂ : M,\\tnpow₂ : ℕ → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous)\\t⊢ {mul := mul₁, mul_assoc := m₁_mul_assoc, one := one₁, one_mul := one_mul₁, mul_one := mul_one₁, npow := npow₁, npow_zero\\' := npow_zero₁, npow_succ\\' := npow_succ₁} = {mul := mul₁, mul_assoc := m₂_mul_assoc, one := one₂, one_mul := one_mul₂, mul_one := mul_one₂, npow := npow₂, npow_zero\\' := npow_zero₂, npow_succ\\' := npow_succ₂}\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tone₂ : M,\\tnpow₂ : ℕ → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous)\\t⊢ one₁ = one₂\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tone₂ : M,\\tnpow₂ : ℕ → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous)\\t⊢ one₁ = one₂\\t\\tM : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tone₂ : M,\\tnpow₂ : ℕ → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\th_one : one₁ = one₂\\t⊢ {mul := mul₁, mul_assoc := m₁_mul_assoc, one := one₁, one_mul := one_mul₁, mul_one := mul_one₁, npow := npow₁, npow_zero\\' := npow_zero₁, npow_succ\\' := npow_succ₁} = {mul := mul₁, mul_assoc := m₂_mul_assoc, one := one₂, one_mul := 
one_mul₂, mul_one := mul_one₂, npow := npow₂, npow_zero\\' := npow_zero₂, npow_succ\\' := npow_succ₂}\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tone₂ : M,\\tnpow₂ : ℕ → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous)\\t⊢ 1 * one₁ = one₂\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tone₂ : M,\\tnpow₂ : ℕ → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\th_one : one₁ = one₂\\t⊢ {mul := mul₁, mul_assoc := m₁_mul_assoc, one := one₁, one_mul := one_mul₁, mul_one := mul_one₁, npow := npow₁, npow_zero\\' := npow_zero₁, npow_succ\\' := npow_succ₁} = {mul := mul₁, mul_assoc := m₂_mul_assoc, one := one₂, one_mul := one_mul₂, mul_one := mul_one₂, npow := npow₂, npow_zero\\' := npow_zero₂, npow_succ\\' := npow_succ₂}\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow₂ : ℕ → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous)\\t⊢ {mul := mul₁, mul_assoc := m₁_mul_assoc, one := one₁, one_mul := one_mul₁, mul_one := mul_one₁, npow := npow₁, npow_zero\\' := npow_zero₁, npow_succ\\' := npow_succ₁} = {mul := mul₁, mul_assoc := m₂_mul_assoc, one := one₁, one_mul := one_mul₂, mul_one := mul_one₂, npow := npow₂, npow_zero\\' := npow_zero₂, npow_succ\\' := npow_succ₂}\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = 
a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow₂ : ℕ → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous)\\t⊢ npow₁ = npow₂\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow₂ : ℕ → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tn : ℕ,\\tx : M\\t⊢ npow₁ n x = npow₂ n x\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow₂ : ℕ → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tx : M\\t⊢ npow₁ 0 x = npow₂ 0 x\\t\\tcase nat.succ\\tM : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow₂ : ℕ → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tx : M,\\td : ℕ,\\thd : npow₁ d x = npow₂ d x\\t⊢ npow₁ d.succ x = npow₂ d.succ x\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : 
auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow₂ : ℕ → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tx : M\\t⊢ npow₁ 0 x = npow₂ 0 x\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow₂ : ℕ → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous)\\t⊢ npow₁ = npow₂\\t\\tM : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow₂ : ℕ → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\th_npow : npow₁ = npow₂\\t⊢ {mul := mul₁, mul_assoc := m₁_mul_assoc, one := one₁, one_mul := one_mul₁, mul_one := mul_one₁, npow := npow₁, npow_zero\\' := npow_zero₁, npow_succ\\' := npow_succ₁} = {mul := mul₁, mul_assoc := m₂_mul_assoc, one := one₁, one_mul := one_mul₂, mul_one := mul_one₂, npow := npow₂, npow_zero\\' := npow_zero₂, npow_succ\\' := npow_succ₂}\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow₂ : ℕ → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tx : M,\\td : ℕ,\\thd : npow₁ d x = npow₂ d x\\t⊢ npow₁ d.succ 
x = npow₂ d.succ x\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tmul₁ : M → M → M,\\tm₁_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone₁ : M,\\tone_mul₁ : ∀ (a : M), 1 * a = a,\\tmul_one₁ : ∀ (a : M), a * 1 = a,\\tnpow₁ : ℕ → M → M,\\tnpow_zero₁ : auto_param (∀ (x : M), npow₁ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₁ : auto_param (∀ (n : ℕ) (x : M), npow₁ n.succ x = x * npow₁ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow₂ : ℕ → M → M,\\tm₂_mul_assoc : ∀ (a b c : M), a * b * c = a * (b * c),\\tone_mul₂ : ∀ (a : M), 1 * a = a,\\tmul_one₂ : ∀ (a : M), a * 1 = a,\\tnpow_zero₂ : auto_param (∀ (x : M), npow₂ 0 x = 1) (name.mk_string \"try_refl_tac\" name.anonymous),\\tnpow_succ₂ : auto_param (∀ (n : ℕ) (x : M), npow₂ n.succ x = x * npow₂ n x) (name.mk_string \"try_refl_tac\" name.anonymous),\\th_npow : npow₁ = npow₂\\t⊢ {mul := mul₁, mul_assoc := m₁_mul_assoc, one := one₁, one_mul := one_mul₁, mul_one := mul_one₁, npow := npow₁, npow_zero\\' := npow_zero₁, npow_succ\\' := npow_succ₁} = {mul := mul₁, mul_assoc := m₂_mul_assoc, one := one₁, one_mul := one_mul₂, mul_one := mul_one₂, npow := npow₂, npow_zero\\' := npow_zero₂, npow_succ\\' := npow_succ₂}\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tN : Type u_2,\\tA : Type u_3,\\tmM : mul_one_class M,\\tgN : mul_one_class N,\\tgA : comm_group A,\\tφ : N →* A,\\tψ : M →* N,\\tx : M\\t⊢ ⇑(φ⁻¹.comp ψ) x = ⇑(φ.comp ψ)⁻¹ x\\n ',\n", + " '<|startoftext|> M : Type u_1,\\tN : Type u_2,\\tA : Type u_3,\\tmM : mul_one_class M,\\tgN : mul_one_class N,\\tgA : comm_group A,\\tφ : N →* A,\\tψ : M →* N\\t⊢ φ⁻¹.comp ψ = (φ.comp ψ)⁻¹\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), monoid_with_zero (f i)\\t⊢ monoid_with_zero (Π (i : I), f i)\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), monoid_with_zero (f i)\\t⊢ monoid_with_zero (Π (i : I), f i)\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), monoid_with_zero (f i)\\t⊢ ∀ (a b c : Π (i : I), f i), a * b * c = a * (b * c)\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), monoid_with_zero (f i)\\t⊢ ∀ (a : Π (i : I), f i), 1 * a = a\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), monoid_with_zero (f i)\\t⊢ ∀ (a : Π (i : I), f i), a * 1 = a\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), monoid_with_zero (f i)\\t⊢ ∀ (x : Π (i : I), f i), (λ (i : I), monoid.npow 0 (x i)) = 1\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), monoid_with_zero (f i)\\t⊢ ∀ (n : ℕ) (x : Π (i : I), f i), (λ (i : I), monoid.npow n.succ (x i)) = x * λ (i : I), monoid.npow n (x i)\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), monoid_with_zero (f i)\\t⊢ ∀ (a : Π (i : I), f i), 0 * a = 0\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), monoid_with_zero (f i)\\t⊢ ∀ (a : Π (i : I), f i), a * 0 = 0\\n ',\n", + " '<|startoftext|> M : Type u_5,\\tN : Type u_6,\\t_inst_1 : monoid M,\\t_inst_2 : monoid N,\\tu : units M × units N\\t⊢ (↑(u.fst), ↑(u.snd)) * (↑(u.fst)⁻¹, ↑(u.snd)⁻¹) = 1\\n ',\n", + " '<|startoftext|> M : Type u_5,\\tN : Type u_6,\\t_inst_1 : monoid M,\\t_inst_2 : monoid N,\\tu : units M × units N\\t⊢ (↑(u.fst)⁻¹, ↑(u.snd)⁻¹) * (↑(u.fst), ↑(u.snd)) = 1\\n ',\n", + " '<|startoftext|> M : Type u_5,\\tN : Type u_6,\\t_inst_1 : monoid M,\\t_inst_2 : monoid N,\\tu : units (M 
× N)\\t⊢ (λ (u : units M × units N), {val := (↑(u.fst), ↑(u.snd)), inv := (↑(u.fst)⁻¹, ↑(u.snd)⁻¹), val_inv := _, inv_val := _}) (⇑((units.map (monoid_hom.fst M N)).prod (units.map (monoid_hom.snd M N))) u) = u\\n ',\n", + " '<|startoftext|> M : Type u_5,\\tN : Type u_6,\\t_inst_1 : monoid M,\\t_inst_2 : monoid N,\\t_x : units M × units N,\\t_fun_match : ∀ (_a : units M × units N), ⇑((units.map (monoid_hom.fst M N)).prod (units.map (monoid_hom.snd M N))) ((λ (u : units M × units N), {val := (↑(u.fst), ↑(u.snd)), inv := (↑(u.fst)⁻¹, ↑(u.snd)⁻¹), val_inv := _, inv_val := _}) _a) = _a,\\tu₁ : units M,\\tu₂ : units N\\t⊢ ⇑((units.map (monoid_hom.fst M N)).prod (units.map (monoid_hom.snd M N))) ((λ (u : units M × units N), {val := (↑(u.fst), ↑(u.snd)), inv := (↑(u.fst)⁻¹, ↑(u.snd)⁻¹), val_inv := _, inv_val := _}) (u₁, u₂)) = (u₁, u₂)\\n ',\n", + " '<|startoftext|> M : Type u_5,\\t_inst_4 : monoid M,\\tA : Type u_6,\\t_inst_5 : add_monoid A,\\t_inst_6 : distrib_mul_action M A,\\tB : Type u_8,\\t_inst_9 : add_monoid B,\\t_inst_10 : distrib_mul_action M B,\\tf g : A →+[M] B,\\th : ↑f = ↑g,\\ta : A\\t⊢ ⇑f a = ⇑g a\\n ',\n", + " '<|startoftext|> M : Type u_5,\\t_inst_4 : monoid M,\\tA : Type u_6,\\t_inst_5 : add_monoid A,\\t_inst_6 : distrib_mul_action M A,\\tB : Type u_8,\\t_inst_9 : add_monoid B,\\t_inst_10 : distrib_mul_action M B,\\tf g : A →+[M] B,\\th : ↑f = ↑g\\t⊢ f = g\\n ',\n", + " '<|startoftext|> M : Type u_5,\\t_inst_4 : monoid M,\\tR : Type u_11,\\t_inst_15 : semiring R,\\t_inst_16 : mul_semiring_action M R,\\tS : Type u_13,\\t_inst_19 : semiring S,\\t_inst_20 : mul_semiring_action M S,\\tf : R →+*[M] S,\\tx : R\\t⊢ ⇑((mul_semiring_action_hom.id M).comp f) x = ⇑f x\\n ',\n", + " '<|startoftext|> R : Type u₁,\\t_inst_1 : integral_domain R,\\ta b : R,\\th : a ^ 2 = b ^ 2\\t⊢ a = b ∨ a = -b\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\ta b : R\\t⊢ a ^ 4 + 4 * b ^ 4 = (a ^ 2 - 2 * a * b + 2 * b ^ 2) * (a ^ 2 + 2 * a * b + 2 * b ^ 2)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_2 : linear_ordered_add_comm_group α,\\tn : ℤ,\\ta : α\\t⊢ abs (n • a) = abs n • abs a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_2 : linear_ordered_add_comm_group α,\\tn : ℤ,\\ta : α,\\tn0 : 0 ≤ n\\t⊢ abs (n • a) = abs n • abs a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_2 : linear_ordered_add_comm_group α,\\tn : ℤ,\\ta : α,\\tn0 : 0 ≤ n\\t⊢ abs (n • a) = abs n • abs a\\t\\tα : Type u_1,\\t_inst_2 : linear_ordered_add_comm_group α,\\tn : ℤ,\\ta : α,\\tn0 : ¬0 ≤ n\\t⊢ abs (n • a) = abs n • abs a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_2 : linear_ordered_add_comm_group α,\\ta : α,\\tn : ℕ\\t⊢ abs (↑n • a) = abs ↑n • abs a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_2 : linear_ordered_add_comm_group α,\\tn : ℤ,\\ta : α,\\tn0 : ¬0 ≤ n\\t⊢ abs (n • a) = abs n • abs a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_2 : linear_ordered_add_comm_group α,\\tn : ℤ,\\ta : α,\\tn0 : ¬0 ≤ n,\\tm : ℕ,\\th : ↑m = -n\\t⊢ abs (n • a) = abs n • abs a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_2 : linear_ordered_add_comm_group α,\\tn : ℤ,\\ta : α,\\tn0 : ¬0 ≤ n,\\tm : ℕ,\\th : ↑m = -n\\t⊢ abs (m • a) = m • abs a\\n ',\n", + " '<|startoftext|> R : Type u₁,\\t_inst_1 : semiring R,\\ta b : R,\\tn : ℕ\\t⊢ n • (a * b) = a * n • b\\n ',\n", + " '<|startoftext|> n : ℤ\\t⊢ n • 1 = n\\n ',\n", + " '<|startoftext|> M : Type u_1,\\t_inst_5 : comm_monoid M,\\tm : M,\\tn : ℕ,\\th : 0 < n\\t⊢ is_unit (m ^ n) ↔ is_unit m\\n ',\n", + " '<|startoftext|> M : Type 
u_1,\\t_inst_5 : comm_monoid M,\\tm : M,\\tp : ℕ,\\th : 0 < p.succ\\t⊢ is_unit (m ^ p.succ) ↔ is_unit m\\n ',\n", + " '<|startoftext|> M : Type u_1,\\t_inst_5 : comm_monoid M,\\tm : M,\\tp : ℕ,\\th : 0 < p.succ,\\th : is_unit (m ^ p.succ)\\t⊢ is_unit m\\n ',\n", + " \"<|startoftext|> M : Type u_1,\\t_inst_5 : comm_monoid M,\\tm : M,\\tp : ℕ,\\th : 0 < p.succ,\\tk k' : M,\\thk : k * k' = 1,\\thk' : k' * k = 1,\\th : ↑{val := k, inv := k', val_inv := hk, inv_val := hk'} = m ^ p.succ\\t⊢ is_unit m\\n \",\n", + " \"<|startoftext|> M : Type u_1,\\t_inst_5 : comm_monoid M,\\tm : M,\\tp : ℕ,\\th : 0 < p.succ,\\tk k' : M,\\thk : k * k' = 1,\\thk' : k' * k = 1,\\th : k = m ^ p.succ\\t⊢ is_unit m\\n \",\n", + " \"<|startoftext|> M : Type u_1,\\t_inst_5 : comm_monoid M,\\tm : M,\\tp : ℕ,\\th : 0 < p.succ,\\tk k' : M,\\thk : k * k' = 1,\\thk' : k' * k = 1,\\th : k = m ^ p.succ\\t⊢ m * (m ^ p * k') = 1\\t\\tM : Type u_1,\\t_inst_5 : comm_monoid M,\\tm : M,\\tp : ℕ,\\th : 0 < p.succ,\\tk k' : M,\\thk : k * k' = 1,\\thk' : k' * k = 1,\\th : k = m ^ p.succ\\t⊢ m ^ p * k' * m = 1\\t\\tM : Type u_1,\\t_inst_5 : comm_monoid M,\\tm : M,\\tp : ℕ,\\th : 0 < p.succ,\\tk k' : M,\\thk : k * k' = 1,\\thk' : k' * k = 1,\\th : k = m ^ p.succ\\t⊢ ↑{val := m, inv := m ^ p * k', val_inv := ?m_1, inv_val := ?m_2} = m\\n \",\n", + " \"<|startoftext|> M : Type u_1,\\t_inst_5 : comm_monoid M,\\tm : M,\\tp : ℕ,\\th : 0 < p.succ,\\tk k' : M,\\thk : k * k' = 1,\\thk' : k' * k = 1,\\th : k = m ^ p.succ\\t⊢ m * (m ^ p * k') = 1\\n \",\n", + " \"<|startoftext|> M : Type u_1,\\t_inst_5 : comm_monoid M,\\tm : M,\\tp : ℕ,\\th : 0 < p.succ,\\tk k' : M,\\thk : k * k' = 1,\\thk' : k' * k = 1,\\th : k = m ^ p.succ\\t⊢ m ^ p * k' * m = 1\\t\\tM : Type u_1,\\t_inst_5 : comm_monoid M,\\tm : M,\\tp : ℕ,\\th : 0 < p.succ,\\tk k' : M,\\thk : k * k' = 1,\\thk' : k' * k = 1,\\th : k = m ^ p.succ\\t⊢ ↑{val := m, inv := m ^ p * k', val_inv := _, inv_val := ?m_1} = m\\n \",\n", + " \"<|startoftext|> M : Type u_1,\\t_inst_5 : comm_monoid M,\\tm : M,\\tp : ℕ,\\th : 0 < p.succ,\\tk k' : M,\\thk : k * k' = 1,\\thk' : k' * k = 1,\\th : k = m ^ p.succ\\t⊢ m ^ p * k' * m = 1\\n \",\n", + " '<|startoftext|> x : ℤ,\\th : 1 < x.nat_abs\\t⊢ function.injective (has_pow.pow x)\\n ',\n", + " '<|startoftext|> x : ℤ,\\th : 1 < x.nat_abs,\\tthis : function.injective (int.nat_abs ∘ has_pow.pow x)\\t⊢ function.injective (has_pow.pow x)\\t\\tx : ℤ,\\th : 1 < x.nat_abs\\t⊢ function.injective (int.nat_abs ∘ has_pow.pow x)\\n ',\n", + " '<|startoftext|> x : ℤ,\\th : 1 < x.nat_abs,\\tthis : function.injective (int.nat_abs ∘ has_pow.pow x)\\t⊢ function.injective (has_pow.pow x)\\n ',\n", + " '<|startoftext|> x : ℤ,\\th : 1 < x.nat_abs\\t⊢ function.injective (int.nat_abs ∘ has_pow.pow x)\\n ',\n", + " '<|startoftext|> x : ℤ,\\th : 1 < x.nat_abs\\t⊢ int.nat_abs ∘ has_pow.pow x = has_pow.pow x.nat_abs\\n ',\n", + " \"<|startoftext|> M : Type u_1,\\t_inst_5 : comm_monoid M,\\tm : M,\\tp : ℕ,\\th : 0 < p.succ,\\tk k' : M,\\thk : k * k' = 1,\\thk' : k' * k = 1,\\th : k = m ^ p.succ\\t⊢ ↑{val := m, inv := m ^ p * k', val_inv := _, inv_val := _} = m\\n \",\n", + " '<|startoftext|> x : ℤ,\\th : 1 < x.nat_abs,\\tn : ℕ\\t⊢ (int.nat_abs ∘ has_pow.pow x) n = x.nat_abs ^ n\\n ',\n", + " \"<|startoftext|> M₀ : Type u_1,\\tM₀' : Type u_3,\\t_inst_1 : has_mul M₀,\\t_inst_2 : has_zero M₀,\\t_inst_3 : has_mul M₀',\\t_inst_4 : has_zero M₀',\\t_inst_5 : no_zero_divisors M₀',\\tf : M₀ → M₀',\\thf : function.injective f,\\tzero : f 0 = 0,\\tmul : ∀ (x y : M₀), f (x * y) = f x * f y,\\tx y : 
M₀,\\tH : x * y = 0\\t⊢ f x * f y = 0\\n \",\n", + " \"<|startoftext|> M₀ : Type u_1,\\tM₀' : Type u_3,\\t_inst_1 : has_mul M₀,\\t_inst_2 : has_zero M₀,\\t_inst_3 : has_mul M₀',\\t_inst_4 : has_zero M₀',\\t_inst_5 : no_zero_divisors M₀',\\tf : M₀ → M₀',\\thf : function.injective f,\\tzero : f 0 = 0,\\tmul : ∀ (x y : M₀), f (x * y) = f x * f y,\\tx y : M₀,\\tH : x * y = 0,\\tthis : f x * f y = 0,\\tH : f x = 0\\t⊢ f x = f 0\\n \",\n", + " \"<|startoftext|> M₀ : Type u_1,\\tM₀' : Type u_3,\\t_inst_1 : has_mul M₀,\\t_inst_2 : has_zero M₀,\\t_inst_3 : has_mul M₀',\\t_inst_4 : has_zero M₀',\\t_inst_5 : no_zero_divisors M₀',\\tf : M₀ → M₀',\\thf : function.injective f,\\tzero : f 0 = 0,\\tmul : ∀ (x y : M₀), f (x * y) = f x * f y,\\tx y : M₀,\\tH : x * y = 0,\\tthis : f x * f y = 0,\\tH : f y = 0\\t⊢ f y = f 0\\n \",\n", + " \"<|startoftext|> M₀ : Type u_1,\\tM₀' : Type u_3,\\t_inst_1 : cancel_monoid_with_zero M₀,\\t_inst_2 : has_zero M₀',\\t_inst_3 : has_mul M₀',\\t_inst_4 : has_one M₀',\\tf : M₀' → M₀,\\thf : function.injective f,\\tzero : f 0 = 0,\\tone : f 1 = 1,\\tmul : ∀ (x y : M₀'), f (x * y) = f x * f y,\\tx y z : M₀',\\thx : x ≠ 0,\\tH : x * y = x * z\\t⊢ f x * f y = f x * f z\\n \",\n", + " \"<|startoftext|> M₀ : Type u_1,\\tM₀' : Type u_3,\\t_inst_1 : cancel_monoid_with_zero M₀,\\t_inst_2 : has_zero M₀',\\t_inst_3 : has_mul M₀',\\t_inst_4 : has_one M₀',\\tf : M₀' → M₀,\\thf : function.injective f,\\tzero : f 0 = 0,\\tone : f 1 = 1,\\tmul : ∀ (x y : M₀'), f (x * y) = f x * f y,\\tx y z : M₀',\\thx : x ≠ 0,\\tH : x * y = x * z\\t⊢ f (x * z) = f (x * z)\\n \",\n", + " \"<|startoftext|> M₀ : Type u_1,\\tM₀' : Type u_3,\\t_inst_1 : cancel_monoid_with_zero M₀,\\t_inst_2 : has_zero M₀',\\t_inst_3 : has_mul M₀',\\t_inst_4 : has_one M₀',\\tf : M₀' → M₀,\\thf : function.injective f,\\tzero : f 0 = 0,\\tone : f 1 = 1,\\tmul : ∀ (x y : M₀'), f (x * y) = f x * f y,\\tx y z : M₀',\\thx : x ≠ 0,\\tH : x * y = x * z\\t⊢ f x * f y = f x * f z\\n \",\n", + " \"<|startoftext|> M₀ : Type u_1,\\tM₀' : Type u_3,\\t_inst_1 : cancel_monoid_with_zero M₀,\\t_inst_2 : has_zero M₀',\\t_inst_3 : has_mul M₀',\\t_inst_4 : has_one M₀',\\tf : M₀' → M₀,\\thf : function.injective f,\\tzero : f 0 = 0,\\tone : f 1 = 1,\\tmul : ∀ (x y : M₀'), f (x * y) = f x * f y,\\tx y z : M₀',\\thx : y ≠ 0,\\tH : x * y = z * y\\t⊢ f x * f y = f z * f y\\n \",\n", + " \"<|startoftext|> M₀ : Type u_1,\\tM₀' : Type u_3,\\t_inst_1 : cancel_monoid_with_zero M₀,\\t_inst_2 : has_zero M₀',\\t_inst_3 : has_mul M₀',\\t_inst_4 : has_one M₀',\\tf : M₀' → M₀,\\thf : function.injective f,\\tzero : f 0 = 0,\\tone : f 1 = 1,\\tmul : ∀ (x y : M₀'), f (x * y) = f x * f y,\\tx y z : M₀',\\thx : y ≠ 0,\\tH : x * y = z * y\\t⊢ f (z * y) = f (z * y)\\n \",\n", + " \"<|startoftext|> M₀ : Type u_1,\\tM₀' : Type u_3,\\t_inst_1 : cancel_monoid_with_zero M₀,\\t_inst_2 : has_zero M₀',\\t_inst_3 : has_mul M₀',\\t_inst_4 : has_one M₀',\\tf : M₀' → M₀,\\thf : function.injective f,\\tzero : f 0 = 0,\\tone : f 1 = 1,\\tmul : ∀ (x y : M₀'), f (x * y) = f x * f y,\\tx y z : M₀',\\thx : y ≠ 0,\\tH : x * y = z * y\\t⊢ f x * f y = f z * f y\\n \",\n", + " \"<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\tx : G₀,\\th : x ≠ 0,\\ty y' : G₀,\\tw : (λ (y : G₀), y * x) y = (λ (y : G₀), y * x) y'\\t⊢ y = y'\\n \",\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\ta b c : G₀,\\thb : b ≠ 0\\t⊢ a = b⁻¹ * c → b * a = c\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\ta b c : G₀,\\thb : b ≠ 0\\t⊢ b * a = c → a = 
b⁻¹ * c\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\tb c : G₀,\\thb : b ≠ 0\\t⊢ b * (b⁻¹ * c) = c\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\ta b c : G₀,\\thb : b ≠ 0\\t⊢ a = b⁻¹ * c ↔ b * a = c\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\ta b : G₀,\\thb : b ≠ 0\\t⊢ a = b⁻¹ * (b * a)\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\ta b c : G₀,\\thb : b ≠ 0\\t⊢ a = b⁻¹ * c ↔ b * a = c\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\ta b c : G₀,\\thb : b ≠ 0\\t⊢ a = b⁻¹ * c ↔ b * a = c\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\tx y : G₀,\\thx : x = 0\\t⊢ (x * y)⁻¹ = y⁻¹ * x⁻¹\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\tx y : G₀,\\thx : x = 0\\t⊢ (x * y)⁻¹ = y⁻¹ * x⁻¹\\t\\tG₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\tx y : G₀,\\thx : ¬x = 0\\t⊢ (x * y)⁻¹ = y⁻¹ * x⁻¹\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\tx y : G₀\\t⊢ (x * y)⁻¹ = y⁻¹ * x⁻¹\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\tx y : G₀,\\thx : ¬x = 0,\\thy : y = 0\\t⊢ (x * y)⁻¹ = y⁻¹ * x⁻¹\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\tx y : G₀,\\thx : ¬x = 0,\\thy : y = 0\\t⊢ (x * y)⁻¹ = y⁻¹ * x⁻¹\\t\\tG₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\tx y : G₀,\\thx : ¬x = 0,\\thy : ¬y = 0\\t⊢ (x * y)⁻¹ = y⁻¹ * x⁻¹\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\tx y : G₀,\\thx : ¬x = 0\\t⊢ (x * y)⁻¹ = y⁻¹ * x⁻¹\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\tx y : G₀,\\thx : ¬x = 0,\\thy : ¬y = 0\\t⊢ (x * y)⁻¹ = y⁻¹ * x⁻¹\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\tx y : G₀,\\thx : ¬x = 0,\\thy : ¬y = 0\\t⊢ y⁻¹ * x⁻¹ = (x * y)⁻¹\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\tx y : G₀,\\thx : ¬x = 0,\\thy : ¬y = 0\\t⊢ y⁻¹ * x⁻¹ * (x * y) = 1\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\ta : G₀,\\th : a ≠ 0\\t⊢ 1 / a * a = 1\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\ta : G₀\\t⊢ 1 / (1 / a) = a\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : group_with_zero G₀,\\ta b c : G₀,\\thc : c ≠ 0\\t⊢ a / c = b / c ↔ a = b\\n ',\n", + " '<|startoftext|> G₀ : Type u_2,\\t_inst_1 : comm_group_with_zero G₀,\\ta b c : G₀\\t⊢ b / c * a = b * a / c\\n ',\n", + " '<|startoftext|> M : Type u_1,\\t_inst_1 : monoid_with_zero M,\\t_inst_2 : nontrivial M,\\tn : ℕ\\t⊢ 0 ^ n = 0 → 0 < n\\n ',\n", + " '<|startoftext|> M : Type u_1,\\t_inst_1 : monoid_with_zero M,\\t_inst_2 : nontrivial M,\\tn : ℕ\\t⊢ 0 < n → 0 ^ n = 0\\n ',\n", + " '<|startoftext|> M : Type u_1,\\t_inst_1 : monoid_with_zero M,\\t_inst_2 : nontrivial M,\\tn : ℕ\\t⊢ 0 ^ n = 0 ↔ 0 < n\\n ',\n", + " '<|startoftext|> M : Type u_1,\\t_inst_1 : monoid_with_zero M,\\t_inst_2 : nontrivial M,\\tn : ℕ\\t⊢ 0 ^ n = 0 ↔ 0 < n\\n ',\n", + " '<|startoftext|> M : Type u_1,\\t_inst_1 : monoid_with_zero M,\\t_inst_2 : nontrivial M,\\tn : ℕ,\\th : 0 ^ n = 0\\t⊢ n ≠ 0\\n ',\n", + " '<|startoftext|> M : Type u_1,\\t_inst_1 : monoid_with_zero M,\\t_inst_2 : nontrivial M,\\th : 0 ^ 0 = 0\\t⊢ false\\n ',\n", + " '<|startoftext|> M : Type u_1,\\t_inst_1 : monoid_with_zero M,\\t_inst_2 : nontrivial M,\\tn : ℕ,\\th : 0 ^ n = 0\\t⊢ 0 < n\\t\\tM : Type u_1,\\t_inst_1 : 
monoid_with_zero M,\\t_inst_2 : nontrivial M,\\tn : ℕ,\\th : 0 < n\\t⊢ 0 ^ n = 0\\n ',\n", + " '<|startoftext|> M : Type u_1,\\t_inst_1 : monoid_with_zero M,\\t_inst_2 : nontrivial M,\\tn : ℕ,\\th : 0 ^ n = 0\\t⊢ 0 < n\\n ',\n", + " '<|startoftext|> M : Type u_1,\\t_inst_1 : monoid_with_zero M,\\t_inst_2 : nontrivial M,\\tn : ℕ,\\th : 0 < n\\t⊢ 0 ^ n = 0\\n ',\n", + " '<|startoftext|> G₀ : Type u_1,\\t_inst_1 : group_with_zero G₀,\\ta : G₀,\\tinv_fpow : ∀ (n : ℤ), a⁻¹ ^ n = (a ^ n)⁻¹,\\tn : ℕ\\t⊢ a⁻¹ ^ ↑n = (a ^ ↑n)⁻¹\\n ',\n", + " '<|startoftext|> G₀ : Type u_1,\\t_inst_1 : group_with_zero G₀,\\ta : G₀,\\tinv_fpow : ∀ (n : ℤ), a⁻¹ ^ n = (a ^ n)⁻¹,\\tn : ℕ\\t⊢ a⁻¹ ^ -[1+ n] = (a ^ -[1+ n])⁻¹\\n ',\n", + " \"<|startoftext|> β : Type u_1,\\t_inst_1 : add_comm_group β,\\tb : β,\\tV : Type u_2,\\t_inst_2 : category_theory.category V,\\t_inst_3 : category_theory.limits.has_zero_morphisms V,\\tX : category_theory.differential_object (category_theory.graded_object_with_shift b V),\\ti j : β,\\tw : ¬(complex_shape.up' b).rel i j\\t⊢ dite (i + b = j) (λ (h : i + b = j), X.d i ≫ category_theory.eq_to_hom _) (λ (h : ¬i + b = j), 0) = 0\\n \",\n", + " '<|startoftext|> β : Type u_1,\\t_inst_1 : add_comm_group β,\\tb : β,\\tV : Type u_2,\\t_inst_2 : category_theory.category V,\\t_inst_3 : category_theory.limits.has_zero_morphisms V,\\tX : category_theory.differential_object (category_theory.graded_object_with_shift b V),\\ti j : β,\\tw : ¬i + b = j\\t⊢ dite (i + b = j) (λ (h : i + b = j), X.d i ≫ category_theory.eq_to_hom _) (λ (h : ¬i + b = j), 0) = 0\\n ',\n", + " '<|startoftext|> β : Type u_1,\\t_inst_1 : add_comm_group β,\\tb : β,\\tV : Type u_2,\\t_inst_2 : category_theory.category V,\\t_inst_3 : category_theory.limits.has_zero_morphisms V,\\tX : category_theory.differential_object (category_theory.graded_object_with_shift b V),\\ti j k : β,\\thjk : j + b = k,\\thij : i + b = j\\t⊢ dite (i + b = j) (λ (h : i + b = j), X.d i ≫ category_theory.eq_to_hom _) (λ (h : ¬i + b = j), 0) ≫ dite (j + b = k) (λ (h : j + b = k), X.d j ≫ category_theory.eq_to_hom _) (λ (h : ¬j + b = k), 0) = 0\\n ',\n", + " \"<|startoftext|> β : Type u_1,\\t_inst_1 : add_comm_group β,\\tb : β,\\tV : Type u_2,\\t_inst_2 : category_theory.category V,\\t_inst_3 : category_theory.limits.has_zero_morphisms V,\\tX : category_theory.differential_object (category_theory.graded_object_with_shift b V),\\ti j k : β,\\thij : (complex_shape.up' b).rel i j,\\thjk : (complex_shape.up' b).rel j k\\t⊢ dite (i + b = j) (λ (h : i + b = j), X.d i ≫ category_theory.eq_to_hom _) (λ (h : ¬i + b = j), 0) ≫ dite (j + b = k) (λ (h : j + b = k), X.d j ≫ category_theory.eq_to_hom _) (λ (h : ¬j + b = k), 0) = 0\\n \",\n", + " '<|startoftext|> β : Type u_1,\\t_inst_1 : add_comm_group β,\\tb : β,\\tV : Type u_2,\\t_inst_2 : category_theory.category V,\\t_inst_3 : category_theory.limits.has_zero_morphisms V,\\tX : category_theory.differential_object (category_theory.graded_object_with_shift b V),\\ti : β\\t⊢ dite (i + b = i + b) (λ (h : i + b = i + b), X.d i ≫ category_theory.eq_to_hom _) (λ (h : ¬i + b = i + b), 0) ≫ dite (i + b + b = i + b + b) (λ (h : i + b + b = i + b + b), X.d (i + b) ≫ category_theory.eq_to_hom _) (λ (h : ¬i + b + b = i + b + b), 0) = 0\\n ',\n", + " '<|startoftext|> β : Type u_1,\\t_inst_1 : add_comm_group β,\\tb : β,\\tV : Type u_2,\\t_inst_2 : category_theory.category V,\\t_inst_3 : category_theory.limits.has_zero_morphisms V,\\tX : category_theory.differential_object (category_theory.graded_object_with_shift b V),\\ti : β\\t⊢ X.d i ≫ 
X.d (i + b) = 0\\n ',\n", + " \"<|startoftext|> β : Type u_1,\\t_inst_1 : add_comm_group β,\\tb : β,\\tV : Type u_2,\\t_inst_2 : category_theory.category V,\\t_inst_3 : category_theory.limits.has_zero_morphisms V,\\tX Y : category_theory.differential_object (category_theory.graded_object_with_shift b V),\\tf : X ⟶ Y,\\ti j : β,\\th : (complex_shape.up' b).rel i j\\t⊢ f.f i ≫ {X := λ (i : β), Y.X i, d := λ (i j : β), dite (i + b = j) (λ (h : i + b = j), Y.d i ≫ category_theory.eq_to_hom _) (λ (h : ¬i + b = j), 0), shape' := _, d_comp_d' := _}.d i j = {X := λ (i : β), X.X i, d := λ (i j : β), dite (i + b = j) (λ (h : i + b = j), X.d i ≫ category_theory.eq_to_hom _) (λ (h : ¬i + b = j), 0), shape' := _, d_comp_d' := _}.d i j ≫ f.f j\\n \",\n", + " '<|startoftext|> β : Type u_1,\\t_inst_1 : add_comm_group β,\\tb : β,\\tV : Type u_2,\\t_inst_2 : category_theory.category V,\\t_inst_3 : category_theory.limits.has_zero_morphisms V,\\tX Y : category_theory.differential_object (category_theory.graded_object_with_shift b V),\\tf : X ⟶ Y,\\ti j : β,\\th : i + b = j\\t⊢ f.f i ≫ dite (i + b = j) (λ (h : i + b = j), Y.d i ≫ category_theory.eq_to_hom _) (λ (h : ¬i + b = j), 0) = dite (i + b = j) (λ (h : i + b = j), X.d i ≫ category_theory.eq_to_hom _) (λ (h : ¬i + b = j), 0) ≫ f.f j\\n ',\n", + " '<|startoftext|> β : Type u_1,\\t_inst_1 : add_comm_group β,\\tb : β,\\tV : Type u_2,\\t_inst_2 : category_theory.category V,\\t_inst_3 : category_theory.limits.has_zero_morphisms V,\\tX Y : category_theory.differential_object (category_theory.graded_object_with_shift b V),\\tf : X ⟶ Y,\\ti : β\\t⊢ f.f i ≫ dite (i + b = i + b) (λ (h : i + b = i + b), Y.d i ≫ category_theory.eq_to_hom _) (λ (h : ¬i + b = i + b), 0) = dite (i + b = i + b) (λ (h : i + b = i + b), X.d i ≫ category_theory.eq_to_hom _) (λ (h : ¬i + b = i + b), 0) ≫ f.f (i + b)\\n ',\n", + " '<|startoftext|> β : Type u_1,\\t_inst_1 : add_comm_group β,\\tb : β,\\tV : Type u_2,\\t_inst_2 : category_theory.category V,\\t_inst_3 : category_theory.limits.has_zero_morphisms V,\\tX Y : category_theory.differential_object (category_theory.graded_object_with_shift b V),\\tf : X ⟶ Y,\\ti : β\\t⊢ f.f i ≫ Y.d i = X.d i ≫ f.f (i + b)\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_images V,\\tA B C : V,\\tg : B ⟶ C,\\t_inst_3 : category_theory.limits.has_zero_morphisms V,\\t_inst_4 : category_theory.limits.has_equalizers V,\\t_inst_5 : category_theory.exact 0 g\\t⊢ (category_theory.limits.kernel_subobject g).arrow = 0\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_images V,\\tA B C : V,\\tg : B ⟶ C,\\t_inst_3 : category_theory.limits.has_zero_morphisms V,\\t_inst_4 : category_theory.limits.has_equalizers V,\\t_inst_5 : category_theory.exact 0 g\\t⊢ category_theory.limits.factor_thru_image_subobject 0 ≫ image_to_kernel 0 g category_theory.exact.w ≫ (category_theory.limits.kernel_subobject g).arrow = category_theory.limits.factor_thru_image_subobject 0 ≫ image_to_kernel 0 g category_theory.exact.w ≫ 0\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tα : Type u_2,\\t_inst_3 : add_right_cancel_semigroup α,\\t_inst_4 : has_one α,\\t_inst_5 : decidable_eq α,\\tX : α → V,\\td_X : Π (n : α), X n ⟶ X (n + 1),\\tsq_X : ∀ (n : α), d_X n ≫ d_X (n + 1) = 0,\\tY : α → V,\\td_Y : Π (n : α), Y n ⟶ Y (n + 1),\\tsq_Y : ∀ (n : α), d_Y n ≫ d_Y (n + 
1) = 0,\\tf : Π (i : α), X i ⟶ Y i,\\tcomm : ∀ (i : α), f i ≫ d_Y i = d_X i ≫ f (i + 1),\\tn m : α\\t⊢ (complex_shape.up α).rel n m → f n ≫ (cochain_complex.of Y d_Y sq_Y).d n m = (cochain_complex.of X d_X sq_X).d n m ≫ f m\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tα : Type u_2,\\t_inst_3 : add_right_cancel_semigroup α,\\t_inst_4 : has_one α,\\t_inst_5 : decidable_eq α,\\tX : α → V,\\td_X : Π (n : α), X n ⟶ X (n + 1),\\tsq_X : ∀ (n : α), d_X n ≫ d_X (n + 1) = 0,\\tY : α → V,\\td_Y : Π (n : α), Y n ⟶ Y (n + 1),\\tsq_Y : ∀ (n : α), d_Y n ≫ d_Y (n + 1) = 0,\\tf : Π (i : α), X i ⟶ Y i,\\tcomm : ∀ (i : α), f i ≫ d_Y i = d_X i ≫ f (i + 1),\\tn m : α,\\th : n + 1 = m\\t⊢ (complex_shape.up α).rel n m → f n ≫ (cochain_complex.of Y d_Y sq_Y).d n m = (cochain_complex.of X d_X sq_X).d n m ≫ f m\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tα : Type u_2,\\t_inst_3 : add_right_cancel_semigroup α,\\t_inst_4 : has_one α,\\t_inst_5 : decidable_eq α,\\tX : α → V,\\td_X : Π (n : α), X n ⟶ X (n + 1),\\tsq_X : ∀ (n : α), d_X n ≫ d_X (n + 1) = 0,\\tY : α → V,\\td_Y : Π (n : α), Y n ⟶ Y (n + 1),\\tsq_Y : ∀ (n : α), d_Y n ≫ d_Y (n + 1) = 0,\\tf : Π (i : α), X i ⟶ Y i,\\tcomm : ∀ (i : α), f i ≫ d_Y i = d_X i ≫ f (i + 1),\\tn m : α,\\th : n + 1 = m\\t⊢ (complex_shape.up α).rel n m → f n ≫ (cochain_complex.of Y d_Y sq_Y).d n m = (cochain_complex.of X d_X sq_X).d n m ≫ f m\\t\\tV : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tα : Type u_2,\\t_inst_3 : add_right_cancel_semigroup α,\\t_inst_4 : has_one α,\\t_inst_5 : decidable_eq α,\\tX : α → V,\\td_X : Π (n : α), X n ⟶ X (n + 1),\\tsq_X : ∀ (n : α), d_X n ≫ d_X (n + 1) = 0,\\tY : α → V,\\td_Y : Π (n : α), Y n ⟶ Y (n + 1),\\tsq_Y : ∀ (n : α), d_Y n ≫ d_Y (n + 1) = 0,\\tf : Π (i : α), X i ⟶ Y i,\\tcomm : ∀ (i : α), f i ≫ d_Y i = d_X i ≫ f (i + 1),\\tn m : α,\\th : ¬n + 1 = m\\t⊢ (complex_shape.up α).rel n m → f n ≫ (cochain_complex.of Y d_Y sq_Y).d n m = (cochain_complex.of X d_X sq_X).d n m ≫ f m\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tα : Type u_2,\\t_inst_3 : add_right_cancel_semigroup α,\\t_inst_4 : has_one α,\\t_inst_5 : decidable_eq α,\\tX : α → V,\\td_X : Π (n : α), X n ⟶ X (n + 1),\\tsq_X : ∀ (n : α), d_X n ≫ d_X (n + 1) = 0,\\tY : α → V,\\td_Y : Π (n : α), Y n ⟶ Y (n + 1),\\tsq_Y : ∀ (n : α), d_Y n ≫ d_Y (n + 1) = 0,\\tf : Π (i : α), X i ⟶ Y i,\\tcomm : ∀ (i : α), f i ≫ d_Y i = d_X i ≫ f (i + 1),\\tn : α\\t⊢ (complex_shape.up α).rel n (n + 1) → f n ≫ (cochain_complex.of Y d_Y sq_Y).d n (n + 1) = (cochain_complex.of X d_X sq_X).d n (n + 1) ≫ f (n + 1)\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tα : Type u_2,\\t_inst_3 : add_right_cancel_semigroup α,\\t_inst_4 : has_one α,\\t_inst_5 : decidable_eq α,\\tX : α → V,\\td_X : Π (n : α), X n ⟶ X (n + 1),\\tsq_X : ∀ (n : α), d_X n ≫ d_X (n + 1) = 0,\\tY : α → V,\\td_Y : Π (n : α), Y n ⟶ Y (n + 1),\\tsq_Y : ∀ (n : α), d_Y n ≫ d_Y (n + 1) = 0,\\tf : Π (i : α), X i ⟶ Y i,\\tcomm : ∀ (i : α), f i ≫ d_Y i = d_X i ≫ f (i + 1),\\tn m : α,\\th : ¬n + 1 = m\\t⊢ (complex_shape.up α).rel n m → f n ≫ 0 = 0 ≫ f m\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : 
category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tα : Type u_2,\\t_inst_3 : add_right_cancel_semigroup α,\\t_inst_4 : has_one α,\\t_inst_5 : decidable_eq α,\\tX : α → V,\\td_X : Π (n : α), X n ⟶ X (n + 1),\\tsq_X : ∀ (n : α), d_X n ≫ d_X (n + 1) = 0,\\tY : α → V,\\td_Y : Π (n : α), Y n ⟶ Y (n + 1),\\tsq_Y : ∀ (n : α), d_Y n ≫ d_Y (n + 1) = 0,\\tf : Π (i : α), X i ⟶ Y i,\\tcomm : ∀ (i : α), f i ≫ d_Y i = d_X i ≫ f (i + 1),\\tn m : α,\\th : ¬n + 1 = m\\t⊢ (complex_shape.up α).rel n m → f n ≫ (cochain_complex.of Y d_Y sq_Y).d n m = (cochain_complex.of X d_X sq_X).d n m ≫ f m\\n ',\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tX₀ X₁ X₂ : V,\\td₀ : X₀ ⟶ X₁,\\td₁ : X₁ ⟶ X₂,\\ts : d₀ ≫ d₁ = 0,\\tsucc : Π (t : Σ' (X₀ X₁ X₂ : V) (d₀ : X₀ ⟶ X₁) (d₁ : X₁ ⟶ X₂), d₀ ≫ d₁ = 0), Σ' (X₃ : V) (d₂ : t.snd.snd.fst ⟶ X₃), t.snd.snd.snd.snd.fst ≫ d₂ = 0\\t⊢ ite (2 = 1 + 1) (d₁ ≫ 𝟙 X₂) 0 = d₁\\n \",\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tX₀ X₁ X₂ : V,\\td₀ : X₀ ⟶ X₁,\\td₁ : X₁ ⟶ X₂,\\ts : d₀ ≫ d₁ = 0,\\tsucc : Π (t : Σ' (X₀ X₁ X₂ : V) (d₀ : X₀ ⟶ X₁) (d₁ : X₁ ⟶ X₂), d₀ ≫ d₁ = 0), Σ' (X₃ : V) (d₂ : t.snd.snd.fst ⟶ X₃), t.snd.snd.snd.snd.fst ≫ d₂ = 0\\t⊢ (cochain_complex.mk X₀ X₁ X₂ d₀ d₁ s succ).d 1 2 = d₁\\n \",\n", + " '<|startoftext|> ι : Type u_1,\\tV : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tc : complex_shape ι,\\t_inst_3 : category_theory.limits.has_zero_object V,\\t_inst_4 : category_theory.limits.has_kernels V,\\tC₁ C₂ C₃ : homological_complex V c,\\tf : C₁ ⟶ C₂,\\tg : C₂ ⟶ C₃,\\ti : ι\\t⊢ (C₃.cycles i).factor_thru ((C₁.cycles i).arrow ≫ (f ≫ g).f i) _ = (C₂.cycles i).factor_thru ((C₁.cycles i).arrow ≫ f.f i) _ ≫ (C₃.cycles i).factor_thru ((C₂.cycles i).arrow ≫ g.f i) _\\n ',\n", + " '<|startoftext|> ι : Type u_1,\\tV : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tc : complex_shape ι,\\t_inst_3 : category_theory.limits.has_zero_object V,\\t_inst_4 : category_theory.limits.has_kernels V,\\tC₁ C₂ C₃ : homological_complex V c,\\tf : C₁ ⟶ C₂,\\tg : C₂ ⟶ C₃,\\ti : ι\\t⊢ cycles_map (f ≫ g) i = cycles_map f i ≫ cycles_map g i\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\t_inst_3 : category_theory.limits.has_image f,\\tg : B ⟶ C,\\t_inst_4 : category_theory.limits.has_kernel g,\\tw : f ≫ g = 0,\\t_inst_5 : category_theory.limits.has_cokernel (image_to_kernel f g w),\\tD : V,\\tk : ↑(category_theory.limits.kernel_subobject g) ⟶ D,\\tp : image_to_kernel f g w ≫ k = 0\\t⊢ homology.π f g w ≫ homology.desc f g w k p = k\\n ',\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\tg : B ⟶ C,\\tw : f ≫ g = 0,\\tf' : A ⟶ B,\\tg' : B ⟶ C,\\tw' : f' ≫ g' = 0,\\t_inst_3 : category_theory.limits.has_kernels V,\\t_inst_4 : category_theory.limits.has_cokernels V,\\t_inst_5 : category_theory.limits.has_images V,\\t_inst_6 : category_theory.limits.has_image_maps V,\\tpf : f = f',\\tpg : g = g'\\t⊢ homology.map w w' {left := 𝟙 (category_theory.arrow.mk f).left, right := 𝟙 (category_theory.arrow.mk f).right, w' := _} {left := 𝟙 (category_theory.arrow.mk 
g).left, right := 𝟙 (category_theory.arrow.mk g).right, w' := _} rfl ≫ homology.map w' w {left := 𝟙 (category_theory.arrow.mk f').left, right := 𝟙 (category_theory.arrow.mk f').right, w' := _} {left := 𝟙 (category_theory.arrow.mk g').left, right := 𝟙 (category_theory.arrow.mk g').right, w' := _} rfl = 𝟙 (homology f g w)\\n \",\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\tg : B ⟶ C,\\tw : f ≫ g = 0,\\tf' : A ⟶ B,\\tg' : B ⟶ C,\\tw' : f' ≫ g' = 0,\\t_inst_3 : category_theory.limits.has_kernels V,\\t_inst_4 : category_theory.limits.has_cokernels V,\\t_inst_5 : category_theory.limits.has_images V,\\t_inst_6 : category_theory.limits.has_image_maps V,\\tpf : f = f',\\tpg : g = g'\\t⊢ homology.π f g w ≫ homology.map w w' {left := 𝟙 (category_theory.arrow.mk f).left, right := 𝟙 (category_theory.arrow.mk f).right, w' := _} {left := 𝟙 (category_theory.arrow.mk g).left, right := 𝟙 (category_theory.arrow.mk g).right, w' := _} rfl ≫ homology.map w' w {left := 𝟙 (category_theory.arrow.mk f').left, right := 𝟙 (category_theory.arrow.mk f').right, w' := _} {left := 𝟙 (category_theory.arrow.mk g').left, right := 𝟙 (category_theory.arrow.mk g').right, w' := _} rfl = homology.π f g w ≫ 𝟙 (homology f g w)\\n \",\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\tg : B ⟶ C,\\tw : f ≫ g = 0,\\tf' : A ⟶ B,\\tg' : B ⟶ C,\\tw' : f' ≫ g' = 0,\\t_inst_3 : category_theory.limits.has_kernels V,\\t_inst_4 : category_theory.limits.has_cokernels V,\\t_inst_5 : category_theory.limits.has_images V,\\t_inst_6 : category_theory.limits.has_image_maps V,\\tpf : f = f',\\tpg : g = g'\\t⊢ category_theory.limits.kernel_subobject_map ({left := 𝟙 (category_theory.arrow.mk g).left, right := 𝟙 (category_theory.arrow.mk g).right, w' := _} ≫ {left := 𝟙 (category_theory.arrow.mk g').left, right := 𝟙 (category_theory.arrow.mk g').right, w' := _}) ≫ homology.π f g w = homology.π f g w\\n \",\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\tg : B ⟶ C,\\tw : f ≫ g = 0,\\tf' : A ⟶ B,\\tg' : B ⟶ C,\\tw' : f' ≫ g' = 0,\\t_inst_3 : category_theory.limits.has_kernels V,\\t_inst_4 : category_theory.limits.has_cokernels V,\\t_inst_5 : category_theory.limits.has_images V,\\t_inst_6 : category_theory.limits.has_image_maps V,\\tpf : f = f',\\tpg : g = g'\\t⊢ category_theory.limits.kernel_subobject_map ({left := 𝟙 (category_theory.arrow.mk g).left, right := 𝟙 (category_theory.arrow.mk g).right, w' := _} ≫ {left := 𝟙 (category_theory.arrow.mk g').left, right := 𝟙 (category_theory.arrow.mk g').right, w' := _}) = 𝟙 ↑(category_theory.limits.kernel_subobject g)\\n \",\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\tg : B ⟶ C,\\tw : f ≫ g = 0,\\tf' : A ⟶ B,\\tg' : B ⟶ C,\\tw' : f' ≫ g' = 0,\\t_inst_3 : category_theory.limits.has_kernels V,\\t_inst_4 : category_theory.limits.has_cokernels V,\\t_inst_5 : category_theory.limits.has_images V,\\t_inst_6 : category_theory.limits.has_image_maps V,\\tpf : f = f',\\tpg : g = g'\\t⊢ ({left := 𝟙 (category_theory.arrow.mk g).left, right := 𝟙 (category_theory.arrow.mk g).right, w' := _} ≫ {left := 𝟙 (category_theory.arrow.mk g').left, right := 𝟙 (category_theory.arrow.mk 
g').right, w' := _}).left = (𝟙 (category_theory.arrow.mk g)).left\\n \",\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\tg : B ⟶ C,\\tw : f ≫ g = 0,\\tf' : A ⟶ B,\\tg' : B ⟶ C,\\tw' : f' ≫ g' = 0,\\t_inst_3 : category_theory.limits.has_kernels V,\\t_inst_4 : category_theory.limits.has_cokernels V,\\t_inst_5 : category_theory.limits.has_images V,\\t_inst_6 : category_theory.limits.has_image_maps V,\\tpf : f = f',\\tpg : g = g'\\t⊢ ({left := 𝟙 (category_theory.arrow.mk g).left, right := 𝟙 (category_theory.arrow.mk g).right, w' := _} ≫ {left := 𝟙 (category_theory.arrow.mk g').left, right := 𝟙 (category_theory.arrow.mk g').right, w' := _}).right = (𝟙 (category_theory.arrow.mk g)).right\\n \",\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\tg : B ⟶ C,\\tw : f ≫ g = 0,\\tf' : A ⟶ B,\\tg' : B ⟶ C,\\tw' : f' ≫ g' = 0,\\t_inst_3 : category_theory.limits.has_kernels V,\\t_inst_4 : category_theory.limits.has_cokernels V,\\t_inst_5 : category_theory.limits.has_images V,\\t_inst_6 : category_theory.limits.has_image_maps V,\\tpf : f = f',\\tpg : g = g'\\t⊢ {left := 𝟙 (category_theory.arrow.mk g).left, right := 𝟙 (category_theory.arrow.mk g).right, w' := _} ≫ {left := 𝟙 (category_theory.arrow.mk g').left, right := 𝟙 (category_theory.arrow.mk g').right, w' := _} = 𝟙 (category_theory.arrow.mk g)\\n \",\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\tg : B ⟶ C,\\tw : f ≫ g = 0,\\tf' : A ⟶ B,\\tg' : B ⟶ C,\\tw' : f' ≫ g' = 0,\\t_inst_3 : category_theory.limits.has_kernels V,\\t_inst_4 : category_theory.limits.has_cokernels V,\\t_inst_5 : category_theory.limits.has_images V,\\t_inst_6 : category_theory.limits.has_image_maps V,\\tpf : f = f',\\tpg : g = g'\\t⊢ {left := 𝟙 (category_theory.arrow.mk g).left, right := 𝟙 (category_theory.arrow.mk g).right, w' := _} ≫ {left := 𝟙 (category_theory.arrow.mk g').left, right := 𝟙 (category_theory.arrow.mk g').right, w' := _} = 𝟙 (category_theory.arrow.mk g)\\n \",\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\tg : B ⟶ C,\\tw : f ≫ g = 0,\\tf' : A ⟶ B,\\tg' : B ⟶ C,\\tw' : f' ≫ g' = 0,\\t_inst_3 : category_theory.limits.has_kernels V,\\t_inst_4 : category_theory.limits.has_cokernels V,\\t_inst_5 : category_theory.limits.has_images V,\\t_inst_6 : category_theory.limits.has_image_maps V,\\tpf : f = f',\\tpg : g = g'\\t⊢ homology.map w' w {left := 𝟙 (category_theory.arrow.mk f').left, right := 𝟙 (category_theory.arrow.mk f').right, w' := _} {left := 𝟙 (category_theory.arrow.mk g').left, right := 𝟙 (category_theory.arrow.mk g').right, w' := _} rfl ≫ homology.map w w' {left := 𝟙 (category_theory.arrow.mk f).left, right := 𝟙 (category_theory.arrow.mk f).right, w' := _} {left := 𝟙 (category_theory.arrow.mk g).left, right := 𝟙 (category_theory.arrow.mk g).right, w' := _} rfl = 𝟙 (homology f' g' w')\\n \",\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\tg : B ⟶ C,\\tw : f ≫ g = 0,\\tf' : A ⟶ B,\\tg' : B ⟶ C,\\tw' : f' ≫ g' = 0,\\t_inst_3 : category_theory.limits.has_kernels V,\\t_inst_4 : 
category_theory.limits.has_cokernels V,\\t_inst_5 : category_theory.limits.has_images V,\\t_inst_6 : category_theory.limits.has_image_maps V,\\tpf : f = f',\\tpg : g = g'\\t⊢ homology.π f' g' w' ≫ homology.map w' w {left := 𝟙 (category_theory.arrow.mk f').left, right := 𝟙 (category_theory.arrow.mk f').right, w' := _} {left := 𝟙 (category_theory.arrow.mk g').left, right := 𝟙 (category_theory.arrow.mk g').right, w' := _} rfl ≫ homology.map w w' {left := 𝟙 (category_theory.arrow.mk f).left, right := 𝟙 (category_theory.arrow.mk f).right, w' := _} {left := 𝟙 (category_theory.arrow.mk g).left, right := 𝟙 (category_theory.arrow.mk g).right, w' := _} rfl = homology.π f' g' w' ≫ 𝟙 (homology f' g' w')\\n \",\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\tg : B ⟶ C,\\tw : f ≫ g = 0,\\tf' : A ⟶ B,\\tg' : B ⟶ C,\\tw' : f' ≫ g' = 0,\\t_inst_3 : category_theory.limits.has_kernels V,\\t_inst_4 : category_theory.limits.has_cokernels V,\\t_inst_5 : category_theory.limits.has_images V,\\t_inst_6 : category_theory.limits.has_image_maps V,\\tpf : f = f',\\tpg : g = g'\\t⊢ category_theory.limits.kernel_subobject_map ({left := 𝟙 (category_theory.arrow.mk g').left, right := 𝟙 (category_theory.arrow.mk g').right, w' := _} ≫ {left := 𝟙 (category_theory.arrow.mk g).left, right := 𝟙 (category_theory.arrow.mk g).right, w' := _}) ≫ homology.π f' g' w' = homology.π f' g' w'\\n \",\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\tg : B ⟶ C,\\tw : f ≫ g = 0,\\tf' : A ⟶ B,\\tg' : B ⟶ C,\\tw' : f' ≫ g' = 0,\\t_inst_3 : category_theory.limits.has_kernels V,\\t_inst_4 : category_theory.limits.has_cokernels V,\\t_inst_5 : category_theory.limits.has_images V,\\t_inst_6 : category_theory.limits.has_image_maps V,\\tpf : f = f',\\tpg : g = g'\\t⊢ category_theory.limits.kernel_subobject_map ({left := 𝟙 (category_theory.arrow.mk g').left, right := 𝟙 (category_theory.arrow.mk g').right, w' := _} ≫ {left := 𝟙 (category_theory.arrow.mk g).left, right := 𝟙 (category_theory.arrow.mk g).right, w' := _}) = 𝟙 ↑(category_theory.limits.kernel_subobject g')\\n \",\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\tg : B ⟶ C,\\tw : f ≫ g = 0,\\tf' : A ⟶ B,\\tg' : B ⟶ C,\\tw' : f' ≫ g' = 0,\\t_inst_3 : category_theory.limits.has_kernels V,\\t_inst_4 : category_theory.limits.has_cokernels V,\\t_inst_5 : category_theory.limits.has_images V,\\t_inst_6 : category_theory.limits.has_image_maps V,\\tpf : f = f',\\tpg : g = g'\\t⊢ ({left := 𝟙 (category_theory.arrow.mk g').left, right := 𝟙 (category_theory.arrow.mk g').right, w' := _} ≫ {left := 𝟙 (category_theory.arrow.mk g).left, right := 𝟙 (category_theory.arrow.mk g).right, w' := _}).left = (𝟙 (category_theory.arrow.mk g')).left\\n \",\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\tg : B ⟶ C,\\tw : f ≫ g = 0,\\tf' : A ⟶ B,\\tg' : B ⟶ C,\\tw' : f' ≫ g' = 0,\\t_inst_3 : category_theory.limits.has_kernels V,\\t_inst_4 : category_theory.limits.has_cokernels V,\\t_inst_5 : category_theory.limits.has_images V,\\t_inst_6 : category_theory.limits.has_image_maps V,\\tpf : f = f',\\tpg : g = g'\\t⊢ ({left := 𝟙 (category_theory.arrow.mk g').left, right := 𝟙 
(category_theory.arrow.mk g').right, w' := _} ≫ {left := 𝟙 (category_theory.arrow.mk g).left, right := 𝟙 (category_theory.arrow.mk g).right, w' := _}).right = (𝟙 (category_theory.arrow.mk g')).right\\n \",\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\tg : B ⟶ C,\\tw : f ≫ g = 0,\\tf' : A ⟶ B,\\tg' : B ⟶ C,\\tw' : f' ≫ g' = 0,\\t_inst_3 : category_theory.limits.has_kernels V,\\t_inst_4 : category_theory.limits.has_cokernels V,\\t_inst_5 : category_theory.limits.has_images V,\\t_inst_6 : category_theory.limits.has_image_maps V,\\tpf : f = f',\\tpg : g = g'\\t⊢ {left := 𝟙 (category_theory.arrow.mk g').left, right := 𝟙 (category_theory.arrow.mk g').right, w' := _} ≫ {left := 𝟙 (category_theory.arrow.mk g).left, right := 𝟙 (category_theory.arrow.mk g).right, w' := _} = 𝟙 (category_theory.arrow.mk g')\\n \",\n", + " \"<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\tA B C : V,\\tf : A ⟶ B,\\tg : B ⟶ C,\\tw : f ≫ g = 0,\\tf' : A ⟶ B,\\tg' : B ⟶ C,\\tw' : f' ≫ g' = 0,\\t_inst_3 : category_theory.limits.has_kernels V,\\t_inst_4 : category_theory.limits.has_cokernels V,\\t_inst_5 : category_theory.limits.has_images V,\\t_inst_6 : category_theory.limits.has_image_maps V,\\tpf : f = f',\\tpg : g = g'\\t⊢ {left := 𝟙 (category_theory.arrow.mk g').left, right := 𝟙 (category_theory.arrow.mk g').right, w' := _} ≫ {left := 𝟙 (category_theory.arrow.mk g).left, right := 𝟙 (category_theory.arrow.mk g).right, w' := _} = 𝟙 (category_theory.arrow.mk g')\\n \",\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\t_inst_3 : category_theory.limits.has_zero_object V,\\tι : Type u_1,\\t_inst_4 : decidable_eq ι,\\tc : complex_shape ι,\\tj : ι,\\tX Y : V,\\tf : (homological_complex.single V c j).obj X ⟶ (homological_complex.single V c j).obj Y,\\ti : ι,\\th : ¬i = j\\t⊢ f.f i = 0\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\t_inst_3 : category_theory.limits.has_zero_object V,\\tι : Type u_1,\\t_inst_4 : decidable_eq ι,\\tc : complex_shape ι,\\tj : ι,\\tX Y : V,\\tf : (homological_complex.single V c j).obj X ⟶ (homological_complex.single V c j).obj Y,\\ti : ι,\\th : ¬i = j\\t⊢ ((homological_complex.single V c j).obj Y).X i ≅ 0\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\t_inst_3 : category_theory.limits.has_zero_object V,\\tι : Type u_1,\\t_inst_4 : decidable_eq ι,\\tc : complex_shape ι,\\tj : ι,\\tX Y : V,\\tf : (homological_complex.single V c j).obj X ⟶ (homological_complex.single V c j).obj Y,\\ti : ι,\\th : ¬i = j\\t⊢ ite (i = j) Y 0 ≅ 0\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\t_inst_3 : category_theory.limits.has_zero_object V,\\tι : Type u_1,\\t_inst_4 : decidable_eq ι,\\tc : complex_shape ι,\\tj : ι,\\tA : V\\t⊢ ((homological_complex.single V c j).obj A).X j = A\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\t_inst_3 : category_theory.limits.has_zero_object V,\\tι : Type u_1,\\t_inst_4 : decidable_eq ι,\\tc : complex_shape ι,\\tj : ι,\\tX Y : V,\\tf : 
(homological_complex.single V c j).obj X ⟶ (homological_complex.single V c j).obj Y\\t⊢ X = ((homological_complex.single V c j).obj X).X j\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\t_inst_3 : category_theory.limits.has_zero_object V,\\tι : Type u_1,\\t_inst_4 : decidable_eq ι,\\tc : complex_shape ι,\\tj : ι,\\tX Y : V,\\tf : (homological_complex.single V c j).obj X ⟶ (homological_complex.single V c j).obj Y\\t⊢ ((homological_complex.single V c j).obj Y).X j = Y\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\t_inst_3 : category_theory.limits.has_zero_object V,\\tι : Type u_1,\\t_inst_4 : decidable_eq ι,\\tc : complex_shape ι,\\tj : ι,\\tX Y : V,\\tf : (homological_complex.single V c j).obj X ⟶ (homological_complex.single V c j).obj Y\\t⊢ (homological_complex.single V c j).map (category_theory.eq_to_hom _ ≫ f.f j ≫ category_theory.eq_to_hom _) = f\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\t_inst_3 : category_theory.limits.has_zero_object V,\\tι : Type u_1,\\t_inst_4 : decidable_eq ι,\\tc : complex_shape ι,\\tj : ι,\\tX Y : V,\\tf : (homological_complex.single V c j).obj X ⟶ (homological_complex.single V c j).obj Y,\\ti : ι\\t⊢ ((homological_complex.single V c j).map (category_theory.eq_to_hom _ ≫ f.f j ≫ category_theory.eq_to_hom _)).f i = f.f i\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\t_inst_3 : category_theory.limits.has_zero_object V,\\tι : Type u_1,\\t_inst_4 : decidable_eq ι,\\tc : complex_shape ι,\\tj : ι,\\tX Y : V,\\tf : (homological_complex.single V c j).obj X ⟶ (homological_complex.single V c j).obj Y,\\ti : ι\\t⊢ dite (i = j) (λ (h : i = j), category_theory.eq_to_hom _ ≫ (category_theory.eq_to_hom _ ≫ f.f j ≫ category_theory.eq_to_hom _) ≫ category_theory.eq_to_hom _) (λ (h : ¬i = j), 0) = f.f i\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\t_inst_3 : category_theory.limits.has_zero_object V,\\tι : Type u_1,\\t_inst_4 : decidable_eq ι,\\tc : complex_shape ι,\\tX Y : V,\\ti : ι,\\tf : (homological_complex.single V c i).obj X ⟶ (homological_complex.single V c i).obj Y\\t⊢ category_theory.eq_to_hom _ ≫ (category_theory.eq_to_hom _ ≫ f.f i ≫ category_theory.eq_to_hom _) ≫ category_theory.eq_to_hom _ = f.f i\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\t_inst_3 : category_theory.limits.has_zero_object V,\\tι : Type u_1,\\t_inst_4 : decidable_eq ι,\\tc : complex_shape ι,\\tj : ι,\\tX Y : V,\\tf : (homological_complex.single V c j).obj X ⟶ (homological_complex.single V c j).obj Y,\\ti : ι,\\th : i = j\\t⊢ category_theory.eq_to_hom _ ≫ (category_theory.eq_to_hom _ ≫ f.f j ≫ category_theory.eq_to_hom _) ≫ category_theory.eq_to_hom _ = f.f i\\t\\tV : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\t_inst_3 : category_theory.limits.has_zero_object V,\\tι : Type u_1,\\t_inst_4 : decidable_eq ι,\\tc : complex_shape ι,\\tj : ι,\\tX Y : V,\\tf : (homological_complex.single V c j).obj X ⟶ (homological_complex.single V c j).obj Y,\\ti : ι,\\th : ¬i = j\\t⊢ 0 = f.f i\\n 
',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\t_inst_3 : category_theory.limits.has_zero_object V,\\tι : Type u_1,\\t_inst_4 : decidable_eq ι,\\tc : complex_shape ι,\\tj : ι,\\tX Y : V,\\tf : (homological_complex.single V c j).obj X ⟶ (homological_complex.single V c j).obj Y,\\ti : ι,\\th : i = j\\t⊢ category_theory.eq_to_hom _ ≫ (category_theory.eq_to_hom _ ≫ f.f j ≫ category_theory.eq_to_hom _) ≫ category_theory.eq_to_hom _ = f.f i\\n ',\n", + " '<|startoftext|> V : Type u,\\t_inst_1 : category_theory.category V,\\t_inst_2 : category_theory.limits.has_zero_morphisms V,\\t_inst_3 : category_theory.limits.has_zero_object V,\\tι : Type u_1,\\t_inst_4 : decidable_eq ι,\\tc : complex_shape ι,\\tj : ι,\\tX Y : V,\\tf : (homological_complex.single V c j).obj X ⟶ (homological_complex.single V c j).obj Y,\\ti : ι,\\th : ¬i = j\\t⊢ 0 = f.f i\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tM : Type u_4,\\t_inst_1 : mul_one_class M,\\ts : set α,\\tf : α → M,\\ta : α,\\tha : a ∈ s\\t⊢ sᶜ.mul_indicator f a * s.mul_indicator f a = f a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tM : Type u_4,\\t_inst_1 : mul_one_class M,\\ts : set α,\\tf : α → M,\\ta : α,\\tha : a ∉ s\\t⊢ sᶜ.mul_indicator f a * s.mul_indicator f a = f a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tM : Type u_4,\\t_inst_1 : mul_zero_class M,\\ta : α,\\ts : set α,\\tf g : α → M\\t⊢ ite (a ∈ s) (f a * g a) 0 = f a * ite (a ∈ s) (g a) 0\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tM : Type u_4,\\t_inst_1 : mul_zero_class M,\\ta : α,\\ts : set α,\\tf g : α → M,\\th : a ∈ s\\t⊢ f a * g a = f a * g a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tM : Type u_4,\\t_inst_1 : mul_zero_class M,\\ta : α,\\ts : set α,\\tf g : α → M,\\th : a ∈ s\\t⊢ f a * g a = f a * g a\\t\\tα : Type u_1,\\tM : Type u_4,\\t_inst_1 : mul_zero_class M,\\ta : α,\\ts : set α,\\tf g : α → M,\\th : a ∉ s\\t⊢ 0 = f a * 0\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tM : Type u_4,\\t_inst_1 : mul_zero_class M,\\ta : α,\\ts : set α,\\tf g : α → M,\\th : a ∉ s\\t⊢ 0 = f a * 0\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tM : Type u_4,\\t_inst_1 : mul_zero_class M,\\ta : α,\\ts : set α,\\tf g : α → M\\t⊢ s.indicator (λ (a : α), f a * g a) a = f a * s.indicator g a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tι : Sort u_2,\\tM : Type u_3,\\t_inst_3 : complete_lattice M,\\t_inst_4 : has_one M,\\th1 : ⊥ = 1,\\ts : ι → set α,\\tf : α → M,\\tx : α\\t⊢ (⋃ (i : ι), s i).mul_indicator f x = ⨆ (i : ι), (s i).mul_indicator f x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tι : Sort u_2,\\tM : Type u_3,\\t_inst_3 : complete_lattice M,\\t_inst_4 : has_one M,\\th1 : ⊥ = 1,\\ts : ι → set α,\\tf : α → M,\\tx : α,\\thx : x ∈ ⋃ (i : ι), s i\\t⊢ (⋃ (i : ι), s i).mul_indicator f x = ⨆ (i : ι), (s i).mul_indicator f x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tι : Sort u_2,\\tM : Type u_3,\\t_inst_3 : complete_lattice M,\\t_inst_4 : has_one M,\\th1 : ⊥ = 1,\\ts : ι → set α,\\tf : α → M,\\tx : α,\\thx : x ∈ ⋃ (i : ι), s i\\t⊢ f x = ⨆ (i : ι), (s i).mul_indicator f x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tι : Sort u_2,\\tM : Type u_3,\\t_inst_3 : complete_lattice M,\\t_inst_4 : has_one M,\\th1 : ⊥ = 1,\\ts : ι → set α,\\tf : α → M,\\tx : α,\\thx : ∃ (i : ι), x ∈ s i\\t⊢ f x = ⨆ (i : ι), (s i).mul_indicator f x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tι : Sort u_2,\\tM : Type u_3,\\t_inst_3 : complete_lattice M,\\t_inst_4 : has_one M,\\th1 : ⊥ = 1,\\ts : ι → set α,\\tf : α → M,\\tx 
: α,\\thx : ∃ (i : ι), x ∈ s i\\t⊢ f x ≤ ⨆ (i : ι), (s i).mul_indicator f x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tι : Sort u_2,\\tM : Type u_3,\\t_inst_3 : complete_lattice M,\\t_inst_4 : has_one M,\\th1 : ⊥ = 1,\\ts : ι → set α,\\tf : α → M,\\tx : α,\\ti : ι,\\thi : x ∈ s i\\t⊢ f x ≤ ⨆ (i : ι), (s i).mul_indicator f x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tι : Sort u_2,\\tM : Type u_3,\\t_inst_3 : complete_lattice M,\\t_inst_4 : has_one M,\\th1 : ⊥ = 1,\\ts : ι → set α,\\tf : α → M,\\tx : α,\\thx : x ∈ ⋃ (i : ι), s i\\t⊢ (⋃ (i : ι), s i).mul_indicator f x = ⨆ (i : ι), (s i).mul_indicator f x\\t\\tα : Type u_1,\\tι : Sort u_2,\\tM : Type u_3,\\t_inst_3 : complete_lattice M,\\t_inst_4 : has_one M,\\th1 : ⊥ = 1,\\ts : ι → set α,\\tf : α → M,\\tx : α,\\thx : x ∉ ⋃ (i : ι), s i\\t⊢ (⋃ (i : ι), s i).mul_indicator f x = ⨆ (i : ι), (s i).mul_indicator f x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tι : Sort u_2,\\tM : Type u_3,\\t_inst_3 : complete_lattice M,\\t_inst_4 : has_one M,\\th1 : ⊥ = 1,\\ts : ι → set α,\\tf : α → M,\\tx : α,\\thx : x ∉ ⋃ (i : ι), s i\\t⊢ (⋃ (i : ι), s i).mul_indicator f x = ⨆ (i : ι), (s i).mul_indicator f x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tι : Sort u_2,\\tM : Type u_3,\\t_inst_3 : complete_lattice M,\\t_inst_4 : has_one M,\\th1 : ⊥ = 1,\\ts : ι → set α,\\tf : α → M,\\tx : α,\\thx : x ∉ ⋃ (i : ι), s i\\t⊢ 1 = ⨆ (i : ι), (s i).mul_indicator f x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tι : Sort u_2,\\tM : Type u_3,\\t_inst_3 : complete_lattice M,\\t_inst_4 : has_one M,\\th1 : ⊥ = 1,\\ts : ι → set α,\\tf : α → M,\\tx : α,\\thx : ∀ (x_1 : ι), x ∉ s x_1\\t⊢ 1 = ⨆ (i : ι), (s i).mul_indicator f x\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : monoid α,\\ta : α,\\t_x : invertible a,\\t_fun_match : ∀ (_a b : invertible a), _a = b,\\tb : α,\\thba : b * a = 1,\\thab : a * b = 1,\\t_x : invertible a,\\t_fun_match : ∀ (_a : invertible a), {inv_of := b, inv_of_mul_self := hba, mul_inv_of_self := hab} = _a,\\tc : α,\\thca : c * a = 1,\\thac : a * c = 1\\t⊢ {inv_of := b, inv_of_mul_self := hba, mul_inv_of_self := hab} = {inv_of := c, inv_of_mul_self := hca, mul_inv_of_self := hac}\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : monoid α,\\ta : α,\\t_x : invertible a,\\t_fun_match : ∀ (_a b : invertible a), _a = b,\\tb : α,\\thba : b * a = 1,\\thab : a * b = 1,\\t_x : invertible a,\\t_fun_match : ∀ (_a : invertible a), {inv_of := b, inv_of_mul_self := hba, mul_inv_of_self := hab} = _a,\\tc : α,\\thca : c * a = 1,\\thac : a * c = 1\\t⊢ b = c\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : monoid α,\\ta b : α,\\t_inst_2 : invertible a,\\t_inst_3 : invertible b\\t⊢ ⅟ b * ⅟ a * (a * b) = 1\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : monoid α,\\ta b : α,\\t_inst_2 : invertible a,\\t_inst_3 : invertible b\\t⊢ a * b * (⅟ b * ⅟ a) = 1\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : group_with_zero α,\\ta b : α,\\t_inst_2 : invertible a,\\t_inst_3 : invertible b\\t⊢ b / a * (a / b) = 1\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : group_with_zero α,\\ta b : α,\\t_inst_2 : invertible a,\\t_inst_3 : invertible b\\t⊢ a / b * (b / a) = 1\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : monoid α,\\ta b : α,\\t_inst_2 : invertible b\\t⊢ a * b * ⅟ b = a\\n ',\n", + " '<|startoftext|> R : Type u,\\tL : Type v,\\tM : Type w,\\tN : Type w₁,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\t_inst_4 : add_comm_group M,\\t_inst_5 : add_comm_group N,\\t_inst_7 : module R M,\\t_inst_8 : 
module R N,\\t_inst_10 : lie_ring_module L M,\\t_inst_11 : lie_ring_module L N,\\t_inst_13 : lie_module R L M,\\t_inst_14 : lie_module R L N,\\tf : M →ₗ⁅R,L⁆ N\\t⊢ ∀ {x : L} {m : M}, (-↑f).to_fun ⁅x,m⁆ = ⁅x,(-↑f).to_fun m⁆\\n ',\n", + " '<|startoftext|> R : Type u,\\tX : Type v,\\t_inst_1 : comm_ring R\\t⊢ ∀ (t : R) (x y : free_lie_algebra R X), ⁅x,t • y⁆ = t • ⁅x,y⁆\\n ',\n", + " '<|startoftext|> R : Type u,\\tX : Type v,\\t_inst_1 : comm_ring R,\\tt : R,\\ta c : lib R X\\t⊢ ⁅quot.mk (free_lie_algebra.rel R X) a,t • quot.mk (free_lie_algebra.rel R X) c⁆ = t • ⁅quot.mk (free_lie_algebra.rel R X) a,quot.mk (free_lie_algebra.rel R X) c⁆\\n ',\n", + " '<|startoftext|> R : Type u,\\tX : Type v,\\t_inst_1 : comm_ring R,\\tt : R,\\ta c : lib R X\\t⊢ quot.mk (free_lie_algebra.rel R X) (a • t • c) = quot.mk (free_lie_algebra.rel R X) (t • a • c)\\n ',\n", + " '<|startoftext|> R : Type u,\\tX : Type v,\\t_inst_1 : comm_ring R,\\tx : X\\t⊢ (⇑((⇑(universal_enveloping_algebra.lift R) (⇑(free_lie_algebra.lift R) (free_algebra.ι R))).comp (⇑(free_algebra.lift R) (⇑(universal_enveloping_algebra.ι R) ∘ free_lie_algebra.of R))) ∘ free_algebra.ι R) x = (⇑(alg_hom.id R (free_algebra R X)) ∘ free_algebra.ι R) x\\n ',\n", + " '<|startoftext|> R : Type u,\\tX : Type v,\\t_inst_1 : comm_ring R\\t⊢ (⇑(universal_enveloping_algebra.lift R) (⇑(free_lie_algebra.lift R) (free_algebra.ι R))).comp (⇑(free_algebra.lift R) (⇑(universal_enveloping_algebra.ι R) ∘ free_lie_algebra.of R)) = alg_hom.id R (free_algebra R X)\\n ',\n", + " '<|startoftext|> R : Type u,\\tX : Type v,\\t_inst_1 : comm_ring R,\\tx : X\\t⊢ ⇑(↑((⇑(free_algebra.lift R) (⇑(universal_enveloping_algebra.ι R) ∘ free_lie_algebra.of R)).comp (⇑(universal_enveloping_algebra.lift R) (⇑(free_lie_algebra.lift R) (free_algebra.ι R)))).comp (universal_enveloping_algebra.ι R)) (free_lie_algebra.of R x) = ⇑(↑(alg_hom.id R (universal_enveloping_algebra R (free_lie_algebra R X))).comp (universal_enveloping_algebra.ι R)) (free_lie_algebra.of R x)\\n ',\n", + " '<|startoftext|> R : Type u,\\tX : Type v,\\t_inst_1 : comm_ring R\\t⊢ (⇑(free_algebra.lift R) (⇑(universal_enveloping_algebra.ι R) ∘ free_lie_algebra.of R)).comp (⇑(universal_enveloping_algebra.lift R) (⇑(free_lie_algebra.lift R) (free_algebra.ι R))) = alg_hom.id R (universal_enveloping_algebra R (free_lie_algebra R X))\\n ',\n", + " '<|startoftext|> R : Type u,\\tL : Type v,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\tI I₁ I₂ : lie_ideal R L\\t⊢ lie_ideal.comap I.incl ⁅I ⊓ I₁,I ⊓ I₂⁆ = ?m_1\\n ',\n", + " '<|startoftext|> R : Type u,\\tL : Type v,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\tI I₁ I₂ : lie_ideal R L\\t⊢ I.incl = ?m_1\\t\\tR : Type u,\\tL : Type v,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\tI I₁ I₂ : lie_ideal R L\\t⊢ ⁅I ⊓ I₁,I ⊓ I₂⁆ = ?m_1\\n ',\n", + " '<|startoftext|> R : Type u,\\tL : Type v,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\tI I₁ I₂ : lie_ideal R L\\t⊢ ⁅I ⊓ I₁,I ⊓ I₂⁆ = ?m_1\\n ',\n", + " '<|startoftext|> R : Type u,\\tL : Type v,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\tI I₁ I₂ : lie_ideal R L\\t⊢ ⁅lie_ideal.comap I.incl I₁,lie_ideal.comap I.incl I₂⁆ = lie_ideal.comap I.incl ⁅I ⊓ I₁,I ⊓ I₂⁆\\n ',\n", + " '<|startoftext|> R : Type u,\\tL : Type v,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\tI I₁ I₂ : lie_ideal R L\\t⊢ ⁅lie_ideal.comap I.incl I₁,lie_ideal.comap I.incl 
I₂⁆ = lie_ideal.comap I.incl ⁅I.incl.ideal_range ⊓ I₁,I.incl.ideal_range ⊓ I₂⁆\\n ',\n", + " '<|startoftext|> R : Type u,\\tL : Type v,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\tI I₁ I₂ : lie_ideal R L\\t⊢ I.incl.is_ideal_morphism\\n ',\n", + " '<|startoftext|> R : Type u,\\tL : Type v,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\tI I₁ I₂ : lie_ideal R L\\t⊢ ⁅lie_ideal.comap I.incl I₁,lie_ideal.comap I.incl I₂⁆ = ⁅lie_ideal.comap I.incl I₁,lie_ideal.comap I.incl I₂⁆ ⊔ I.incl.ker\\t\\tR : Type u,\\tL : Type v,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\tI I₁ I₂ : lie_ideal R L\\t⊢ I.incl.is_ideal_morphism\\n ',\n", + " \"<|startoftext|> R : Type u,\\tL : Type v,\\tL' : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\t_inst_4 : lie_ring L',\\t_inst_5 : lie_algebra R L',\\te : L ≃ₗ⁅R⁆ L'\\t⊢ lie_algebra.is_nilpotent R L → lie_algebra.is_nilpotent R L'\\n \",\n", + " \"<|startoftext|> R : Type u,\\tL : Type v,\\tL' : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\t_inst_4 : lie_ring L',\\t_inst_5 : lie_algebra R L',\\te : L ≃ₗ⁅R⁆ L'\\t⊢ lie_algebra.is_nilpotent R L' → lie_algebra.is_nilpotent R L\\n \",\n", + " \"<|startoftext|> R : Type u,\\tL : Type v,\\tL' : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\t_inst_4 : lie_ring L',\\t_inst_5 : lie_algebra R L',\\te : L ≃ₗ⁅R⁆ L'\\t⊢ lie_algebra.is_nilpotent R L ↔ lie_algebra.is_nilpotent R L'\\n \",\n", + " \"<|startoftext|> R : Type u,\\tL : Type v,\\tL' : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\t_inst_4 : lie_ring L',\\t_inst_5 : lie_algebra R L',\\te : L ≃ₗ⁅R⁆ L'\\t⊢ lie_algebra.is_nilpotent R L ↔ lie_algebra.is_nilpotent R L'\\n \",\n", + " \"<|startoftext|> R : Type u,\\tL : Type v,\\tL' : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\t_inst_4 : lie_ring L',\\t_inst_5 : lie_algebra R L',\\te : L ≃ₗ⁅R⁆ L',\\th : lie_algebra.is_nilpotent R L\\t⊢ lie_algebra.is_nilpotent R L'\\t\\tR : Type u,\\tL : Type v,\\tL' : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\t_inst_4 : lie_ring L',\\t_inst_5 : lie_algebra R L',\\te : L ≃ₗ⁅R⁆ L',\\th : lie_algebra.is_nilpotent R L'\\t⊢ lie_algebra.is_nilpotent R L\\n \",\n", + " \"<|startoftext|> R : Type u,\\tL : Type v,\\tL' : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\t_inst_4 : lie_ring L',\\t_inst_5 : lie_algebra R L',\\te : L ≃ₗ⁅R⁆ L',\\th : lie_algebra.is_nilpotent R L\\t⊢ lie_algebra.is_nilpotent R L'\\n \",\n", + " \"<|startoftext|> R : Type u,\\tL : Type v,\\tL' : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\t_inst_4 : lie_ring L',\\t_inst_5 : lie_algebra R L',\\te : L ≃ₗ⁅R⁆ L',\\th : lie_algebra.is_nilpotent R L'\\t⊢ lie_algebra.is_nilpotent R L\\n \",\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R,\\tA : Type v,\\t_inst_8 : ring A,\\t_inst_9 : algebra R A,\\ta b : A\\t⊢ ⇑(⇑(lie_algebra.ad R A) a) b = ⇑((algebra.lmul_left R - algebra.lmul_right R) a) b\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R,\\tA : Type v,\\t_inst_8 : ring A,\\t_inst_9 : algebra R A\\t⊢ ⇑(lie_algebra.ad R A) = algebra.lmul_left R - algebra.lmul_right R\\n ',\n", + " '<|startoftext|> R : Type u,\\tn : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : decidable_eq 
n,\\t_inst_3 : fintype n,\\tJ A B : matrix n n R,\\thA : Aᵀ * J = J * -A,\\thB : Bᵀ * J = J * -B\\t⊢ J * -B * -A - J * -A * -B = J * -(A * B - B * A)\\n ',\n", + " '<|startoftext|> R : Type u,\\tM : Type v,\\t_inst_1 : comm_ring R,\\t_inst_2 : add_comm_group M,\\t_inst_3 : module R M,\\tB : bilin_form R M,\\tN : Type w,\\t_inst_4 : add_comm_group N,\\t_inst_5 : module R N,\\te : N ≃ₗ[R] M\\t⊢ ↥(skew_adjoint_lie_subalgebra (B.comp ↑e ↑e)) ≃ₗ⁅R⁆ ↥(skew_adjoint_lie_subalgebra B)\\n ',\n", + " '<|startoftext|> R : Type u,\\tM : Type v,\\t_inst_1 : comm_ring R,\\t_inst_2 : add_comm_group M,\\t_inst_3 : module R M,\\tB : bilin_form R M,\\tN : Type w,\\t_inst_4 : add_comm_group N,\\t_inst_5 : module R N,\\te : N ≃ₗ[R] M\\t⊢ lie_subalgebra.map ↑(e.lie_conj) (skew_adjoint_lie_subalgebra (B.comp ↑e ↑e)) = skew_adjoint_lie_subalgebra B\\n ',\n", + " '<|startoftext|> R : Type u,\\tM : Type v,\\t_inst_1 : comm_ring R,\\t_inst_2 : add_comm_group M,\\t_inst_3 : module R M,\\tB : bilin_form R M,\\tN : Type w,\\t_inst_4 : add_comm_group N,\\t_inst_5 : module R N,\\te : N ≃ₗ[R] M,\\tf : module.End R M\\t⊢ f ∈ lie_subalgebra.map ↑(e.lie_conj) (skew_adjoint_lie_subalgebra (B.comp ↑e ↑e)) ↔ f ∈ skew_adjoint_lie_subalgebra B\\n ',\n", + " '<|startoftext|> R : Type u,\\tM : Type v,\\t_inst_1 : comm_ring R,\\t_inst_2 : add_comm_group M,\\t_inst_3 : module R M,\\tB : bilin_form R M,\\tN : Type w,\\t_inst_4 : add_comm_group N,\\t_inst_5 : module R N,\\te : N ≃ₗ[R] M,\\tf : module.End R M\\t⊢ ⇑(↑(e.lie_conj).symm) f ∈ ↑(skew_adjoint_lie_subalgebra (B.comp ↑e ↑e)) ↔ f ∈ skew_adjoint_lie_subalgebra B\\n ',\n", + " '<|startoftext|> R : Type u,\\tn : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : decidable_eq n,\\t_inst_3 : fintype n,\\tJ A B : matrix n n R,\\thA : A ∈ skew_adjoint_matrices_submodule J,\\thB : B ∈ skew_adjoint_matrices_submodule J\\t⊢ ⁅A,B⁆ ∈ skew_adjoint_matrices_submodule J\\n ',\n", + " '<|startoftext|> R : Type u,\\tn : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : decidable_eq n,\\t_inst_3 : fintype n,\\tJ A B : matrix n n R,\\thA : J.is_skew_adjoint A,\\thB : J.is_skew_adjoint B\\t⊢ ⁅A,B⁆ᵀ ⬝ J = J ⬝ -⁅A,B⁆\\n ',\n", + " '<|startoftext|> R : Type u,\\tn : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : decidable_eq n,\\t_inst_3 : fintype n,\\tJ A B : matrix n n R,\\thA : J.is_skew_adjoint A,\\thB : J.is_skew_adjoint B\\t⊢ J.is_skew_adjoint ⁅A,B⁆\\n ',\n", + " '<|startoftext|> R : Type u,\\tn : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : decidable_eq n,\\t_inst_3 : fintype n,\\tJ A B : matrix n n R,\\thB : J.is_skew_adjoint B,\\thA : Aᵀ ⬝ J = J ⬝ -A\\t⊢ ⁅A,B⁆ᵀ ⬝ J = J ⬝ -⁅A,B⁆\\n ',\n", + " '<|startoftext|> R : Type u,\\tn : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : decidable_eq n,\\t_inst_3 : fintype n,\\tJ A B : matrix n n R,\\thA : Aᵀ ⬝ J = J ⬝ -A,\\thB : Bᵀ ⬝ J = J ⬝ -B\\t⊢ ⁅A,B⁆ᵀ ⬝ J = J ⬝ -⁅A,B⁆\\n ',\n", + " '<|startoftext|> R : Type u,\\tn : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : decidable_eq n,\\t_inst_3 : fintype n,\\tJ A B : matrix n n R,\\thA : Aᵀ * J = J * -A,\\thB : Bᵀ * J = J * -B\\t⊢ ⁅A,B⁆ᵀ * J = J * -⁅A,B⁆\\n ',\n", + " '<|startoftext|> R : Type u,\\tL₁ : Type v,\\tL₂ : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L₁,\\t_inst_3 : lie_ring L₂,\\t_inst_4 : lie_algebra R L₁,\\t_inst_5 : lie_algebra R L₂,\\tf : L₁ →ₗ⁅R⁆ L₂,\\th : function.injective ⇑f,\\tx y : L₁\\t⊢ (linear_equiv.of_injective ↑f _).to_fun ⁅x,y⁆ = ⁅(linear_equiv.of_injective ↑f _).to_fun x,(linear_equiv.of_injective ↑f _).to_fun y⁆\\n ',\n", + " '<|startoftext|> R : Type u,\\tL₁ : Type v,\\tL₂ : Type 
w,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L₁,\\t_inst_3 : lie_ring L₂,\\t_inst_4 : lie_algebra R L₁,\\t_inst_5 : lie_algebra R L₂,\\tf : L₁ →ₗ⁅R⁆ L₂,\\th : function.injective ⇑f,\\tx y : L₁\\t⊢ ↑((linear_equiv.of_injective ↑f _).to_fun ⁅x,y⁆) = ↑⁅(linear_equiv.of_injective ↑f _).to_fun x,(linear_equiv.of_injective ↑f _).to_fun y⁆\\n ',\n", + " '<|startoftext|> R : Type u,\\tL₁ : Type v,\\tL₂ : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L₁,\\t_inst_3 : lie_ring L₂,\\t_inst_4 : lie_algebra R L₁,\\t_inst_5 : lie_algebra R L₂,\\tf : L₁ →ₗ⁅R⁆ L₂,\\th : function.injective ⇑f\\t⊢ function.injective ⇑↑f\\n ',\n", + " \"<|startoftext|> R : Type u,\\tL : Type v,\\tM : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\t_inst_4 : add_comm_group M,\\t_inst_5 : module R M,\\t_inst_6 : lie_ring_module L M,\\t_inst_7 : lie_module R L M,\\tN N' : lie_submodule R L M,\\th : ↑N = ↑N'\\t⊢ N = N'\\n \",\n", + " \"<|startoftext|> R : Type u,\\tL : Type v,\\tM : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\t_inst_4 : add_comm_group M,\\t_inst_5 : module R M,\\t_inst_6 : lie_ring_module L M,\\t_inst_7 : lie_module R L M,\\tN N' : lie_submodule R L M,\\th : ↑N = ↑N',\\tm : M\\t⊢ m ∈ N ↔ m ∈ N'\\n \",\n", + " \"<|startoftext|> R : Type u,\\tL : Type v,\\tM : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\t_inst_4 : add_comm_group M,\\t_inst_5 : module R M,\\t_inst_6 : lie_ring_module L M,\\t_inst_7 : lie_module R L M,\\tN N' : lie_submodule R L M,\\th : ↑N = ↑N',\\tm : M\\t⊢ m ∈ ↑N' ↔ m ∈ N'\\n \",\n", + " '<|startoftext|> R : Type u,\\tM : Type w,\\t_inst_1 : ring R,\\t_inst_2 : add_comm_group M,\\t_inst_3 : module R M,\\tr : R,\\tx : M\\t⊢ -r • x + r • x = 0\\n ',\n", + " '<|startoftext|> R : Type u,\\tM : Type w,\\t_inst_1 : semiring R,\\t_inst_2 : add_comm_monoid M,\\t_inst_3 : module R M,\\t_inst_4 : no_zero_smul_divisors R M,\\t_inst_5 : char_zero R,\\tc : ℕ,\\tx : M\\t⊢ c • x = 0 → c = 0 ∨ x = 0\\n ',\n", + " '<|startoftext|> R : Type u,\\tM : Type w,\\t_inst_1 : semiring R,\\t_inst_2 : add_comm_monoid M,\\t_inst_3 : module R M,\\t_inst_4 : no_zero_smul_divisors R M,\\t_inst_5 : char_zero R,\\tc : ℕ,\\tx : M\\t⊢ ↑c = 0 ∨ x = 0 → c = 0 ∨ x = 0\\n ',\n", + " '<|startoftext|> R : Type u,\\tM : Type w,\\t_inst_1 : semiring R,\\t_inst_2 : add_comm_monoid M,\\t_inst_3 : module R M,\\t_inst_4 : no_zero_smul_divisors R M,\\t_inst_5 : char_zero R\\t⊢ ∀ {c : ℕ} {x : M}, c • x = 0 → c = 0 ∨ x = 0\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : semiring R,\\tm : ℕ\\t⊢ m • 1 = ↑m\\n ',\n", + " '<|startoftext|> R : Type u_1,\\tM : Type u_7,\\tM₂ : Type u_9,\\t_inst_1 : semiring R,\\t_inst_2 : add_comm_monoid M,\\t_inst_3 : add_comm_monoid M₂,\\t_inst_4 : module R M,\\t_inst_5 : module R M₂,\\tf g : M →+[R] M₂,\\th : ↑f = ↑g,\\tm : M\\t⊢ ⇑f m = ⇑g m\\n ',\n", + " '<|startoftext|> R : Type u_1,\\tM : Type u_7,\\tM₂ : Type u_9,\\t_inst_1 : semiring R,\\t_inst_2 : add_comm_monoid M,\\t_inst_3 : add_comm_monoid M₂,\\t_inst_4 : module R M,\\t_inst_5 : module R M₂,\\tf g : M →+[R] M₂,\\th : ↑f = ↑g\\t⊢ f = g\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : ring R,\\tP : Type (max u v),\\t_inst_2 : add_comm_group P,\\t_inst_3 : module R P,\\tι : Type u_1,\\tb : basis ι R P\\t⊢ module.projective R P\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : ring R,\\tP : Type (max u v),\\t_inst_2 : add_comm_group P,\\t_inst_3 : module R P,\\tι : Type u_1,\\tb : basis ι R P\\t⊢ 
function.left_inverse ⇑(finsupp.total P P R id) ⇑(⇑(b.constr ℕ) (λ (i : ι), finsupp.single (⇑b i) 1))\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : ring R,\\tP : Type (max u v),\\t_inst_2 : add_comm_group P,\\t_inst_3 : module R P,\\tι : Type u_1,\\tb : basis ι R P,\\tm : P\\t⊢ ⇑(finsupp.total P P R id) (⇑(⇑(b.constr ℕ) (λ (i : ι), finsupp.single (⇑b i) 1)) m) = m\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : ring R,\\tP : Type (max u v),\\t_inst_2 : add_comm_group P,\\t_inst_3 : module R P,\\tι : Type u_1,\\tb : basis ι R P,\\tm : P\\t⊢ (⇑(b.repr) m).sum (λ (i : ι) (d : R), d • ⇑b i) = m\\n ',\n", + " '<|startoftext|> R : Type u,\\tM : Type v,\\t_inst_1 : semiring R,\\t_inst_2 : add_comm_monoid M,\\t_inst_3 : module R M,\\tc f : ulift R,\\tg : M\\t⊢ (c + f) • g = c • g + f • g\\n ',\n", + " '<|startoftext|> R : Type u,\\tM : Type v,\\t_inst_1 : semiring R,\\t_inst_2 : add_comm_monoid M,\\t_inst_3 : module R M,\\tf : ulift R,\\tg : M,\\tc : R\\t⊢ ({down := c} + f) • g = {down := c} • g + f • g\\n ',\n", + " '<|startoftext|> R : Type u,\\tM : Type v,\\t_inst_1 : semiring R,\\t_inst_2 : add_comm_monoid M,\\t_inst_3 : module R M,\\tf : M\\t⊢ 0 • f = 0\\n ',\n", + " \"<|startoftext|> k : Type u₁,\\tG : Type u₂,\\tR : Type u_1,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring k,\\t_inst_3 : algebra R k,\\t_inst_4 : add_monoid G,\\tr : R,\\ta : add_monoid_algebra k G\\t⊢ r • a = {to_fun := (add_monoid_algebra.single_zero_ring_hom.comp (algebra_map R k)).to_fun, map_one' := _, map_mul' := _, map_zero' := _, map_add' := _}.to_fun r * a\\n \",\n", + " \"<|startoftext|> k : Type u₁,\\tG : Type u₂,\\tR : Type u_1,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring k,\\t_inst_3 : algebra R k,\\t_inst_4 : add_monoid G,\\tr : R,\\ta : add_monoid_algebra k G,\\ta_1 : G\\t⊢ ⇑(r • a) a_1 = ⇑({to_fun := (add_monoid_algebra.single_zero_ring_hom.comp (algebra_map R k)).to_fun, map_one' := _, map_mul' := _, map_zero' := _, map_add' := _}.to_fun r * a) a_1\\n \",\n", + " \"<|startoftext|> k : Type u₁,\\tG : Type u₂,\\tR : Type u_1,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring k,\\t_inst_3 : algebra R k,\\t_inst_4 : add_monoid G,\\tr : R,\\tf : add_monoid_algebra k G\\t⊢ {to_fun := (add_monoid_algebra.single_zero_ring_hom.comp (algebra_map R k)).to_fun, map_one' := _, map_mul' := _, map_zero' := _, map_add' := _}.to_fun r * f = f * {to_fun := (add_monoid_algebra.single_zero_ring_hom.comp (algebra_map R k)).to_fun, map_one' := _, map_mul' := _, map_zero' := _, map_add' := _}.to_fun r\\n \",\n", + " \"<|startoftext|> k : Type u₁,\\tG : Type u₂,\\tR : Type u_1,\\t_inst_1 : comm_semiring R,\\t_inst_2 : semiring k,\\t_inst_3 : algebra R k,\\t_inst_4 : add_monoid G,\\tr : R,\\tf : add_monoid_algebra k G,\\ta : G\\t⊢ ⇑({to_fun := (add_monoid_algebra.single_zero_ring_hom.comp (algebra_map R k)).to_fun, map_one' := _, map_mul' := _, map_zero' := _, map_add' := _}.to_fun r * f) a = ⇑(f * {to_fun := (add_monoid_algebra.single_zero_ring_hom.comp (algebra_map R k)).to_fun, map_one' := _, map_mul' := _, map_zero' := _, map_add' := _}.to_fun r) a\\n \",\n", + " '<|startoftext|> k : Type u₁,\\tG : Type u₂,\\t_inst_1 : comm_semiring k,\\t_inst_2 : add_monoid G,\\tA : Type u₃,\\t_inst_3 : semiring A,\\t_inst_4 : algebra k A,\\tF : multiplicative G →* A,\\tf : monoid_algebra k G\\t⊢ ⇑(⇑(add_monoid_algebra.lift k G A) F) f = finsupp.sum f (λ (a : G) (b : k), b • ⇑F (⇑multiplicative.of_add a))\\n ',\n", + " '<|startoftext|> k : Type u₁,\\tG : Type u₂,\\t_inst_1 : semiring k,\\t_inst_2 : mul_one_class G,\\tR : 
Type u_1,\\t_inst_3 : semiring R,\\tf : k →+* R,\\tg : G →* R,\\ta b : monoid_algebra k G,\\th_comm : ∀ {x y : G}, y ∈ a.support → commute (⇑f (⇑b x)) (⇑g y)\\t⊢ ⇑(monoid_algebra.lift_nc ↑f g) a * ⇑(monoid_algebra.lift_nc ↑f g) b = ?m_1\\n ',\n", + " '<|startoftext|> k : Type u₁,\\tG : Type u₂,\\t_inst_1 : semiring k,\\t_inst_2 : mul_one_class G,\\tR : Type u_1,\\t_inst_3 : semiring R,\\tf : k →+* R,\\tg : G →* R,\\ta b : monoid_algebra k G,\\th_comm : ∀ {x y : G}, y ∈ a.support → commute (⇑f (⇑b x)) (⇑g y)\\t⊢ ⇑(monoid_algebra.lift_nc ↑f g) (a * b) = ⇑(monoid_algebra.lift_nc ↑f g) a * ⇑(monoid_algebra.lift_nc ↑f g) b\\n ',\n", + " '<|startoftext|> k : Type u₁,\\tG : Type u₂,\\t_inst_1 : semiring k,\\t_inst_2 : mul_one_class G,\\tR : Type u_1,\\t_inst_3 : semiring R,\\tf : k →+* R,\\tg : G →* R,\\ta b : monoid_algebra k G,\\th_comm : ∀ {x y : G}, y ∈ a.support → commute (⇑f (⇑b x)) (⇑g y)\\t⊢ ⇑(monoid_algebra.lift_nc ↑f g) (a * b) = ⇑(monoid_algebra.lift_nc ↑f g) (finsupp.sum a finsupp.single) * ⇑(monoid_algebra.lift_nc ↑f g) (finsupp.sum b finsupp.single)\\n ',\n", + " '<|startoftext|> k : Type u₁,\\tG : Type u₂,\\t_inst_1 : semiring k,\\t_inst_2 : mul_one_class G,\\tR : Type u_1,\\t_inst_3 : semiring R,\\tf : k →+* R,\\tg : G →* R,\\ta b : monoid_algebra k G,\\th_comm : ∀ {x y : G}, y ∈ a.support → commute (⇑f (⇑b x)) (⇑g y)\\t⊢ finsupp.sum a (λ (a : G) (b_1 : k), finsupp.sum b (λ (a_1 : G) (b : k), ⇑↑f (b_1 * b) * ⇑g (a * a_1))) = finsupp.sum a (λ (a : G) (c : k), finsupp.sum b (λ (a_1 : G) (c_1 : k), ⇑↑f c * ⇑g a * (⇑↑f c_1 * ⇑g a_1)))\\n ',\n", + " '<|startoftext|> k : Type u₁,\\tG : Type u₂,\\t_inst_1 : semiring k,\\t_inst_2 : mul_one_class G,\\tR : Type u_1,\\t_inst_3 : semiring R,\\tf : k →+* R,\\tg : G →* R,\\ta b : monoid_algebra k G,\\th_comm : ∀ {x y : G}, y ∈ a.support → commute (⇑f (⇑b x)) (⇑g y),\\ty : G,\\thy : y ∈ a.support,\\tx : G,\\thx : x ∈ b.support\\t⊢ (λ (a_1 : G) (b : k), ⇑↑f (⇑a y * b) * ⇑g (y * a_1)) x (⇑b x) = (λ (a_1 : G) (c : k), ⇑↑f (⇑a y) * ⇑g y * (⇑↑f c * ⇑g a_1)) x (⇑b x)\\n ',\n", + " \"<|startoftext|> R : Type u,\\tA : Type v,\\tB : Type w,\\t_inst_1 : monoid R,\\t_inst_2 : non_unital_non_assoc_semiring A,\\t_inst_3 : distrib_mul_action R A,\\t_inst_4 : non_unital_non_assoc_semiring B,\\t_inst_5 : distrib_mul_action R B,\\tf : non_unital_alg_hom R A B,\\th₁ : ∀ (m : R) (x : A), ⇑f (m • x) = m • ⇑f x,\\th₂ : ⇑f 0 = 0,\\th₃ : ∀ (x y : A), ⇑f (x + y) = ⇑f x + ⇑f y,\\th₄ : ∀ (x y : A), ⇑f (x * y) = ⇑f x * ⇑f y,\\tx : A\\t⊢ ⇑↑{to_fun := ⇑f, map_smul' := h₁, map_zero' := h₂, map_add' := h₃, map_mul' := h₄} x = ⇑{to_fun := ⇑f, map_smul' := h₁, map_zero' := h₂, map_add' := h₃} x\\n \",\n", + " \"<|startoftext|> R : Type u,\\tA : Type v,\\tB : Type w,\\t_inst_1 : monoid R,\\t_inst_2 : non_unital_non_assoc_semiring A,\\t_inst_3 : distrib_mul_action R A,\\t_inst_4 : non_unital_non_assoc_semiring B,\\t_inst_5 : distrib_mul_action R B,\\tf : non_unital_alg_hom R A B,\\th₁ : ∀ (m : R) (x : A), ⇑f (m • x) = m • ⇑f x,\\th₂ : ⇑f 0 = 0,\\th₃ : ∀ (x y : A), ⇑f (x + y) = ⇑f x + ⇑f y,\\th₄ : ∀ (x y : A), ⇑f (x * y) = ⇑f x * ⇑f y\\t⊢ ↑{to_fun := ⇑f, map_smul' := h₁, map_zero' := h₂, map_add' := h₃, map_mul' := h₄} = {to_fun := ⇑f, map_smul' := h₁, map_zero' := h₂, map_add' := h₃}\\n \",\n", + " '<|startoftext|> R : Type u,\\tA : Type v,\\t_inst_1 : monoid R,\\t_inst_2 : non_unital_non_assoc_semiring A,\\t_inst_3 : distrib_mul_action R A\\t⊢ ∀ (x y : A), 1.to_fun (x * y) = 1.to_fun x * 1.to_fun y\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : has_mul α,\\ta x y : α\\t⊢ 
semiconj_by (opposite.op a) (opposite.op y) (opposite.op x) ↔ semiconj_by a x y\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : has_mul α,\\ta x y : α\\t⊢ semiconj_by (opposite.op a) (opposite.op y) (opposite.op x) → semiconj_by a x y\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : has_mul α,\\ta x y : α,\\th : semiconj_by (opposite.op a) (opposite.op y) (opposite.op x)\\t⊢ semiconj_by a x y\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : has_mul α,\\ta x y : α\\t⊢ semiconj_by (opposite.op a) (opposite.op y) (opposite.op x) → semiconj_by a x y\\t\\tα : Type u,\\t_inst_1 : has_mul α,\\ta x y : α\\t⊢ semiconj_by a x y → semiconj_by (opposite.op a) (opposite.op y) (opposite.op x)\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : has_mul α,\\ta x y : α,\\th : semiconj_by (opposite.op a) (opposite.op y) (opposite.op x)\\t⊢ semiconj_by (opposite.unop (opposite.op a)) (opposite.unop (opposite.op x)) (opposite.unop (opposite.op y))\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : has_mul α,\\ta x y : α\\t⊢ semiconj_by a x y → semiconj_by (opposite.op a) (opposite.op y) (opposite.op x)\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : has_mul α,\\ta x y : α,\\th : semiconj_by a x y\\t⊢ semiconj_by (opposite.op a) (opposite.op y) (opposite.op x)\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : preorder α,\\t_inst_2 : decidable_rel has_lt.lt,\\ta b : α\\t⊢ (cmp a b).swap = cmp b a\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : preorder α,\\t_inst_2 : decidable_rel has_lt.lt,\\ta b : α\\t⊢ (ite (a < b) ordering.lt (ite (b < a) ordering.gt ordering.eq)).swap = ite (b < a) ordering.lt (ite (a < b) ordering.gt ordering.eq)\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : preorder α,\\t_inst_2 : decidable_rel has_lt.lt,\\ta b : α\\t⊢ (ite (a < b) ordering.lt (ite (b < a) ordering.gt ordering.eq)).swap = ite (b < a) ordering.lt (ite (a < b) ordering.gt ordering.eq)\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : preorder α,\\t_inst_2 : decidable_rel has_lt.lt,\\ta b : α,\\th : a < b\\t⊢ (ite (a < b) ordering.lt (ite (b < a) ordering.gt ordering.eq)).swap = ite (b < a) ordering.lt (ite (a < b) ordering.gt ordering.eq)\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : preorder α,\\t_inst_2 : decidable_rel has_lt.lt,\\ta b : α,\\th : ¬a < b\\t⊢ (ite (a < b) ordering.lt (ite (b < a) ordering.gt ordering.eq)).swap = ite (b < a) ordering.lt (ite (a < b) ordering.gt ordering.eq)\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : preorder α,\\t_inst_2 : decidable_rel has_lt.lt,\\ta b : α\\t⊢ (ite (a < b) ordering.lt (ite (b < a) ordering.gt ordering.eq)).swap = ite (b < a) ordering.lt (ite (a < b) ordering.gt ordering.eq)\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : preorder α,\\t_inst_2 : decidable_rel has_lt.lt,\\ta b : α,\\th : a < b,\\th₂ : b < a\\t⊢ (ite (a < b) ordering.lt (ite (b < a) ordering.gt ordering.eq)).swap = ite (b < a) ordering.lt (ite (a < b) ordering.gt ordering.eq)\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : preorder α,\\t_inst_2 : decidable_rel has_lt.lt,\\ta b : α,\\th : a < b,\\th₂ : ¬b < a\\t⊢ (ite (a < b) ordering.lt (ite (b < a) ordering.gt ordering.eq)).swap = ite (b < a) ordering.lt (ite (a < b) ordering.gt ordering.eq)\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : preorder α,\\t_inst_2 : decidable_rel has_lt.lt,\\ta b : α,\\th : ¬a < b,\\th₂ : b < a\\t⊢ (ite (a < b) ordering.lt (ite (b < a) ordering.gt ordering.eq)).swap = ite (b < a) ordering.lt (ite (a < b) ordering.gt 
ordering.eq)\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : preorder α,\\t_inst_2 : decidable_rel has_lt.lt,\\ta b : α,\\th : ¬a < b,\\th₂ : ¬b < a\\t⊢ (ite (a < b) ordering.lt (ite (b < a) ordering.gt ordering.eq)).swap = ite (b < a) ordering.lt (ite (a < b) ordering.gt ordering.eq)\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : preorder α,\\t_inst_2 : decidable_rel has_lt.lt,\\ta b : α,\\th : a < b,\\th₂ : b < a\\t⊢ false\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : linear_order α,\\tx : α\\t⊢ cmp x x = ordering.eq\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : linear_ordered_field α,\\ta b c : α,\\th : 0 < b\\t⊢ a * b⁻¹ < c ↔ a < c * b\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : linear_ordered_field α,\\ta b : α,\\tha : a < 0,\\thb : b < 0\\t⊢ a ≤ b⁻¹ ↔ b ≤ a⁻¹\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : linear_ordered_field α,\\ta b : α,\\thb : b < 0\\t⊢ a / b < 1 ↔ b < a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : linear_ordered_field α,\\ta b : α,\\tha : 0 < a,\\thb : 0 < b\\t⊢ 1 / a ≤ b ↔ 1 / b ≤ a\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : group α,\\t_inst_2 : has_lt α,\\t_inst_3 : covariant_class α α has_mul.mul has_lt.lt,\\ta b : α\\t⊢ a * a⁻¹ < a * b ↔ 1 < a * b\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : group α,\\t_inst_2 : has_lt α,\\t_inst_3 : covariant_class α α has_mul.mul has_lt.lt,\\ta b : α\\t⊢ b * a < b * b⁻¹ ↔ b * a < 1\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : comm_group α,\\t_inst_2 : has_lt α,\\t_inst_3 : covariant_class α α has_mul.mul has_lt.lt,\\ta b c d : α\\t⊢ a * b⁻¹ < c * d⁻¹ ↔ a * d < c * b\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : group α,\\t_inst_2 : has_lt α,\\t_inst_3 : covariant_class α α (function.swap has_mul.mul) has_lt.lt,\\ta b : α\\t⊢ 1 < a / b ↔ b < a\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : linear_ordered_ring α,\\tx y : α,\\th₁ : x ≤ y,\\th₂ : -x ≤ y\\t⊢ x * x ≤ y * y\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : linear_ordered_ring α,\\tx y : α,\\th₁ : x ≤ y,\\th₂ : -x ≤ y,\\t_inst : decidable_rel has_le.le\\t⊢ x * x ≤ y * y\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : linear_ordered_ring α,\\tx y : α,\\th₁ : x ≤ y,\\th₂ : -x ≤ y,\\t_inst : decidable_rel has_le.le\\t⊢ abs x * abs x ≤ y * y\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : linear_ordered_comm_ring α,\\ta b : α\\t⊢ abs (a - b) * abs (a - b) = a * a + b * b - (1 + 1) * a * b\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : linear_ordered_comm_ring α,\\ta b : α\\t⊢ (a - b) * (a - b) = a * a + b * b - (1 + 1) * a * b\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_2 : ordered_semiring α,\\ta b c : α\\t⊢ 0 ≤ c → a ≤ c → 0 ≤ b → b ≤ 1 → a * b ≤ c\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_2 : ordered_semiring α,\\ta b c : α,\\t_inst : Π (a : Prop), decidable a\\t⊢ 0 ≤ c → a ≤ c → 0 ≤ b → b ≤ 1 → a * b ≤ c\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_2 : ordered_semiring α,\\ta b c : α\\t⊢ 0 ≤ c → a ≤ c → 0 ≤ b → b ≤ 1 → a * b ≤ c\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : linear_ordered_semiring α,\\ta b c : α,\\th : 0 < c\\t⊢ c * a < c * b ↔ a < b\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : linear_ordered_semiring α,\\ta b c : α,\\th : 0 < c,\\t_inst : decidable_rel has_le.le\\t⊢ c * a < c * b ↔ a < b\\n ',\n", + " '<|startoftext|> α : Type u,\\t_inst_1 : linear_ordered_semiring α,\\ta b c : α,\\th : 0 < c\\t⊢ c * a < c * b ↔ a < b\\n ',\n", + " '<|startoftext|> α : Type 
u_1,\\t_inst_1 : partial_order α,\\t_inst_2 : add_comm_monoid α,\\t_inst_3 : has_sub α,\\t_inst_4 : has_ordered_sub α,\\ta b c : α,\\t_inst_5 : covariant_class α α has_add.add has_le.le\\t⊢ a - (b + c) = a - b - c\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : partial_order α,\\t_inst_2 : add_comm_monoid α,\\t_inst_3 : has_sub α,\\t_inst_4 : has_ordered_sub α,\\ta b c : α,\\t_inst_5 : covariant_class α α has_add.add has_le.le\\t⊢ a ≤ b + (c + (a - b - c))\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : partial_order α,\\t_inst_2 : add_comm_monoid α,\\t_inst_3 : has_sub α,\\t_inst_4 : has_ordered_sub α,\\ta b c : α,\\t_inst_5 : covariant_class α α has_add.add has_le.le\\t⊢ a ≤ b + c + (a - b - c)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : partial_order α,\\t_inst_2 : add_comm_monoid α,\\t_inst_3 : has_sub α,\\t_inst_4 : has_ordered_sub α,\\ta b c : α,\\t_inst_5 : covariant_class α α has_add.add has_le.le\\t⊢ a ≤ b + c + (a - b - c)\\t\\tα : Type u_1,\\t_inst_1 : partial_order α,\\t_inst_2 : add_comm_monoid α,\\t_inst_3 : has_sub α,\\t_inst_4 : has_ordered_sub α,\\ta b c : α,\\t_inst_5 : covariant_class α α has_add.add has_le.le\\t⊢ a ≤ b + (c + (a - (b + c)))\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : partial_order α,\\t_inst_2 : add_comm_monoid α,\\t_inst_3 : has_sub α,\\t_inst_4 : has_ordered_sub α,\\ta b c : α,\\t_inst_5 : covariant_class α α has_add.add has_le.le\\t⊢ a ≤ b + c + (a - (b + c))\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : partial_order α,\\t_inst_2 : add_comm_monoid α,\\t_inst_3 : has_sub α,\\t_inst_4 : has_ordered_sub α,\\ta b c : α,\\t_inst_5 : covariant_class α α has_add.add has_le.le\\t⊢ a ≤ b + (c + (a - (b + c)))\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : canonically_ordered_add_monoid α,\\t_inst_2 : has_sub α,\\t_inst_3 : has_ordered_sub α,\\ta b c : α,\\th : c ≤ b\\t⊢ a - c ≤ b - c ↔ a ≤ b\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : canonically_ordered_add_monoid α,\\t_inst_2 : has_sub α,\\t_inst_3 : has_ordered_sub α,\\ta b : α\\t⊢ a - b = 0 ↔ a ≤ b\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : canonically_ordered_add_monoid α,\\t_inst_2 : has_sub α,\\t_inst_3 : has_ordered_sub α,\\ta b c : α,\\th₁ : a ≤ b,\\th₂ : a ≤ c,\\th₃ : b - a = c - a\\t⊢ b = c\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : canonically_ordered_add_monoid α,\\t_inst_2 : has_sub α,\\t_inst_3 : has_ordered_sub α,\\ta b c : α,\\thb : add_le_cancellable b,\\th : b ≤ a\\t⊢ a - b + c = a + c - b\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : canonically_ordered_add_monoid α,\\t_inst_2 : has_sub α,\\t_inst_3 : has_ordered_sub α,\\ta b c : α,\\tha : add_le_cancellable a,\\thc : add_le_cancellable c,\\th : c ≤ a\\t⊢ a - b ≤ a - c ↔ c ≤ b\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : canonically_ordered_add_monoid α,\\t_inst_2 : has_sub α,\\t_inst_3 : has_ordered_sub α,\\ta b c : α,\\tha : add_le_cancellable a,\\thc : add_le_cancellable c,\\th : c ≤ a\\t⊢ a - b ≤ a - c → c ≤ b\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : canonically_ordered_add_monoid α,\\t_inst_2 : has_sub α,\\t_inst_3 : has_ordered_sub α,\\ta b c : α,\\tha : add_le_cancellable a,\\thc : add_le_cancellable c,\\th : c ≤ a\\t⊢ a + c ≤ a + b → c ≤ b\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : canonically_ordered_add_monoid α,\\t_inst_2 : has_sub α,\\t_inst_3 : has_ordered_sub α,\\ta b : α,\\thba : add_le_cancellable (b - a),\\th : a ≤ b\\t⊢ b - (b - a) = a\\n ',\n", + " 
'<|startoftext|> α : Type u_1,\\ta b c : α,\\t_inst_1 : linear_ordered_comm_group_with_zero α,\\th : c ≠ 0,\\thab : a * c ≤ b\\t⊢ a * c ≤ b * c⁻¹ * c\\n ',\n", + " '<|startoftext|> α : Type u_1,\\ta b c d : α,\\t_inst_1 : linear_ordered_comm_group_with_zero α,\\thab : a < b,\\thcd : c < d,\\thb : b ≠ 0,\\thd : d ≠ 0,\\tha : a = 0\\t⊢ a * c < b * d\\n ',\n", + " '<|startoftext|> α : Type u_1,\\ta b c d : α,\\t_inst_1 : linear_ordered_comm_group_with_zero α,\\thab : a < b,\\thcd : c < d,\\thb : b ≠ 0,\\thd : d ≠ 0,\\tha : a = 0\\t⊢ b * d ≠ 0\\n ',\n", + " '<|startoftext|> α : Type u_1,\\ta b c d : α,\\t_inst_1 : linear_ordered_comm_group_with_zero α,\\thab : a < b,\\thcd : c < d,\\thb : b ≠ 0,\\thd : d ≠ 0,\\tha : ¬a = 0,\\thc : c = 0\\t⊢ a * c < b * d\\n ',\n", + " '<|startoftext|> α : Type u_1,\\ta b c d : α,\\t_inst_1 : linear_ordered_comm_group_with_zero α,\\thab : a < b,\\thcd : c < d,\\thb : b ≠ 0,\\thd : d ≠ 0,\\tha : ¬a = 0,\\thc : c = 0\\t⊢ b * d ≠ 0\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\tγ : Type u_3,\\tf : α → β,\\tc : α,\\t_inst_1 : add_comm_monoid α,\\t_inst_2 : has_neg β,\\t_inst_3 : division_ring γ,\\t_inst_4 : module γ α,\\th : function.antiperiodic f c,\\ta : γ,\\tha : a ≠ 0,\\tx : α\\t⊢ (λ (x : α), f (a • x)) (x + a⁻¹ • c) = -(λ (x : α), f (a • x)) x\\n ',\n", + " '<|startoftext|> ⊢ comm_group punit\\n ',\n", + " '<|startoftext|> ⊢ comm_group punit\\n ',\n", + " '<|startoftext|> ⊢ comm_group punit\\n ',\n", + " '<|startoftext|> ⊢ punit → punit → punit → punit.star = punit.star\\n ',\n", + " '<|startoftext|> ⊢ ∀ (a : punit), punit.star = a\\n ',\n", + " '<|startoftext|> ⊢ punit → punit.star = 1\\n ',\n", + " '<|startoftext|> ⊢ ℕ → ∀ (x : punit), punit.star = x * punit.star\\n ',\n", + " '<|startoftext|> ⊢ ∀ (a : punit), punit → punit.star = a * punit.star\\n ',\n", + " '<|startoftext|> ⊢ ℕ → ∀ (a : punit), punit.star = a * punit.star\\n ',\n", + " '<|startoftext|> ⊢ ℕ → punit → punit.star = punit.star\\n ',\n", + " '<|startoftext|> ⊢ ∀ (a : punit), a⁻¹ * a = 1\\n ',\n", + " '<|startoftext|> ⊢ ∀ (a b : punit), a * b = b * a\\n ',\n", + " '<|startoftext|> a b c : punit\\t⊢ punit.star = punit.star\\n ',\n", + " '<|startoftext|> a : punit\\t⊢ punit.star = a\\n ',\n", + " '<|startoftext|> x : punit\\t⊢ punit.star = 1\\n ',\n", + " '<|startoftext|> n : ℕ,\\tx : punit\\t⊢ punit.star = x * punit.star\\n ',\n", + " '<|startoftext|> a b : punit\\t⊢ punit.star = a * punit.star\\n ',\n", + " '<|startoftext|> a : punit\\t⊢ punit.star = 1\\n ',\n", + " '<|startoftext|> n : ℕ,\\ta : punit\\t⊢ punit.star = a * punit.star\\n ',\n", + " '<|startoftext|> n : ℕ,\\ta : punit\\t⊢ punit.star = punit.star\\n ',\n", + " '<|startoftext|> a : punit\\t⊢ a⁻¹ * a = 1\\n ',\n", + " '<|startoftext|> a b : punit\\t⊢ a * b = b * a\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : rack R,\\tx y : R\\t⊢ (x ◃ x) ◃ y = x ◃ y\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : rack R,\\tx y : R,\\th : opposite.op x ◃ opposite.op x = opposite.op y ◃ opposite.op y ↔ opposite.op x = opposite.op y\\t⊢ x ◃⁻¹ x = y ◃⁻¹ y ↔ x = y\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : rack R,\\tx y : R\\t⊢ x ◃⁻¹ x = y ◃⁻¹ y ↔ x = y\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\tc₁ c₂ : R,\\ta b : ℍ[R,c₁,c₂]\\t⊢ {re := (a + b).re, im_i := -(a + b).im_i, im_j := -(a + b).im_j, im_k := -(a + b).im_k} = {re := a.re, im_i := -a.im_i, im_j := -a.im_j, im_k := -a.im_k} + {re := b.re, im_i := -b.im_i, im_j := -b.im_j, im_k := -b.im_k}\\n ',\n", + " '<|startoftext|> R 
: Type u_1,\\t_inst_1 : comm_ring R,\\tc₁ c₂ : R,\\ta b : ℍ[R,c₁,c₂]\\t⊢ {re := (a + b).re, im_i := -(a + b).im_i, im_j := -(a + b).im_j, im_k := -(a + b).im_k} = {re := a.re, im_i := -a.im_i, im_j := -a.im_j, im_k := -a.im_k} + {re := b.re, im_i := -b.im_i, im_j := -b.im_j, im_k := -b.im_k}\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\tc₁ c₂ : R,\\ta b : ℍ[R,c₁,c₂]\\t⊢ {re := (a + b).re, im_i := -(a + b).im_i, im_j := -(a + b).im_j, im_k := -(a + b).im_k}.re = ({re := a.re, im_i := -a.im_i, im_j := -a.im_j, im_k := -a.im_k} + {re := b.re, im_i := -b.im_i, im_j := -b.im_j, im_k := -b.im_k}).re\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\tc₁ c₂ : R,\\ta b : ℍ[R,c₁,c₂]\\t⊢ {re := (a + b).re, im_i := -(a + b).im_i, im_j := -(a + b).im_j, im_k := -(a + b).im_k}.im_i = ({re := a.re, im_i := -a.im_i, im_j := -a.im_j, im_k := -a.im_k} + {re := b.re, im_i := -b.im_i, im_j := -b.im_j, im_k := -b.im_k}).im_i\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\tc₁ c₂ : R,\\ta b : ℍ[R,c₁,c₂]\\t⊢ {re := (a + b).re, im_i := -(a + b).im_i, im_j := -(a + b).im_j, im_k := -(a + b).im_k}.im_j = ({re := a.re, im_i := -a.im_i, im_j := -a.im_j, im_k := -a.im_k} + {re := b.re, im_i := -b.im_i, im_j := -b.im_j, im_k := -b.im_k}).im_j\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\tc₁ c₂ : R,\\ta b : ℍ[R,c₁,c₂]\\t⊢ {re := (a + b).re, im_i := -(a + b).im_i, im_j := -(a + b).im_j, im_k := -(a + b).im_k}.im_k = ({re := a.re, im_i := -a.im_i, im_j := -a.im_j, im_k := -a.im_k} + {re := b.re, im_i := -b.im_i, im_j := -b.im_j, im_k := -b.im_k}).im_k\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\tc₁ c₂ r : R,\\ta : ℍ[R,c₁,c₂]\\t⊢ {re := (r • a).re, im_i := -(r • a).im_i, im_j := -(r • a).im_j, im_k := -(r • a).im_k} = ⇑(ring_hom.id R) r • {re := a.re, im_i := -a.im_i, im_j := -a.im_j, im_k := -a.im_k}\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\tc₁ c₂ r : R,\\ta : ℍ[R,c₁,c₂]\\t⊢ {re := (r • a).re, im_i := -(r • a).im_i, im_j := -(r • a).im_j, im_k := -(r • a).im_k} = ⇑(ring_hom.id R) r • {re := a.re, im_i := -a.im_i, im_j := -a.im_j, im_k := -a.im_k}\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\tc₁ c₂ r : R,\\ta : ℍ[R,c₁,c₂]\\t⊢ {re := (r • a).re, im_i := -(r • a).im_i, im_j := -(r • a).im_j, im_k := -(r • a).im_k}.re = (⇑(ring_hom.id R) r • {re := a.re, im_i := -a.im_i, im_j := -a.im_j, im_k := -a.im_k}).re\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\tc₁ c₂ r : R,\\ta : ℍ[R,c₁,c₂]\\t⊢ {re := (r • a).re, im_i := -(r • a).im_i, im_j := -(r • a).im_j, im_k := -(r • a).im_k}.im_i = (⇑(ring_hom.id R) r • {re := a.re, im_i := -a.im_i, im_j := -a.im_j, im_k := -a.im_k}).im_i\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\tc₁ c₂ r : R,\\ta : ℍ[R,c₁,c₂]\\t⊢ {re := (r • a).re, im_i := -(r • a).im_i, im_j := -(r • a).im_j, im_k := -(r • a).im_k}.im_j = (⇑(ring_hom.id R) r • {re := a.re, im_i := -a.im_i, im_j := -a.im_j, im_k := -a.im_k}).im_j\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\tc₁ c₂ r : R,\\ta : ℍ[R,c₁,c₂]\\t⊢ {re := (r • a).re, im_i := -(r • a).im_i, im_j := -(r • a).im_j, im_k := -(r • a).im_k}.im_k = (⇑(ring_hom.id R) r • {re := a.re, im_i := -a.im_i, im_j := -a.im_j, im_k := -a.im_k}).im_k\\n ',\n", + " \"<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\tc₁ c₂ : R,\\ta : ℍ[R,c₁,c₂]\\t⊢ ⇑{to_fun := λ (a : ℍ[R,c₁,c₂]), {re := a.re, im_i := -a.im_i, im_j := -a.im_j, 
im_k := -a.im_k}, map_add' := _, map_smul' := _} (⇑{to_fun := λ (a : ℍ[R,c₁,c₂]), {re := a.re, im_i := -a.im_i, im_j := -a.im_j, im_k := -a.im_k}, map_add' := _, map_smul' := _} a) = a\\n \",\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\tc₁ c₂ : R,\\ta : ℍ[R,c₁,c₂]\\t⊢ ⇑quaternion_algebra.conj a + a = 2 * ↑(a.re)\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : comm_ring R,\\tx : R\\t⊢ ⇑quaternion.norm_sq ↑x = x ^ 2\\n ',\n", + " '<|startoftext|> R : Type u_1,\\tA : Type u_2,\\t_inst_1 : comm_ring R,\\t_inst_2 : ring A,\\t_inst_4 : algebra R A,\\tc₁ c₂ : R,\\tq : quaternion_algebra.basis A c₁ c₂\\t⊢ q.k * q.j = c₂ • q.i\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : mul_zero_class R\\t⊢ ¬is_left_regular 0 ↔ nontrivial R\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : mul_zero_class R\\t⊢ (¬∃ (x y : R), x ≠ y) ↔ ∀ (x y : R), x = y\\n ',\n", + " '<|startoftext|> R : Type u_1,\\t_inst_1 : mul_zero_class R\\t⊢ (∀ (x y : R), x = y) ↔ ∀ (x y : R), x = y\\n ',\n", + " '<|startoftext|> R : Type u_1,\\ta b : R,\\t_inst_1 : monoid R,\\th : a * b = 1\\t⊢ is_right_regular (a * ?m_1)\\n ',\n", + " '<|startoftext|> R : Type u_1,\\ta b : R,\\t_inst_1 : monoid R,\\th : a * b = 1\\t⊢ is_right_regular 1\\n ',\n", + " '<|startoftext|> R : Type u_1,\\ta b : R,\\t_inst_1 : semigroup R,\\tab : is_left_regular (a * b)\\t⊢ function.injective (?m_2 ∘ has_mul.mul b)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : boolean_ring α,\\ta b : α\\t⊢ a * b + a * (1 + b) + a * b * (a * (1 + b)) = a * b + a * (1 + b) + a * a * (b * (1 + b))\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : boolean_ring α,\\ta b : α\\t⊢ a * b + a * (1 + b) + a * a * (b * (1 + b)) = a * b + (a + a * b)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : boolean_ring α,\\ta b : α\\t⊢ a * b + (a + a * b) = a + (a * b + a * b)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : boolean_ring α,\\ta b : α\\t⊢ a + (a * b + a * b) = a\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : boolean_ring α,\\ta : α\\t⊢ a + a = (a + a) * (a + a)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : boolean_ring α,\\ta : α\\t⊢ (a + a) * (a + a) = a * a + a * a + (a * a + a * a)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : boolean_ring α,\\ta : α\\t⊢ a * a + a * a + (a * a + a * a) = a + a + (a + a)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\t_inst_1 : boolean_ring α,\\ta : α,\\tthis : a + a = a + a + (a + a)\\t⊢ a + a = 0\\n ',\n", + " \"<|startoftext|> R₁ : Type u_1,\\tR₂ : Type u_2,\\t_inst_1 : semiring R₁,\\t_inst_2 : semiring R₂,\\tσ : R₁ →+* R₂,\\tσ' : R₂ →+* R₁,\\t_inst_4 : ring_hom_inv_pair σ σ',\\tx : R₂\\t⊢ ⇑(ring_hom.id R₂) x = x\\n \",\n", + " \"<|startoftext|> R₁ : Type u_1,\\tR₂ : Type u_2,\\t_inst_1 : semiring R₁,\\t_inst_2 : semiring R₂,\\tσ : R₁ →+* R₂,\\tσ' : R₂ →+* R₁,\\t_inst_4 : ring_hom_inv_pair σ σ',\\tx : R₂\\t⊢ ⇑σ (⇑σ' x) = x\\n \",\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), non_unital_semiring (f i)\\t⊢ non_unital_semiring (Π (i : I), f i)\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), non_unital_semiring (f i)\\t⊢ non_unital_semiring (Π (i : I), f i)\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), non_unital_semiring (f i)\\t⊢ ∀ (a b c : Π (i : I), f i), a + b + c = a + (b + c)\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), non_unital_semiring (f i)\\t⊢ ∀ (a : Π (i : I), f i), 0 + a = a\\n ',\n", 
+ " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), non_unital_semiring (f i)\\t⊢ ∀ (a : Π (i : I), f i), a + 0 = a\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), non_unital_semiring (f i)\\t⊢ ℕ → (Π (i : I), f i) → Π (i : I), f i\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), non_unital_semiring (f i)\\t⊢ ∀ (x : Π (i : I), f i), (λ (ᾰ_1 : Π (i : I), f i) (i : I), non_unital_semiring.nsmul 0 (ᾰ_1 i)) x = 0\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), non_unital_semiring (f i)\\t⊢ ∀ (n : ℕ) (x : Π (i : I), f i), (λ (ᾰ_1 : Π (i : I), f i) (i : I), non_unital_semiring.nsmul n.succ (ᾰ_1 i)) x = x + (λ (ᾰ_1 : Π (i : I), f i) (i : I), non_unital_semiring.nsmul n (ᾰ_1 i)) x\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), non_unital_semiring (f i)\\t⊢ ∀ (a b : Π (i : I), f i), a + b = b + a\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), non_unital_semiring (f i)\\t⊢ ∀ (a b c : Π (i : I), f i), a * (b + c) = a * b + a * c\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), non_unital_semiring (f i)\\t⊢ ∀ (a b c : Π (i : I), f i), (a + b) * c = a * c + b * c\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), non_unital_semiring (f i)\\t⊢ ∀ (a : Π (i : I), f i), 0 * a = 0\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), non_unital_semiring (f i)\\t⊢ ∀ (a : Π (i : I), f i), a * 0 = 0\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), non_unital_semiring (f i)\\t⊢ ∀ (a b c : Π (i : I), f i), a * b * c = a * (b * c)\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ comm_ring (Π (i : I), f i)\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ comm_ring (Π (i : I), f i)\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (a b c : Π (i : I), f i), a + b + c = a + (b + c)\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (a : Π (i : I), f i), 0 + a = a\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (a : Π (i : I), f i), a + 0 = a\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (x : Π (i : I), f i), (λ (i : I), add_monoid.nsmul 0 (x i)) = 0\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (n : ℕ) (x : Π (i : I), f i), (λ (i : I), add_monoid.nsmul n.succ (x i)) = x + λ (i : I), add_monoid.nsmul n (x i)\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ (Π (i : I), f i) → (Π (i : I), f i) → Π (i : I), f i\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (a b : Π (i : I), f i), (λ (ᾰ_1 : Π (i : I), f i) (i : I), comm_ring.sub (a i) (ᾰ_1 i)) b = a + -b\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ℤ → (Π (i : I), f i) → Π (i : I), f i\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (a : Π (i : I), f i), (λ (ᾰ_1 : Π (i : I), f i) (i : I), comm_ring.gsmul 0 (ᾰ_1 i)) a = 0\\n 
',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (n : ℕ) (a : Π (i : I), f i), (λ (ᾰ_1 : Π (i : I), f i) (i : I), comm_ring.gsmul (int.of_nat n.succ) (ᾰ_1 i)) a = a + (λ (ᾰ_1 : Π (i : I), f i) (i : I), comm_ring.gsmul (int.of_nat n) (ᾰ_1 i)) a\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (n : ℕ) (a : Π (i : I), f i), (λ (ᾰ_1 : Π (i : I), f i) (i : I), comm_ring.gsmul -[1+ n] (ᾰ_1 i)) a = -(λ (ᾰ_1 : Π (i : I), f i) (i : I), comm_ring.gsmul ↑(n.succ) (ᾰ_1 i)) a\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (a : Π (i : I), f i), -a + a = 0\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (a b : Π (i : I), f i), a + b = b + a\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (a b c : Π (i : I), f i), a * b * c = a * (b * c)\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (a : Π (i : I), f i), 1 * a = a\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (a : Π (i : I), f i), a * 1 = a\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (x : Π (i : I), f i), (λ (i : I), monoid.npow 0 (x i)) = 1\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (n : ℕ) (x : Π (i : I), f i), (λ (i : I), monoid.npow n.succ (x i)) = x * λ (i : I), monoid.npow n (x i)\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (a b c : Π (i : I), f i), a * (b + c) = a * b + a * c\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (a b c : Π (i : I), f i), (a + b) * c = a * c + b * c\\n ',\n", + " '<|startoftext|> I : Type u,\\tf : I → Type v,\\t_inst_1 : Π (i : I), comm_ring (f i)\\t⊢ ∀ (a b : Π (i : I), f i), a * b = b * a\\n ',\n", + " '<|startoftext|> R : CommRingᵒᵖ\\t⊢ algebraic_geometry.Spec.SheafedSpace_map (𝟙 R).unop = 𝟙 (algebraic_geometry.Spec.SheafedSpace_obj (opposite.unop R))\\n ',\n", + " '<|startoftext|> R S T : CommRingᵒᵖ,\\tf : R ⟶ S,\\tg : S ⟶ T\\t⊢ algebraic_geometry.Spec.SheafedSpace_map (f ≫ g).unop = algebraic_geometry.Spec.SheafedSpace_map f.unop ≫ algebraic_geometry.Spec.SheafedSpace_map g.unop\\n ',\n", + " '<|startoftext|> R : CommRing,\\tx : ↥({to_PresheafedSpace := (algebraic_geometry.Spec.SheafedSpace_obj R).to_PresheafedSpace, sheaf_condition := (algebraic_geometry.Spec.SheafedSpace_obj R).sheaf_condition}.to_PresheafedSpace.carrier)\\t⊢ local_ring (localization.at_prime (prime_spectrum.as_ideal x))\\n ',\n", + " '<|startoftext|> R S T : CommRing,\\tf : R ⟶ S,\\tg : S ⟶ T\\t⊢ ↑(algebraic_geometry.Spec.LocallyRingedSpace_map (f ≫ g)) = ↑(algebraic_geometry.Spec.LocallyRingedSpace_map g ≫ algebraic_geometry.Spec.LocallyRingedSpace_map f)\\n ',\n", + " '<|startoftext|> R S T : CommRing,\\tf : R ⟶ S,\\tg : S ⟶ T\\t⊢ algebraic_geometry.Spec.SheafedSpace_map g ≫ algebraic_geometry.Spec.SheafedSpace_map f = ↑(algebraic_geometry.Spec.LocallyRingedSpace_map g ≫ algebraic_geometry.Spec.LocallyRingedSpace_map f)\\n ',\n", + " '<|startoftext|> R : CommRingᵒᵖ\\t⊢ algebraic_geometry.Spec.Top_map (𝟙 R).unop = 𝟙 (algebraic_geometry.Spec.Top_obj (opposite.unop R))\\n ',\n", + " '<|startoftext|> R S T : 
CommRingᵒᵖ,\\tf : R ⟶ S,\\tg : S ⟶ T\\t⊢ algebraic_geometry.Spec.Top_map (f ≫ g).unop = algebraic_geometry.Spec.Top_map f.unop ≫ algebraic_geometry.Spec.Top_map g.unop\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R,\\tx : prime_spectrum R\\t⊢ prime_spectrum.vanishing_ideal {x} = x.as_ideal\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R,\\tI : ideal R\\t⊢ prime_spectrum.zero_locus ↑I = ∅ ↔ I = ⊤\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R,\\tI : ideal R\\t⊢ prime_spectrum.zero_locus ↑I = ∅ → I = ⊤\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R,\\tI : ideal R\\t⊢ ¬I = ⊤ → ¬prime_spectrum.zero_locus ↑I = ∅\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R,\\tI : ideal R,\\th : ¬I = ⊤\\t⊢ ¬prime_spectrum.zero_locus ↑I = ∅\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R,\\tI : ideal R,\\th : ¬I = ⊤\\t⊢ (prime_spectrum.zero_locus ↑I).nonempty\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R,\\tI : ideal R\\t⊢ prime_spectrum.zero_locus ↑I = ∅ → I = ⊤\\t\\tR : Type u,\\t_inst_1 : comm_ring R,\\tI : ideal R\\t⊢ I = ⊤ → prime_spectrum.zero_locus ↑I = ∅\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R,\\tI : ideal R,\\th : ¬I = ⊤,\\tM : ideal R,\\thM : M.is_maximal,\\thIM : I ≤ M\\t⊢ (prime_spectrum.zero_locus ↑I).nonempty\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R\\t⊢ prime_spectrum.zero_locus ↑⊤ = ∅\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R\\t⊢ 1 ∈ ↑⊤\\n ',\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R,\\tI : ideal R\\t⊢ I = ⊤ → prime_spectrum.zero_locus ↑I = ∅\\n ',\n", + " \"<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R,\\ts : set (set R)\\t⊢ prime_spectrum.zero_locus (⋃ (s' : set R) (H : s' ∈ s), s') = ⋂ (s' : set R) (H : s' ∈ s), prime_spectrum.zero_locus s'\\n \",\n", + " '<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R,\\tS : Type v,\\t_inst_2 : comm_ring S,\\tf : R →+* S\\t⊢ continuous (prime_spectrum.comap f)\\n ',\n", + " \"<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R,\\tS : Type v,\\t_inst_2 : comm_ring S,\\tf : R →+* S\\t⊢ ∀ (s : set (prime_spectrum R)), is_closed s → is_closed (prime_spectrum.comap f ⁻¹' s)\\n \",\n", + " \"<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R,\\tS : Type v,\\t_inst_2 : comm_ring S,\\tf : R →+* S\\t⊢ ∀ (s : set (prime_spectrum R)), (∃ (s_1 : set R), s = prime_spectrum.zero_locus s_1) → (∃ (s_1 : set S), prime_spectrum.comap f ⁻¹' s = prime_spectrum.zero_locus s_1)\\n \",\n", + " \"<|startoftext|> R : Type u,\\t_inst_1 : comm_ring R,\\tS : Type v,\\t_inst_2 : comm_ring S,\\tf : R →+* S,\\ts : set R\\t⊢ ∃ (s_1 : set S), prime_spectrum.comap f ⁻¹' prime_spectrum.zero_locus s = prime_spectrum.zero_locus s_1\\n \",\n", + " '<|startoftext|> m n : simplex_category,\\tf g : m ⟶ n,\\th : simplex_category.skeletal_functor.map f = simplex_category.skeletal_functor.map g\\t⊢ simplex_category.hom.to_preorder_hom f = simplex_category.hom.to_preorder_hom g\\n ',\n", + " '<|startoftext|> m n : simplex_category,\\tf g : m ⟶ n,\\th : simplex_category.skeletal_functor.map f = simplex_category.skeletal_functor.map g\\t⊢ ⇑(simplex_category.hom.to_preorder_hom f) = ⇑(simplex_category.hom.to_preorder_hom g)\\n ',\n", + " '<|startoftext|> m n : simplex_category,\\tf g : m ⟶ n,\\th : simplex_category.skeletal_functor.map f = simplex_category.skeletal_functor.map g,\\ti : fin (m.len + 1)\\t⊢ ⇑(simplex_category.hom.to_preorder_hom f) i = 
⇑(simplex_category.hom.to_preorder_hom g) i\\n ',\n", + " '<|startoftext|> m n : simplex_category,\\tf g : m ⟶ n,\\th : simplex_category.skeletal_functor.map f = simplex_category.skeletal_functor.map g\\t⊢ f = g\\n ',\n", + " '<|startoftext|> m n : simplex_category,\\tf g : m ⟶ n,\\th : simplex_category.skeletal_functor.map f = simplex_category.skeletal_functor.map g,\\ti : fin (m.len + 1)\\t⊢ {down := ⇑(simplex_category.hom.to_preorder_hom f) i} = {down := ⇑(simplex_category.hom.to_preorder_hom g) i}\\n ',\n", + " '<|startoftext|> m n : simplex_category,\\tf g : m ⟶ n,\\th : simplex_category.skeletal_functor.map f = simplex_category.skeletal_functor.map g,\\ti : fin (m.len + 1)\\t⊢ ⇑(simplex_category.skeletal_functor.map f) {down := i} = ⇑(simplex_category.skeletal_functor.map g) {down := i}\\n ',\n", + " '<|startoftext|> n m : simplex_category,\\tf : n ⟶ m\\t⊢ category_theory.mono f ↔ function.injective ⇑(simplex_category.hom.to_preorder_hom f)\\n ',\n", + " '<|startoftext|> n m : simplex_category,\\tf : n ⟶ m\\t⊢ category_theory.mono f → function.injective ⇑(simplex_category.hom.to_preorder_hom f)\\n ',\n", + " '<|startoftext|> n m : simplex_category,\\tf : n ⟶ m,\\tm : category_theory.mono f,\\tx y : fin (n.len + 1),\\th : ⇑(simplex_category.hom.to_preorder_hom f) x = ⇑(simplex_category.hom.to_preorder_hom f) y\\t⊢ x = y\\n ',\n", + " '<|startoftext|> n m : simplex_category,\\tf : n ⟶ m,\\tm : category_theory.mono f,\\tx y : fin (n.len + 1),\\th : ⇑(simplex_category.hom.to_preorder_hom f) x = ⇑(simplex_category.hom.to_preorder_hom f) y\\t⊢ m.const (⇑(simplex_category.hom.to_preorder_hom f) x) = m.const (⇑(simplex_category.hom.to_preorder_hom f) y)\\n ',\n", + " '<|startoftext|> n m : simplex_category,\\tf : n ⟶ m,\\tm : category_theory.mono f,\\tx y : fin (n.len + 1),\\th : ⇑(simplex_category.hom.to_preorder_hom f) x = ⇑(simplex_category.hom.to_preorder_hom f) y\\t⊢ n.const x ≫ f = n.const y ≫ f\\t\\tn m : simplex_category,\\tf : n ⟶ m,\\tm : category_theory.mono f,\\tx y : fin (n.len + 1),\\th : ⇑(simplex_category.hom.to_preorder_hom f) x = ⇑(simplex_category.hom.to_preorder_hom f) y,\\tH : n.const x ≫ f = n.const y ≫ f\\t⊢ x = y\\n ',\n", + " '<|startoftext|> n m : simplex_category,\\tf : n ⟶ m,\\tm : category_theory.mono f,\\tx y : fin (n.len + 1),\\th : ⇑(simplex_category.hom.to_preorder_hom f) x = ⇑(simplex_category.hom.to_preorder_hom f) y\\t⊢ n.const x ≫ f = n.const y ≫ f\\n ',\n", + " '<|startoftext|> n m : simplex_category,\\tf : n ⟶ m,\\tm : category_theory.mono f,\\tx y : fin (n.len + 1),\\th : ⇑(simplex_category.hom.to_preorder_hom f) x = ⇑(simplex_category.hom.to_preorder_hom f) y,\\tH : n.const x ≫ f = n.const y ≫ f\\t⊢ x = y\\n ',\n", + " '<|startoftext|> n m : simplex_category,\\tf : n ⟶ m,\\tm : category_theory.mono f,\\tx y : fin (n.len + 1),\\th : ⇑(simplex_category.hom.to_preorder_hom f) x = ⇑(simplex_category.hom.to_preorder_hom f) y,\\tH : n.const x ≫ f = n.const y ≫ f\\t⊢ ⇑(simplex_category.hom.to_preorder_hom (n.const x)) 0 = ⇑(simplex_category.hom.to_preorder_hom (n.const y)) 0\\n ',\n", + " '<|startoftext|> n m : simplex_category,\\tf : n ⟶ m\\t⊢ category_theory.mono f → function.injective ⇑(simplex_category.hom.to_preorder_hom f)\\t\\tn m : simplex_category,\\tf : n ⟶ m\\t⊢ function.injective ⇑(simplex_category.hom.to_preorder_hom f) → category_theory.mono f\\n ',\n", + " '<|startoftext|> n m : simplex_category,\\tf : n ⟶ m,\\tm : category_theory.mono f,\\tx y : fin (n.len + 1),\\th : ⇑(simplex_category.hom.to_preorder_hom f) x = 
⇑(simplex_category.hom.to_preorder_hom f) y,\\tH : n.const x = n.const y\\t⊢ ⇑(simplex_category.hom.to_preorder_hom (n.const x)) 0 = ⇑(simplex_category.hom.to_preorder_hom (n.const y)) 0\\n ',\n", + " '<|startoftext|> n m : simplex_category,\\tf : n ⟶ m\\t⊢ function.injective ⇑(simplex_category.hom.to_preorder_hom f) → category_theory.mono f\\n ',\n", + " '<|startoftext|> C : Type u,\\t_inst_1 : category_theory.category C,\\tX : category_theory.simplicial_object C,\\tX₀ : C,\\tf : X.obj (opposite.op [0]) ⟶ X₀,\\tw : ∀ (i : simplex_category) (g₁ g₂ : [0] ⟶ i), X.map g₁.op ≫ f = X.map g₂.op ≫ f\\t⊢ ∀ ⦃X_1 Y : simplex_categoryᵒᵖ⦄ (f_1 : X_1 ⟶ Y), ((𝟭 (category_theory.simplicial_object C)).obj X).map f_1 ≫ X.map ((opposite.unop Y).const 0).op ≫ f = (X.map ((opposite.unop X_1).const 0).op ≫ f) ≫ ((category_theory.simplicial_object.const C).obj X₀).map f_1\\n ',\n", + " '<|startoftext|> C : Type u,\\t_inst_1 : category_theory.category C,\\tX : category_theory.simplicial_object C,\\tX₀ : C,\\tf : X.obj (opposite.op [0]) ⟶ X₀,\\tw : ∀ (i : simplex_category) (g₁ g₂ : [0] ⟶ i), X.map g₁.op ≫ f = X.map g₂.op ≫ f,\\ti j : simplex_categoryᵒᵖ,\\tg : i ⟶ j\\t⊢ ((𝟭 (category_theory.simplicial_object C)).obj X).map g ≫ X.map ((opposite.unop j).const 0).op ≫ f = (X.map ((opposite.unop i).const 0).op ≫ f) ≫ ((category_theory.simplicial_object.const C).obj X₀).map g\\n ',\n", + " '<|startoftext|> C : Type u,\\t_inst_1 : category_theory.category C,\\tX : category_theory.simplicial_object C,\\tX₀ : C,\\tf : X.obj (opposite.op [0]) ⟶ X₀,\\tw : ∀ (i : simplex_category) (g₁ g₂ : [0] ⟶ i), X.map g₁.op ≫ f = X.map g₂.op ≫ f,\\ti j : simplex_categoryᵒᵖ,\\tg : i ⟶ j\\t⊢ X.map g ≫ X.map ((opposite.unop j).const 0).op ≫ f = (X.map ((opposite.unop i).const 0).op ≫ f) ≫ 𝟙 X₀\\n ',\n", + " '<|startoftext|> C : Type u,\\t_inst_1 : category_theory.category C,\\tX : category_theory.simplicial_object C,\\tX₀ : C,\\tf : X.obj (opposite.op [0]) ⟶ X₀,\\tw : ∀ (i : simplex_category) (g₁ g₂ : [0] ⟶ i), X.map g₁.op ≫ f = X.map g₂.op ≫ f,\\ti j : simplex_categoryᵒᵖ,\\tg : i ⟶ j\\t⊢ X.map g.unop.op ≫ X.map ((opposite.unop j).const 0).op ≫ f = (X.map ((opposite.unop i).const 0).op ≫ f) ≫ 𝟙 X₀\\n ',\n", + " '<|startoftext|> C : Type u,\\t_inst_1 : category_theory.category C,\\tX : category_theory.cosimplicial_object C,\\tn : ℕ,\\ti : fin (n + 2),\\tj : fin (n + 1),\\tH : ⇑fin.cast_succ j < i\\t⊢ X.map (simplex_category.δ i.succ) ≫ X.map (simplex_category.σ (⇑fin.cast_succ j)) = X.map (simplex_category.σ j) ≫ X.map (simplex_category.δ i)\\n ',\n", + " '<|startoftext|> C : Type u,\\t_inst_1 : category_theory.category C,\\tX : category_theory.cosimplicial_object C,\\tn : ℕ,\\ti : fin (n + 2),\\tj : fin (n + 1),\\tH : ⇑fin.cast_succ j < i\\t⊢ X.δ i.succ ≫ X.σ (⇑fin.cast_succ j) = X.σ j ≫ X.δ i\\n ',\n", + " '<|startoftext|> C : Type u,\\t_inst_1 : category_theory.category C,\\tX : category_theory.cosimplicial_object.augmented Cᵒᵖ\\t⊢ X.left_op.right_op.left = X.left\\n ',\n", + " '<|startoftext|> C : Type u,\\t_inst_1 : category_theory.category C,\\tX : category_theory.cosimplicial_object.augmented Cᵒᵖ\\t⊢ (category_theory.cosimplicial_object.const Cᵒᵖ).map (category_theory.eq_to_iso _).hom ≫ X.hom = X.left_op.right_op.hom ≫ (𝟭 (category_theory.cosimplicial_object Cᵒᵖ)).map (category_theory.functor.left_op_right_op_iso X.right).hom\\n ',\n", + " '<|startoftext|> C : Type u,\\t_inst_1 : category_theory.category C,\\tX : category_theory.simplicial_object C,\\tn m : ℕ,\\th : n = m\\t⊢ opposite.op (simplex_category.mk n) = opposite.op 
(simplex_category.mk m)\\n ',\n", + " \"<|startoftext|> E : Type u_1,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tv : ℝ → E → E,\\ts : ℝ → set E,\\tK : ℝ,\\thv : ∀ (t : ℝ) (x y : E), x ∈ s t → y ∈ s t → has_dist.dist (v t x) (v t y) ≤ K * has_dist.dist x y,\\tf g : ℝ → E,\\ta b δ : ℝ,\\thf : continuous_on f (set.Icc a b),\\thf' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at f (v t (f t)) (set.Ici t) t,\\thfs : ∀ (t : ℝ), t ∈ set.Ico a b → f t ∈ s t,\\thg : continuous_on g (set.Icc a b),\\thg' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at g (v t (g t)) (set.Ici t) t,\\thgs : ∀ (t : ℝ), t ∈ set.Ico a b → g t ∈ s t,\\tha : has_dist.dist (f a) (g a) ≤ δ\\t⊢ ∀ (t : ℝ), t ∈ set.Icc a b → has_dist.dist (f t) (g t) ≤ δ * real.exp (K * (t - a))\\n \",\n", + " \"<|startoftext|> E : Type u_1,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tv : ℝ → E → E,\\ts : ℝ → set E,\\tK : ℝ,\\thv : ∀ (t : ℝ) (x y : E), x ∈ s t → y ∈ s t → has_dist.dist (v t x) (v t y) ≤ K * has_dist.dist x y,\\tf g : ℝ → E,\\ta b δ : ℝ,\\thf : continuous_on f (set.Icc a b),\\thf' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at f (v t (f t)) (set.Ici t) t,\\thfs : ∀ (t : ℝ), t ∈ set.Ico a b → f t ∈ s t,\\thg : continuous_on g (set.Icc a b),\\thg' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at g (v t (g t)) (set.Ici t) t,\\thgs : ∀ (t : ℝ), t ∈ set.Ico a b → g t ∈ s t,\\tha : has_dist.dist (f a) (g a) ≤ δ\\t⊢ ∀ (t : ℝ), t ∈ set.Ico a b → has_dist.dist (v t (f t)) (v t (f t)) ≤ 0\\n \",\n", + " \"<|startoftext|> E : Type u_1,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tv : ℝ → E → E,\\ts : ℝ → set E,\\tK : ℝ,\\thv : ∀ (t : ℝ) (x y : E), x ∈ s t → y ∈ s t → has_dist.dist (v t x) (v t y) ≤ K * has_dist.dist x y,\\tf g : ℝ → E,\\ta b δ : ℝ,\\thf : continuous_on f (set.Icc a b),\\thf' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at f (v t (f t)) (set.Ici t) t,\\thfs : ∀ (t : ℝ), t ∈ set.Ico a b → f t ∈ s t,\\thg : continuous_on g (set.Icc a b),\\thg' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at g (v t (g t)) (set.Ici t) t,\\thgs : ∀ (t : ℝ), t ∈ set.Ico a b → g t ∈ s t,\\tha : has_dist.dist (f a) (g a) ≤ δ,\\tt : ℝ,\\tH : t ∈ set.Ico a b\\t⊢ has_dist.dist (v t (f t)) (v t (f t)) ≤ 0\\n \",\n", + " \"<|startoftext|> E : Type u_1,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tv : ℝ → E → E,\\ts : ℝ → set E,\\tK : ℝ,\\thv : ∀ (t : ℝ) (x y : E), x ∈ s t → y ∈ s t → has_dist.dist (v t x) (v t y) ≤ K * has_dist.dist x y,\\tf g : ℝ → E,\\ta b δ : ℝ,\\thf : continuous_on f (set.Icc a b),\\thf' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at f (v t (f t)) (set.Ici t) t,\\thfs : ∀ (t : ℝ), t ∈ set.Ico a b → f t ∈ s t,\\thg : continuous_on g (set.Icc a b),\\thg' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at g (v t (g t)) (set.Ici t) t,\\thgs : ∀ (t : ℝ), t ∈ set.Ico a b → g t ∈ s t,\\tha : has_dist.dist (f a) (g a) ≤ δ,\\tf_bound : ∀ (t : ℝ), t ∈ set.Ico a b → has_dist.dist (v t (f t)) (v t (f t)) ≤ 0\\t⊢ ∀ (t : ℝ), t ∈ set.Icc a b → has_dist.dist (f t) (g t) ≤ δ * real.exp (K * (t - a))\\n \",\n", + " \"<|startoftext|> E : Type u_1,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tv : ℝ → E → E,\\ts : ℝ → set E,\\tK : ℝ,\\thv : ∀ (t : ℝ) (x y : E), x ∈ s t → y ∈ s t → has_dist.dist (v t x) (v t y) ≤ K * has_dist.dist x y,\\tf g : ℝ → E,\\ta b δ : ℝ,\\thf : continuous_on f (set.Icc a b),\\thf' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at f (v t (f t)) (set.Ici t) t,\\thfs : ∀ (t : ℝ), t ∈ set.Ico a b → f t ∈ s t,\\thg : continuous_on g 
(set.Icc a b),\\thg' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at g (v t (g t)) (set.Ici t) t,\\thgs : ∀ (t : ℝ), t ∈ set.Ico a b → g t ∈ s t,\\tha : has_dist.dist (f a) (g a) ≤ δ,\\tf_bound : ∀ (t : ℝ), t ∈ set.Ico a b → has_dist.dist (v t (f t)) (v t (f t)) ≤ 0\\t⊢ ∀ (t : ℝ), t ∈ set.Ico a b → has_dist.dist (v t (g t)) (v t (g t)) ≤ 0\\n \",\n", + " \"<|startoftext|> E : Type u_1,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tv : ℝ → E → E,\\ts : ℝ → set E,\\tK : ℝ,\\thv : ∀ (t : ℝ) (x y : E), x ∈ s t → y ∈ s t → has_dist.dist (v t x) (v t y) ≤ K * has_dist.dist x y,\\tf g : ℝ → E,\\ta b δ : ℝ,\\thf : continuous_on f (set.Icc a b),\\thf' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at f (v t (f t)) (set.Ici t) t,\\thfs : ∀ (t : ℝ), t ∈ set.Ico a b → f t ∈ s t,\\thg : continuous_on g (set.Icc a b),\\thg' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at g (v t (g t)) (set.Ici t) t,\\thgs : ∀ (t : ℝ), t ∈ set.Ico a b → g t ∈ s t,\\tha : has_dist.dist (f a) (g a) ≤ δ,\\tf_bound : ∀ (t : ℝ), t ∈ set.Ico a b → has_dist.dist (v t (f t)) (v t (f t)) ≤ 0,\\tt : ℝ,\\tH : t ∈ set.Ico a b\\t⊢ has_dist.dist (v t (g t)) (v t (g t)) ≤ 0\\n \",\n", + " \"<|startoftext|> E : Type u_1,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tv : ℝ → E → E,\\ts : ℝ → set E,\\tK : ℝ,\\thv : ∀ (t : ℝ) (x y : E), x ∈ s t → y ∈ s t → has_dist.dist (v t x) (v t y) ≤ K * has_dist.dist x y,\\tf g : ℝ → E,\\ta b δ : ℝ,\\thf : continuous_on f (set.Icc a b),\\thf' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at f (v t (f t)) (set.Ici t) t,\\thfs : ∀ (t : ℝ), t ∈ set.Ico a b → f t ∈ s t,\\thg : continuous_on g (set.Icc a b),\\thg' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at g (v t (g t)) (set.Ici t) t,\\thgs : ∀ (t : ℝ), t ∈ set.Ico a b → g t ∈ s t,\\tha : has_dist.dist (f a) (g a) ≤ δ,\\tf_bound : ∀ (t : ℝ), t ∈ set.Ico a b → has_dist.dist (v t (f t)) (v t (f t)) ≤ 0,\\tg_bound : ∀ (t : ℝ), t ∈ set.Ico a b → has_dist.dist (v t (g t)) (v t (g t)) ≤ 0\\t⊢ ∀ (t : ℝ), t ∈ set.Icc a b → has_dist.dist (f t) (g t) ≤ δ * real.exp (K * (t - a))\\n \",\n", + " \"<|startoftext|> E : Type u_1,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tv : ℝ → E → E,\\ts : ℝ → set E,\\tK : ℝ,\\thv : ∀ (t : ℝ) (x y : E), x ∈ s t → y ∈ s t → has_dist.dist (v t x) (v t y) ≤ K * has_dist.dist x y,\\tf g : ℝ → E,\\ta b δ : ℝ,\\thf : continuous_on f (set.Icc a b),\\thf' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at f (v t (f t)) (set.Ici t) t,\\thfs : ∀ (t : ℝ), t ∈ set.Ico a b → f t ∈ s t,\\thg : continuous_on g (set.Icc a b),\\thg' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at g (v t (g t)) (set.Ici t) t,\\thgs : ∀ (t : ℝ), t ∈ set.Ico a b → g t ∈ s t,\\tha : has_dist.dist (f a) (g a) ≤ δ,\\tf_bound : ∀ (t : ℝ), t ∈ set.Ico a b → has_dist.dist (v t (f t)) (v t (f t)) ≤ 0,\\tg_bound : ∀ (t : ℝ), t ∈ set.Ico a b → has_dist.dist (v t (g t)) (v t (g t)) ≤ 0,\\tt : ℝ,\\tht : t ∈ set.Icc a b\\t⊢ has_dist.dist (f t) (g t) ≤ δ * real.exp (K * (t - a))\\n \",\n", + " \"<|startoftext|> E : Type u_1,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tv : ℝ → E → E,\\ts : ℝ → set E,\\tK : ℝ,\\thv : ∀ (t : ℝ) (x y : E), x ∈ s t → y ∈ s t → has_dist.dist (v t x) (v t y) ≤ K * has_dist.dist x y,\\tf g : ℝ → E,\\ta b δ : ℝ,\\thf : continuous_on f (set.Icc a b),\\thf' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at f (v t (f t)) (set.Ici t) t,\\thfs : ∀ (t : ℝ), t ∈ set.Ico a b → f t ∈ s t,\\thg : continuous_on g (set.Icc a b),\\thg' : ∀ (t : ℝ), t ∈ set.Ico a b → has_deriv_within_at g (v 
t (g t)) (set.Ici t) t,\\thgs : ∀ (t : ℝ), t ∈ set.Ico a b → g t ∈ s t,\\tha : has_dist.dist (f a) (g a) ≤ δ,\\tf_bound : ∀ (t : ℝ), t ∈ set.Ico a b → has_dist.dist (v t (f t)) (v t (f t)) ≤ 0,\\tg_bound : ∀ (t : ℝ), t ∈ set.Ico a b → has_dist.dist (v t (g t)) (v t (g t)) ≤ 0,\\tt : ℝ,\\tht : t ∈ set.Icc a b,\\tthis : has_dist.dist (f t) (g t) ≤ gronwall_bound δ K (0 + 0) (t - a)\\t⊢ has_dist.dist (f t) (g t) ≤ δ * real.exp (K * (t - a))\\n \",\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tp : formal_multilinear_series 𝕜 E F,\\tr : ℝ≥0,\\th : ↑r < p.radius\\t⊢ summable (λ (n : ℕ), ∥p n∥ * ↑r ^ n)\\n ',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tp : formal_multilinear_series 𝕜 E F,\\tr : ℝ≥0,\\th : ↑r < p.radius,\\ta : ℝ,\\tha : a ∈ set.Ioo 0 1,\\tC : ℝ,\\thp : ∀ (n : ℕ), ∥p n∥ * ↑r ^ n ≤ C * a ^ n,\\thC : 0 < C\\t⊢ summable (λ (n : ℕ), ∥p n∥ * ↑r ^ n)\\n ',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\t_inst_8 : complete_space F,\\tf : E → F\\t⊢ is_open {x : E | analytic_at 𝕜 f x}\\n ',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\t_inst_8 : complete_space F,\\tf : E → F\\t⊢ ∀ (a : E), a ∈ {x : E | analytic_at 𝕜 f x} → {x : E | analytic_at 𝕜 f x} ∈ 𝓝 a\\n ',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\t_inst_8 : complete_space F,\\tf : E → F,\\tx : E,\\tp : formal_multilinear_series 𝕜 E F,\\tr : ℝ≥0∞,\\thr : has_fpower_series_on_ball f p x r\\t⊢ {x : E | analytic_at 𝕜 f x} ∈ 𝓝 x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0\\t⊢ u ~[l] v ↔ filter.tendsto (u / v) l (𝓝 1)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0\\t⊢ u ~[l] v → filter.tendsto (u / v) l (𝓝 1)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0,\\thequiv : u ~[l] v\\t⊢ filter.tendsto (u / v) l (𝓝 1)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0,\\thequiv : u ~[l] v,\\tthis : filter.tendsto (λ (x : α), (u - v) x / v x) l (𝓝 0)\\t⊢ filter.tendsto (u / v) l (𝓝 1)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0,\\thequiv : u ~[l] v,\\tthis : filter.tendsto (λ (x : α), u x / v x - v x / v x) l (𝓝 0)\\t⊢ filter.tendsto (u / v) l (𝓝 1)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : 
normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0,\\thequiv : u ~[l] v,\\tthis : filter.tendsto (λ (x : α), u x / v x - v x / v x) l (𝓝 0)\\t⊢ filter.tendsto (λ (x : α), v x / v x) l (𝓝 1)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0,\\thequiv : u ~[l] v,\\tthis : filter.tendsto (λ (x : α), u x / v x - v x / v x) l (𝓝 0)\\t⊢ filter.tendsto (λ (x : α), v x / v x) l (𝓝 1)\\t\\tα : Type u_1,\\tβ : Type u_2,\\t_inst_1 : normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0,\\thequiv : u ~[l] v,\\tthis : filter.tendsto (λ (x : α), u x / v x - v x / v x) l (𝓝 0),\\tkey : filter.tendsto (λ (x : α), v x / v x) l (𝓝 1)\\t⊢ filter.tendsto (u / v) l (𝓝 1)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0,\\thequiv : u ~[l] v,\\tthis : filter.tendsto (λ (x : α), u x / v x - v x / v x) l (𝓝 0),\\tkey : filter.tendsto (λ (x : α), v x / v x) l (𝓝 1)\\t⊢ filter.tendsto (u / v) l (𝓝 1)\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0,\\thequiv : u ~[l] v,\\tthis : filter.tendsto (λ (x : α), u x / v x - v x / v x) l (𝓝 0),\\tkey : filter.tendsto (λ (x : α), v x / v x) l (𝓝 1),\\tx : α\\t⊢ (u / v) x = u x / v x - v x / v x + v x / v x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0,\\thequiv : u ~[l] v,\\tthis : filter.tendsto (λ (x : α), u x / v x - v x / v x) l (𝓝 0),\\tkey : filter.tendsto (λ (x : α), v x / v x) l (𝓝 1)\\t⊢ u / v = λ (x : α), u x / v x - v x / v x + v x / v x\\t\\tα : Type u_1,\\tβ : Type u_2,\\t_inst_1 : normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0,\\thequiv : u ~[l] v,\\tthis : filter.tendsto (λ (x : α), u x / v x - v x / v x) l (𝓝 0),\\tkey : filter.tendsto (λ (x : α), v x / v x) l (𝓝 1)\\t⊢ 1 = 0 + 1\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0,\\thequiv : u ~[l] v,\\tthis : filter.tendsto (λ (x : α), u x / v x - v x / v x) l (𝓝 0),\\tkey : filter.tendsto (λ (x : α), v x / v x) l (𝓝 1)\\t⊢ u / v = λ (x : α), u x / v x - v x / v x + v x / v x\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0\\t⊢ u ~[l] v → filter.tendsto (u / v) l (𝓝 1)\\t\\tα : Type u_1,\\tβ : Type u_2,\\t_inst_1 : normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0\\t⊢ filter.tendsto (u / v) l (𝓝 1) → u ~[l] v\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0,\\thequiv : u ~[l] v,\\tthis : filter.tendsto (λ (x : α), u x / v x - v x / v x) l (𝓝 0),\\tkey : filter.tendsto (λ (x : α), v x / v x) l (𝓝 1)\\t⊢ 1 = 0 + 1\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : normed_field β,\\tu v : α → β,\\tl : filter α,\\thz : ∀ᶠ (x : α) in l, v x ≠ 0\\t⊢ filter.tendsto (u / v) l (𝓝 1) → u ~[l] v\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tE : Type u_3,\\tF : Type u_4,\\t_inst_1 : has_norm E,\\t_inst_2 : has_norm F,\\tc : ℝ,\\tf : α → E,\\tg : α → F,\\tl : filter α,\\th : 
asymptotics.is_O_with c f g l\\t⊢ asymptotics.is_O f g l\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tE : Type u_3,\\tF : Type u_4,\\t_inst_1 : has_norm E,\\t_inst_2 : has_norm F,\\tc : ℝ,\\tf : α → E,\\tg : α → F,\\tl : filter α,\\th : asymptotics.is_O_with c f g l\\t⊢ asymptotics.is_O f g l\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tE : Type u_3,\\tF : Type u_4,\\t_inst_1 : has_norm E,\\t_inst_2 : has_norm F,\\tc : ℝ,\\tf : α → E,\\tg : α → F,\\tl : filter α,\\th : asymptotics.is_O_with c f g l\\t⊢ ∃ (c : ℝ), asymptotics.is_O_with c f g l\\n ',\n", + " \"<|startoftext|> α : Type u_1,\\tE' : Type u_6,\\tF' : Type u_7,\\t_inst_4 : normed_group E',\\t_inst_5 : normed_group F',\\tf' : α → E',\\tg' : α → F',\\th : ∀ (x : α), g' x = 0 → f' x = 0,\\th' : asymptotics.is_O f' g' filter.cofinite,\\t_let_match : (∃ (C : ℝ) (H : C > 0), ∀ ⦃x : α⦄, g' x ≠ 0 → ∥f' x∥ ≤ C * ∥g' x∥) → (∃ (C : ℝ), ∀ (x : α), ∥f' x∥ ≤ C * ∥g' x∥),\\tC : ℝ,\\tC₀ : C > 0,\\thC : ∀ ⦃x : α⦄, g' x ≠ 0 → ∥f' x∥ ≤ C * ∥g' x∥,\\tx : α,\\thx : g' x = 0\\t⊢ ∥f' x∥ ≤ C * ∥g' x∥\\n \",\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : topological_space α,\\t_inst_2 : topological_space β,\\tE : Type u_3,\\t_inst_3 : has_norm E,\\tF : Type u_4,\\t_inst_4 : has_norm F,\\te : local_homeomorph α β,\\tb : β,\\thb : b ∈ e.to_local_equiv.target,\\tf : β → E,\\tg : β → F\\t⊢ (∃ (c : ℝ), asymptotics.is_O_with c f g (𝓝 b)) ↔ ∃ (c : ℝ), asymptotics.is_O_with c (f ∘ ⇑e) (g ∘ ⇑e) (𝓝 (⇑(e.symm) b))\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : topological_space α,\\t_inst_2 : topological_space β,\\tE : Type u_3,\\t_inst_3 : has_norm E,\\tF : Type u_4,\\t_inst_4 : has_norm F,\\te : local_homeomorph α β,\\tb : β,\\thb : b ∈ e.to_local_equiv.target,\\tf : β → E,\\tg : β → F\\t⊢ asymptotics.is_O f g (𝓝 b) ↔ asymptotics.is_O (f ∘ ⇑e) (g ∘ ⇑e) (𝓝 (⇑(e.symm) b))\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : topological_space α,\\t_inst_2 : topological_space β,\\tE : Type u_3,\\t_inst_3 : has_norm E,\\tF : Type u_4,\\t_inst_4 : has_norm F,\\te : α ≃ₜ β,\\tb : β,\\tf : β → E,\\tg : β → F\\t⊢ (∃ (c : ℝ), asymptotics.is_O_with c f g (𝓝 b)) ↔ ∃ (c : ℝ), asymptotics.is_O_with c (f ∘ ⇑e) (g ∘ ⇑e) (𝓝 (⇑(e.symm) b))\\n ',\n", + " '<|startoftext|> α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : topological_space α,\\t_inst_2 : topological_space β,\\tE : Type u_3,\\t_inst_3 : has_norm E,\\tF : Type u_4,\\t_inst_4 : has_norm F,\\te : α ≃ₜ β,\\tb : β,\\tf : β → E,\\tg : β → F\\t⊢ asymptotics.is_O f g (𝓝 b) ↔ asymptotics.is_O (f ∘ ⇑e) (g ∘ ⇑e) (𝓝 (⇑(e.symm) b))\\n ',\n", + " \"<|startoftext|> α : Type u_1,\\tE' : Type u_6,\\tF' : Type u_7,\\t_inst_4 : normed_group E',\\t_inst_5 : normed_group F',\\tf' : α → E',\\tg' : α → F',\\tl : filter α,\\t_inst_11 : subsingleton E',\\tc : ℝ,\\thc : 0 < c\\t⊢ ∀ᶠ (x : α) in l, ∥f' x∥ ≤ c * ∥g' x∥\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\tF : Type u_4,\\tE' : Type u_6,\\t_inst_2 : has_norm F,\\t_inst_4 : normed_group E',\\tc : ℝ,\\tg : α → F,\\tf' : α → E',\\tl : filter α\\t⊢ asymptotics.is_O_with c (λ (x : α), -f' x) g l ↔ asymptotics.is_O_with c f' g l\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\tF : Type u_4,\\tE' : Type u_6,\\t_inst_2 : has_norm F,\\t_inst_4 : normed_group E',\\tg : α → F,\\tl : filter α,\\tf₁ f₂ : α → E',\\th₁ : asymptotics.is_o f₁ g l,\\th₂ : asymptotics.is_o f₂ g l\\t⊢ asymptotics.is_o (λ (x : α), f₁ x - f₂ x) g l\\n \",\n", + " '<|startoftext|> α : Type u_1,\\tE : Type u_3,\\tF : Type u_4,\\t_inst_1 : has_norm E,\\t_inst_2 : 
has_norm F,\\tc : ℝ,\\tf : α → E,\\tg : α → F,\\tl : filter α\\t⊢ asymptotics.is_O_with c f g l ↔ ∀ᶠ (x : α) in l, ∥f x∥ ≤ c * ∥g x∥\\n ',\n", + " \"<|startoftext|> 𝕜 : Type u,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tx : 𝕜,\\ts : set 𝕜,\\t𝔸 : Type u_2,\\t_inst_7 : normed_ring 𝔸,\\t_inst_9 : normed_algebra 𝕜 𝔸,\\tc d : 𝕜 → 𝔸,\\tc' d' : 𝔸,\\thc : has_deriv_within_at c c' s x,\\thd : has_deriv_within_at d d' s x\\t⊢ has_deriv_within_at (λ (y : 𝕜), c y * d y) (c' * d x + c x * d') s x\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tx : 𝕜,\\ts : set 𝕜,\\t𝔸 : Type u_2,\\t_inst_7 : normed_ring 𝔸,\\t_inst_9 : normed_algebra 𝕜 𝔸,\\tc d : 𝕜 → 𝔸,\\tc' d' : 𝔸,\\thc : has_deriv_within_at c c' s x,\\thd : has_deriv_within_at d d' s x,\\tthis : has_deriv_within_at (λ (y : 𝕜), c y * d y) (⇑(c x • 1.smul_right d' + (1.smul_right c').smul_right (d x)) 1) s x\\t⊢ has_deriv_within_at (λ (y : 𝕜), c y * d y) (c' * d x + c x * d') s x\\n \",\n", + " '<|startoftext|> 𝕜 : Type u,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tx : 𝕜,\\tc : 𝕜 → 𝕜,\\td : 𝕜\\t⊢ deriv (λ (x : 𝕜), c x / d) x = deriv c x / d\\n ',\n", + " '<|startoftext|> 𝕜 : Type u,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tx : 𝕜,\\ts : set 𝕜,\\tp : polynomial 𝕜,\\thxs : unique_diff_within_at 𝕜 s x\\t⊢ deriv_within (λ (x : 𝕜), polynomial.eval x p) s x = polynomial.eval x (⇑polynomial.derivative p)\\n ',\n", + " '<|startoftext|> 𝕜 : Type u,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tx : 𝕜,\\ts : set 𝕜,\\tp : polynomial 𝕜,\\thxs : unique_diff_within_at 𝕜 s x\\t⊢ deriv (λ (x : 𝕜), polynomial.eval x p) x = polynomial.eval x (⇑polynomial.derivative p)\\n ',\n", + " \"<|startoftext|> 𝕜 : Type u,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tF : Type v,\\t_inst_2 : normed_group F,\\t_inst_3 : normed_space 𝕜 F,\\tf : 𝕜 → F,\\tf' : F,\\tx : 𝕜,\\t_inst_6 : partial_order 𝕜\\t⊢ has_deriv_within_at f f' (set.Ioi x) x ↔ has_deriv_within_at f f' (set.Ici x) x\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tF : Type v,\\t_inst_2 : normed_group F,\\t_inst_3 : normed_space 𝕜 F,\\tf : 𝕜 → F,\\tf' : F,\\tx : 𝕜,\\ts t : set 𝕜,\\ths : has_deriv_within_at f f' s x,\\tht : has_deriv_within_at f f' t x\\t⊢ has_deriv_within_at f f' (s ∪ t) x\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tF : Type v,\\t_inst_2 : normed_group F,\\t_inst_3 : normed_space 𝕜 F,\\tf : 𝕜 → F,\\tf' : F,\\tx : 𝕜,\\ts t : set 𝕜,\\ths : has_deriv_within_at f f' s x,\\tht : has_deriv_within_at f f' t x\\t⊢ has_deriv_at_filter f f' x (𝓝[s] x ⊔ 𝓝[t] x)\\n \",\n", + " '<|startoftext|> 𝕜 : Type u,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tF : Type v,\\t_inst_2 : normed_group F,\\t_inst_3 : normed_space 𝕜 F,\\tf : 𝕜 → F,\\tx : 𝕜\\t⊢ 1.smul_right (deriv f x) = fderiv 𝕜 f x\\n ',\n", + " \"<|startoftext|> 𝕜 : Type u,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tF : Type v,\\t_inst_2 : normed_group F,\\t_inst_3 : normed_space 𝕜 F,\\tf : 𝕜 → F,\\tf' : F,\\tx : 𝕜,\\tc : 𝕜 → 𝕜,\\tc' : 𝕜,\\thc : has_strict_deriv_at c c' x,\\thf : has_strict_deriv_at f f' x\\t⊢ has_strict_deriv_at (λ (y : 𝕜), c y • f y) (c x • f' + c' • f x) x\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tF : Type v,\\t_inst_2 : normed_group F,\\t_inst_3 : normed_space 𝕜 F,\\tf : 𝕜 → F,\\tf' : F,\\tx : 𝕜,\\tL : filter 𝕜,\\th : has_deriv_at_filter f f' x L\\t⊢ has_deriv_at_filter (λ (x : 𝕜), -f x) (-f') x L\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tF : Type 
v,\\t_inst_2 : normed_group F,\\t_inst_3 : normed_space 𝕜 F,\\tf g : 𝕜 → F,\\tf' g' : F,\\tx : 𝕜,\\tL : filter 𝕜,\\thf : has_deriv_at_filter f f' x L,\\thg : has_deriv_at_filter g g' x L\\t⊢ has_deriv_at_filter (λ (x : 𝕜), f x - g x) (f' - g') x L\\n \",\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tx : E,\\ts : set E,\\thxs : unique_diff_within_at 𝕜 s x,\\tc : F,\\thf : ¬differentiable_within_at 𝕜 f s x\\t⊢ fderiv_within 𝕜 (λ (y : E), f y + c) s x = fderiv_within 𝕜 f s x\\n ',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tx : E,\\ts : set E,\\thxs : unique_diff_within_at 𝕜 s x,\\tc : F,\\thf : ¬differentiable_within_at 𝕜 f s x\\t⊢ ¬differentiable_within_at 𝕜 (λ (y : E), f y + c) s x\\n ',\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v)\\t⊢ filter.tendsto (λ (n : α), c n • (f (x + d n) - f x)) l (𝓝 (⇑f' v))\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v)\\t⊢ 𝓝[s] x = ?m_1\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v)\\t⊢ filter.tendsto (λ (n : α), x + d n) l (𝓝[s] x)\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v)\\t⊢ filter.tendsto (λ (n : α), x + d n) l (𝓝[s] (x + 0))\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 
𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v)\\t⊢ filter.tendsto (λ (n : α), x + d n) l (𝓝 (x + 0)) ∧ filter.tendsto (λ (n : α), x + d n) l (𝓟 s)\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v)\\t⊢ filter.tendsto (λ (n : α), x + d n) l (𝓝 (x + 0))\\t\\t𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v)\\t⊢ filter.tendsto (λ (n : α), x + d n) l (𝓟 s)\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v)\\t⊢ filter.tendsto (λ (n : α), x + d n) l (𝓝 (x + 0))\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v)\\t⊢ filter.tendsto (λ (n : α), x + d n) l (𝓝[s] x)\\t\\t𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v),\\ttendsto_arg : filter.tendsto (λ (n : α), x + d n) l (𝓝[s] x)\\t⊢ filter.tendsto (λ (n : α), c n • (f 
(x + d n) - f x)) l (𝓝 (⇑f' v))\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v)\\t⊢ filter.tendsto (λ (n : α), x + d n) l (𝓟 s)\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v),\\ttendsto_arg : filter.tendsto (λ (n : α), x + d n) l (𝓝[s] x)\\t⊢ filter.tendsto (λ (n : α), c n • (f (x + d n) - f x)) l (𝓝 (⇑f' v))\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v),\\ttendsto_arg : filter.tendsto (λ (n : α), x + d n) l (𝓝[s] x),\\tthis : asymptotics.is_o (λ (y : E), f y - f x - ⇑f' (y - x)) (λ (y : E), y - x) (𝓝[s] x)\\t⊢ filter.tendsto (λ (n : α), c n • (f (x + d n) - f x)) l (𝓝 (⇑f' v))\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v),\\ttendsto_arg : filter.tendsto (λ (n : α), x + d n) l (𝓝[s] x),\\tthis : asymptotics.is_o (λ (y : E), f y - f x - ⇑f' (y - x)) (λ (y : E), y - x) (𝓝[s] x),\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (x + d n - x)) (λ (n : α), x + d n - x) l\\t⊢ filter.tendsto (λ (n : α), c n • (f (x + d n) - f x)) l (𝓝 (⇑f' v))\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v),\\ttendsto_arg : filter.tendsto (λ (n : 
α), x + d n) l (𝓝[s] x),\\tthis : asymptotics.is_o (λ (y : E), f y - f x - ⇑f' (y - x)) (λ (y : E), y - x) (𝓝[s] x),\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (x + d n - x)) (λ (n : α), x + d n - x) l\\t⊢ asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (d n)) d l\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v),\\ttendsto_arg : filter.tendsto (λ (n : α), x + d n) l (𝓝[s] x),\\tthis : asymptotics.is_o (λ (y : E), f y - f x - ⇑f' (y - x)) (λ (y : E), y - x) (𝓝[s] x),\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (x + d n - x)) (λ (n : α), x + d n - x) l,\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (d n)) d l\\t⊢ filter.tendsto (λ (n : α), c n • (f (x + d n) - f x)) l (𝓝 (⇑f' v))\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v),\\ttendsto_arg : filter.tendsto (λ (n : α), x + d n) l (𝓝[s] x),\\tthis : asymptotics.is_o (λ (y : E), f y - f x - ⇑f' (y - x)) (λ (y : E), y - x) (𝓝[s] x),\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (x + d n - x)) (λ (n : α), x + d n - x) l,\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (d n)) d l,\\tthis : asymptotics.is_o (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) (λ (n : α), c n • d n) l\\t⊢ filter.tendsto (λ (n : α), c n • (f (x + d n) - f x)) l (𝓝 (⇑f' v))\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v),\\ttendsto_arg : filter.tendsto (λ (n : α), x + d n) l (𝓝[s] x),\\tthis : asymptotics.is_o (λ (y : E), f y - f x - ⇑f' (y - x)) (λ (y : E), y - x) (𝓝[s] x),\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (x + d n - x)) (λ (n : α), x + d n - x) l,\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (d n)) d l,\\tthis : asymptotics.is_o (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) (λ (n : α), c n • d n) l,\\tthis : asymptotics.is_o (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) (λ (n : α), 1) l\\t⊢ filter.tendsto (λ (n : α), c n • (f (x + d n) - f x)) l (𝓝 (⇑f' v))\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type 
u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v),\\ttendsto_arg : filter.tendsto (λ (n : α), x + d n) l (𝓝[s] x),\\tthis : asymptotics.is_o (λ (y : E), f y - f x - ⇑f' (y - x)) (λ (y : E), y - x) (𝓝[s] x),\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (x + d n - x)) (λ (n : α), x + d n - x) l,\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (d n)) d l,\\tthis : asymptotics.is_o (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) (λ (n : α), c n • d n) l,\\tthis : asymptotics.is_o (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) (λ (n : α), 1) l,\\tL1 : filter.tendsto (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) l (𝓝 0)\\t⊢ filter.tendsto (λ (n : α), c n • (f (x + d n) - f x)) l (𝓝 (⇑f' v))\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v),\\ttendsto_arg : filter.tendsto (λ (n : α), x + d n) l (𝓝[s] x),\\tthis : asymptotics.is_o (λ (y : E), f y - f x - ⇑f' (y - x)) (λ (y : E), y - x) (𝓝[s] x),\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (x + d n - x)) (λ (n : α), x + d n - x) l,\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (d n)) d l,\\tthis : asymptotics.is_o (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) (λ (n : α), c n • d n) l,\\tthis : asymptotics.is_o (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) (λ (n : α), 1) l,\\tL1 : filter.tendsto (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) l (𝓝 0),\\tL2 : filter.tendsto (λ (n : α), ⇑f' (c n • d n)) l (𝓝 (⇑f' v))\\t⊢ filter.tendsto (λ (n : α), c n • (f (x + d n) - f x)) l (𝓝 (⇑f' v))\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v),\\ttendsto_arg : filter.tendsto (λ (n : α), x + d n) l (𝓝[s] x),\\tthis : asymptotics.is_o (λ (y : E), f y - f x - ⇑f' (y - x)) (λ (y : E), y - x) (𝓝[s] x),\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (x + d n - x)) (λ (n : α), x + d n - x) l,\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (d n)) d l,\\tthis : asymptotics.is_o (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) (λ (n : α), c n • d n) l,\\tthis : asymptotics.is_o (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) (λ (n : α), 1) l,\\tL1 : filter.tendsto (λ (n : α), c n 
• (f (x + d n) - f x - ⇑f' (d n))) l (𝓝 0),\\tL2 : filter.tendsto (λ (n : α), ⇑f' (c n • d n)) l (𝓝 (⇑f' v)),\\tL3 : filter.tendsto (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n)) + ⇑f' (c n • d n)) l (𝓝 (0 + ⇑f' v))\\t⊢ filter.tendsto (λ (n : α), c n • (f (x + d n) - f x)) l (𝓝 (⇑f' v))\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v),\\ttendsto_arg : filter.tendsto (λ (n : α), x + d n) l (𝓝[s] x),\\tthis : asymptotics.is_o (λ (y : E), f y - f x - ⇑f' (y - x)) (λ (y : E), y - x) (𝓝[s] x),\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (x + d n - x)) (λ (n : α), x + d n - x) l,\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (d n)) d l,\\tthis : asymptotics.is_o (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) (λ (n : α), c n • d n) l,\\tthis : asymptotics.is_o (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) (λ (n : α), 1) l,\\tL1 : filter.tendsto (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) l (𝓝 0),\\tL2 : filter.tendsto (λ (n : α), ⇑f' (c n • d n)) l (𝓝 (⇑f' v)),\\tL3 : filter.tendsto (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n)) + ⇑f' (c n • d n)) l (𝓝 (0 + ⇑f' v))\\t⊢ (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n)) + ⇑f' (c n • d n)) = λ (n : α), c n • (f (x + d n) - f x)\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α → E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v),\\ttendsto_arg : filter.tendsto (λ (n : α), x + d n) l (𝓝[s] x),\\tthis : asymptotics.is_o (λ (y : E), f y - f x - ⇑f' (y - x)) (λ (y : E), y - x) (𝓝[s] x),\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (x + d n - x)) (λ (n : α), x + d n - x) l,\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (d n)) d l,\\tthis : asymptotics.is_o (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) (λ (n : α), c n • d n) l,\\tthis : asymptotics.is_o (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) (λ (n : α), 1) l,\\tL1 : filter.tendsto (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) l (𝓝 0),\\tL2 : filter.tendsto (λ (n : α), ⇑f' (c n • d n)) l (𝓝 (⇑f' v)),\\tL3 : filter.tendsto (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n)) + ⇑f' (c n • d n)) l (𝓝 (0 + ⇑f' v)),\\tn : α\\t⊢ c n • (f (x + d n) - f x - ⇑f' (d n)) + ⇑f' (c n • d n) = c n • (f (x + d n) - f x)\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts : set E,\\th : has_fderiv_within_at f f' s x,\\tα : Type u_4,\\tl : filter α,\\tc : α → 𝕜,\\td : α 
→ E,\\tv : E,\\tdtop : ∀ᶠ (n : α) in l, x + d n ∈ s,\\tclim : filter.tendsto (λ (n : α), ∥c n∥) l filter.at_top,\\tcdlim : filter.tendsto (λ (n : α), c n • d n) l (𝓝 v),\\ttendsto_arg : filter.tendsto (λ (n : α), x + d n) l (𝓝[s] x),\\tthis : asymptotics.is_o (λ (y : E), f y - f x - ⇑f' (y - x)) (λ (y : E), y - x) (𝓝[s] x),\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (x + d n - x)) (λ (n : α), x + d n - x) l,\\tthis : asymptotics.is_o (λ (n : α), f (x + d n) - f x - ⇑f' (d n)) d l,\\tthis : asymptotics.is_o (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) (λ (n : α), c n • d n) l,\\tthis : asymptotics.is_o (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) (λ (n : α), 1) l,\\tL1 : filter.tendsto (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n))) l (𝓝 0),\\tL2 : filter.tendsto (λ (n : α), ⇑f' (c n • d n)) l (𝓝 (⇑f' v)),\\tL3 : filter.tendsto (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n)) + ⇑f' (c n • d n)) l (𝓝 (0 + ⇑f' v)),\\tthis : (λ (n : α), c n • (f (x + d n) - f x - ⇑f' (d n)) + ⇑f' (c n • d n)) = λ (n : α), c n • (f (x + d n) - f x)\\t⊢ filter.tendsto (λ (n : α), c n • (f (x + d n) - f x)) l (𝓝 (⇑f' v))\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tG : Type u_4,\\t_inst_6 : normed_group G,\\t_inst_7 : normed_space 𝕜 G,\\tiso : E ≃L[𝕜] F,\\tf : G → E,\\tx : G,\\tf' : G →L[𝕜] E\\t⊢ has_strict_fderiv_at (⇑iso ∘ f) (↑iso.comp f') x ↔ has_strict_fderiv_at f f' x\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tG : Type u_4,\\t_inst_6 : normed_group G,\\t_inst_7 : normed_space 𝕜 G,\\tiso : E ≃L[𝕜] F,\\tf : G → E,\\tx : G,\\tf' : G →L[𝕜] E,\\tH : has_strict_fderiv_at (⇑iso ∘ f) (↑iso.comp f') x\\t⊢ has_strict_fderiv_at f f' x\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tG : Type u_4,\\t_inst_6 : normed_group G,\\t_inst_7 : normed_space 𝕜 G,\\tiso : E ≃L[𝕜] F,\\tf : G → E,\\tx : G,\\tf' : G →L[𝕜] E,\\tH : has_strict_fderiv_at (⇑iso ∘ f) (↑iso.comp f') x\\t⊢ has_strict_fderiv_at f f' x\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tG : Type u_4,\\t_inst_6 : normed_group G,\\t_inst_7 : normed_space 𝕜 G,\\tiso : E ≃L[𝕜] F,\\tf : G → E,\\tx : G,\\tf' : G →L[𝕜] E,\\tH : has_strict_fderiv_at (⇑iso ∘ f) (↑iso.comp f') x\\t⊢ has_strict_fderiv_at f f' x\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tG : Type u_4,\\t_inst_6 : normed_group G,\\t_inst_7 : normed_space 𝕜 G,\\tiso : E ≃L[𝕜] F,\\tf : G → E,\\tx : G,\\tf' : G →L[𝕜] E,\\tH : has_strict_fderiv_at (⇑iso ∘ f) (↑iso.comp f') x\\t⊢ f = λ (x : G), ⇑(iso.symm) ((⇑iso ∘ f) x)\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : 
normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tG : Type u_4,\\t_inst_6 : normed_group G,\\t_inst_7 : normed_space 𝕜 G,\\tiso : E ≃L[𝕜] F,\\tf : G → E,\\tx : G,\\tf' : G →L[𝕜] E,\\tH : has_strict_fderiv_at (⇑iso ∘ f) (↑iso.comp f') x\\t⊢ f' = ↑(iso.symm).comp (↑iso.comp f')\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tG : Type u_4,\\t_inst_6 : normed_group G,\\t_inst_7 : normed_space 𝕜 G,\\tiso : E ≃L[𝕜] F,\\tf : G → E,\\tx : G,\\tf' : G →L[𝕜] E,\\tH : has_strict_fderiv_at (⇑iso ∘ f) (↑iso.comp f') x,\\tz : G\\t⊢ f z = ⇑(iso.symm) ((⇑iso ∘ f) z)\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tG : Type u_4,\\t_inst_6 : normed_group G,\\t_inst_7 : normed_space 𝕜 G,\\tiso : E ≃L[𝕜] F,\\tf : G → E,\\tx : G,\\tf' : G →L[𝕜] E,\\tH : has_strict_fderiv_at (⇑iso ∘ f) (↑iso.comp f') x,\\tz : G\\t⊢ ⇑f' z = ⇑(↑(iso.symm).comp (↑iso.comp f')) z\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\tL : filter E,\\tx' : E,\\thx' : ∥x' - x∥ = 0\\t⊢ ∥f x - f x - ⇑f' (x - x)∥ = 0\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\tL : filter E,\\tx' : E,\\thx' : ∥x' - x∥ = 0\\t⊢ ∥f x' - f x - ⇑f' (x' - x)∥ = 0\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\tL : filter E,\\th : ∀ (x' : E), ∥x' - x∥ = 0 → ∥f x' - f x - ⇑f' (x' - x)∥ = 0\\t⊢ has_fderiv_at_filter f f' x L ↔ filter.tendsto (λ (x' : E), ∥x' - x∥⁻¹ * ∥f x' - f x - ⇑f' (x' - x)∥) L (𝓝 0)\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\tL : filter E,\\th : ∀ (x' : E), ∥x' - x∥ = 0 → ∥f x' - f x - ⇑f' (x' - x)∥ = 0\\t⊢ asymptotics.is_o (λ (x' : E), f x' - f x - ⇑f' (x' - x)) (λ (x' : E), x' - x) L ↔ filter.tendsto (λ (x' : E), ∥x' - x∥⁻¹ * ∥f x' - f x - ⇑f' (x' - x)∥) L (𝓝 0)\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\tL : filter E,\\th : ∀ (x' : E), ∥x' - x∥ = 0 → ∥f x' - f x - ⇑f' (x' - x)∥ = 0\\t⊢ filter.tendsto (λ (x_1 : E), (λ (x' : E), ∥f x' - f x - ⇑f' (x' - x)∥) x_1 / (λ (x' : E), ∥x' - x∥) x_1) L (𝓝 0) ↔ filter.tendsto (λ (x' : E), ∥x' - x∥⁻¹ * ∥f x' - f x - ⇑f' (x' - x)∥) L (𝓝 
0)\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx₀ : E,\\thf : has_fderiv_at f f' x₀,\\ts : set E,\\ths : s ∈ 𝓝 x₀,\\tC : ℝ≥0,\\thlip : lipschitz_on_with C f s\\t⊢ ∥f'∥ ≤ ↑C\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx₀ : E,\\thf : has_fderiv_at f f' x₀,\\ts : set E,\\ths : s ∈ 𝓝 x₀,\\tC : ℝ≥0,\\thlip : lipschitz_on_with C f s,\\tε : ℝ,\\tε0 : 0 < ε\\t⊢ 0 ≤ ↑C + ε\\t\\t𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx₀ : E,\\thf : has_fderiv_at f f' x₀,\\ts : set E,\\ths : s ∈ 𝓝 x₀,\\tC : ℝ≥0,\\thlip : lipschitz_on_with C f s,\\tε : ℝ,\\tε0 : 0 < ε\\t⊢ ∀ᶠ (x : E) in 𝓝 0, ∥⇑f' x∥ ≤ (↑C + ε) * ∥x∥\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx₀ : E,\\thf : has_fderiv_at f f' x₀,\\ts : set E,\\ths : s ∈ 𝓝 x₀,\\tC : ℝ≥0,\\thlip : lipschitz_on_with C f s,\\tε : ℝ,\\tε0 : 0 < ε,\\ths' : s ∈ 𝓝 x₀\\t⊢ ∀ᶠ (x : E) in 𝓝 0, ∥⇑f' x∥ ≤ (↑C + ε) * ∥x∥\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx₀ : E,\\thf : has_fderiv_at f f' x₀,\\ts : set E,\\ths : s ∈ 𝓝 x₀,\\tC : ℝ≥0,\\thlip : lipschitz_on_with C f s,\\tε : ℝ,\\tε0 : 0 < ε\\t⊢ ∀ᶠ (x : E) in 𝓝 0, ∥⇑f' x∥ ≤ (↑C + ε) * ∥x∥\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx₀ : E,\\thf : has_fderiv_at f f' x₀,\\ts : set E,\\ths : s ∈ 𝓝 x₀,\\tC : ℝ≥0,\\thlip : lipschitz_on_with C f s,\\tε : ℝ,\\tε0 : 0 < ε,\\ths' : has_add.add x₀ ⁻¹' s ∈ 𝓝 0\\t⊢ ∀ᶠ (x : E) in 𝓝 0, ∥⇑f' x∥ ≤ (↑C + ε) * ∥x∥\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx₀ : E,\\thf : has_fderiv_at f f' x₀,\\ts : set E,\\ths : s ∈ 𝓝 x₀,\\tC : ℝ≥0,\\thlip : lipschitz_on_with C f s,\\tε : ℝ,\\tε0 : 0 < ε,\\ths' : has_add.add x₀ ⁻¹' s ∈ 𝓝 0\\t⊢ ∀ (a : E), ∥f (x₀ + a) - f x₀ - ⇑f' a∥ ≤ ε * ∥a∥ → a ∈ has_add.add x₀ ⁻¹' s → ∥⇑f' a∥ ≤ (↑C + ε) * ∥a∥\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx₀ : E,\\thf : has_fderiv_at f f' x₀,\\ts : set E,\\ths : s ∈ 𝓝 x₀,\\tC : ℝ≥0,\\thlip : 
lipschitz_on_with C f s,\\tε : ℝ,\\tε0 : 0 < ε,\\ths' : has_add.add x₀ ⁻¹' s ∈ 𝓝 0,\\ty : E,\\thy : ∥f (x₀ + y) - f x₀ - ⇑f' y∥ ≤ ε * ∥y∥,\\thys : y ∈ has_add.add x₀ ⁻¹' s\\t⊢ ∥⇑f' y∥ ≤ (↑C + ε) * ∥y∥\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx₀ : E,\\thf : has_fderiv_at f f' x₀,\\ts : set E,\\ths : s ∈ 𝓝 x₀,\\tC : ℝ≥0,\\thlip : lipschitz_on_with C f s,\\tε : ℝ,\\tε0 : 0 < ε,\\ths' : has_add.add x₀ ⁻¹' s ∈ 𝓝 0,\\ty : E,\\thy : ∥f (x₀ + y) - f x₀ - ⇑f' y∥ ≤ ε * ∥y∥,\\thys : y ∈ has_add.add x₀ ⁻¹' s,\\tthis : ∥f (x₀ + y) - f x₀∥ ≤ ↑C * ∥x₀ + y - x₀∥\\t⊢ ∥⇑f' y∥ ≤ (↑C + ε) * ∥y∥\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx₀ : E,\\thf : has_fderiv_at f f' x₀,\\ts : set E,\\ths : s ∈ 𝓝 x₀,\\tC : ℝ≥0,\\thlip : lipschitz_on_with C f s,\\tε : ℝ,\\tε0 : 0 < ε,\\ths' : has_add.add x₀ ⁻¹' s ∈ 𝓝 0,\\ty : E,\\thy : ∥f (x₀ + y) - f x₀ - ⇑f' y∥ ≤ ε * ∥y∥,\\thys : y ∈ has_add.add x₀ ⁻¹' s,\\tthis : ∥f (x₀ + y) - f x₀∥ ≤ ↑C * ∥y∥\\t⊢ ∥⇑f' y∥ ≤ (↑C + ε) * ∥y∥\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tf' : E →L[𝕜] F,\\tx : E,\\ts t : set E,\\th : t ∈ 𝓝 x\\t⊢ has_fderiv_within_at f f' (s ∩ t) x ↔ has_fderiv_within_at f f' s x\\n \",\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tr ε : ℝ,\\tL : E →L[𝕜] F,\\tx : E,\\thx : x ∈ fderiv_measurable_aux.A f L r ε,\\ty z : E,\\thy : y ∈ metric.closed_ball x (r / 2),\\thz : z ∈ metric.closed_ball x (r / 2)\\t⊢ ∥f z - f y - ⇑L (z - y)∥ ≤ ε * r\\n ',\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tf : E → F,\\tr ε : ℝ,\\tL : E →L[𝕜] F,\\tx y z : E,\\thy : y ∈ metric.closed_ball x (r / 2),\\thz : z ∈ metric.closed_ball x (r / 2),\\tr' : ℝ,\\tr'mem : r' ∈ set.Ioc (r / 2) r,\\thr' : ∀ (y z : E), y ∈ metric.ball x r' → z ∈ metric.ball x r' → ∥f z - f y - ⇑L (z - y)∥ ≤ ε * r\\t⊢ ∥f z - f y - ⇑L (z - y)∥ ≤ ε * r\\n \",\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tF : Type u_2,\\t_inst_2 : normed_group F,\\t_inst_3 : normed_space 𝕜 F,\\tn : ℕ,\\tf : 𝕜 → F,\\ts : set 𝕜,\\tx : 𝕜\\t⊢ iterated_deriv_within n f s x = (⇑((continuous_multilinear_map.pi_field_equiv 𝕜 (fin n) F).symm) ∘ iterated_fderiv_within 𝕜 n f s) x\\n ',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tF : Type u_2,\\t_inst_2 : normed_group F,\\t_inst_3 : normed_space 𝕜 F,\\tn : ℕ,\\tf : 𝕜 → F,\\ts : set 𝕜\\t⊢ iterated_deriv_within n f s = ⇑((continuous_multilinear_map.pi_field_equiv 𝕜 (fin n) F).symm) ∘ iterated_fderiv_within 𝕜 n f s\\n ',\n", + " \"<|startoftext|> E : Type u,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ 
E,\\tf : E → ℝ,\\ta : E,\\ts : set E,\\th : is_local_max_on f s a,\\ty : E,\\thy : y ∈ pos_tangent_cone_at s a,\\thy' : -y ∈ pos_tangent_cone_at s a,\\thf : ¬differentiable_within_at ℝ f s a\\t⊢ ⇑(fderiv_within ℝ f s a) y = 0\\n \",\n", + " \"<|startoftext|> E : Type u,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tf : E → ℝ,\\ta : E,\\ts : set E,\\th : is_local_max_on f s a,\\ty : E,\\thy : y ∈ pos_tangent_cone_at s a,\\thy' : -y ∈ pos_tangent_cone_at s a,\\thf : ¬differentiable_within_at ℝ f s a\\t⊢ ⇑0 y = 0\\n \",\n", + " \"<|startoftext|> E : Type u,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tf : E → ℝ,\\ta : E,\\tf' : E →L[ℝ] ℝ,\\th : is_local_min f a,\\thf : has_fderiv_at f f' a\\t⊢ f' = 0\\n \",\n", + " \"<|startoftext|> E : Type u,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tf : E → ℝ,\\ta : E,\\tf' : E →L[ℝ] ℝ,\\th : is_local_min f a,\\thf : has_fderiv_at f f' a,\\ty : E\\t⊢ ⇑f' y = ⇑0 y\\n \",\n", + " \"<|startoftext|> E : Type u,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tf : E → ℝ,\\ta : E,\\tf' : E →L[ℝ] ℝ,\\th : is_local_min f a,\\thf : has_fderiv_at f f' a,\\ty : E\\t⊢ ⇑f' y = ⇑0 y\\n \",\n", + " \"<|startoftext|> E : Type u,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tf : E → ℝ,\\ta : E,\\tf' : E →L[ℝ] ℝ,\\th : is_local_min f a,\\thf : has_fderiv_at f f' a,\\ty : E\\t⊢ ⇑f' y = ⇑0 y\\n \",\n", + " \"<|startoftext|> E : Type u,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tf : E → ℝ,\\ta : E,\\tf' : E →L[ℝ] ℝ,\\th : is_local_min f a,\\thf : has_fderiv_at f f' a,\\ty : E\\t⊢ y ∈ set.univ\\n \",\n", + " \"<|startoftext|> E : Type u,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tf : E → ℝ,\\ta : E,\\tf' : E →L[ℝ] ℝ,\\th : is_local_min f a,\\thf : has_fderiv_at f f' a,\\ty : E\\t⊢ -y ∈ set.univ\\n \",\n", + " \"<|startoftext|> E : Type u,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tf : E → ℝ,\\ta : E,\\tf' : E →L[ℝ] ℝ,\\th : is_local_min f a,\\thf : has_fderiv_at f f' a,\\ty : E\\t⊢ y ∈ pos_tangent_cone_at set.univ a\\n \",\n", + " \"<|startoftext|> E : Type u,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tf : E → ℝ,\\ta : E,\\tf' : E →L[ℝ] ℝ,\\th : is_local_min f a,\\thf : has_fderiv_at f f' a,\\ty : E\\t⊢ -y ∈ pos_tangent_cone_at set.univ a\\n \",\n", + " '<|startoftext|> E : Type u_1,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tf : ℝ → E,\\ta b C : ℝ,\\thf : differentiable_on ℝ f (set.Icc a b),\\tbound : ∀ (x : ℝ), x ∈ set.Ico a b → ∥deriv_within f (set.Icc a b) x∥ ≤ C\\t⊢ ∀ (x : ℝ), x ∈ set.Icc a b → ∥f x - f a∥ ≤ C * (x - a)\\n ',\n", + " '<|startoftext|> E : Type u_1,\\t_inst_1 : normed_group E,\\t_inst_2 : normed_space ℝ E,\\tf : ℝ → E,\\ta b C : ℝ,\\thf : differentiable_on ℝ f (set.Icc a b),\\tbound : ∀ (x : ℝ), x ∈ set.Ico a b → ∥deriv_within f (set.Icc a b) x∥ ≤ C\\t⊢ ∀ (x : ℝ), x ∈ set.Icc a b → has_deriv_within_at (λ (x : ℝ), f x) (deriv_within f (set.Icc a b) x) (set.Icc a b) x\\n ',\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb)\\t⊢ ∃ (c : ℝ) (H : c ∈ set.Ioo a b), (lgb - lga) * f' c = (lfb - lfa) * g' c\\n \",\n", + " 
\"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x\\t⊢ ∃ (c : ℝ) (H : c ∈ set.Ioo a b), (lgb - lga) * f' c = (lfb - lfa) * g' c\\n \",\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x\\t⊢ filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga))\\n \",\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\tthis : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 ((lgb - lga) * lfa - (lfb - lfa) * lga))\\t⊢ filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga))\\n \",\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x\\t⊢ filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga))\\t\\tf f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\thha : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga))\\t⊢ ∃ (c : ℝ) (H : c ∈ set.Ioo a b), (lgb - lga) * f' c = (lfb - lfa) * g' c\\n \",\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\tthis : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 
((lgb - lga) * lfa - (lfb - lfa) * lga))\\t⊢ lgb * lfa - lfb * lga = (lgb - lga) * lfa - (lfb - lfa) * lga\\n \",\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\thha : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga))\\t⊢ ∃ (c : ℝ) (H : c ∈ set.Ioo a b), (lgb - lga) * f' c = (lfb - lfa) * g' c\\n \",\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\thha : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga))\\t⊢ filter.tendsto h (𝓝[set.Iio b] b) (𝓝 (lgb * lfa - lfb * lga))\\n \",\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\thha : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga)),\\tthis : filter.tendsto h (𝓝[set.Iio b] b) (𝓝 ((lgb - lga) * lfb - (lfb - lfa) * lgb))\\t⊢ filter.tendsto h (𝓝[set.Iio b] b) (𝓝 (lgb * lfa - lfb * lga))\\n \",\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\thha : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga))\\t⊢ filter.tendsto h (𝓝[set.Iio b] b) (𝓝 (lgb * lfa - lfb * lga))\\t\\tf f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\thha : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga)),\\thhb : filter.tendsto h (𝓝[set.Iio b] b) (𝓝 (lgb * lfa - lfb * lga))\\t⊢ ∃ (c : ℝ) (H : c ∈ set.Ioo a b), (lgb - lga) * f' c = (lfb - lfa) * g' c\\n \",\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' 
: ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\thha : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga)),\\tthis : filter.tendsto h (𝓝[set.Iio b] b) (𝓝 ((lgb - lga) * lfb - (lfb - lfa) * lgb))\\t⊢ lgb * lfa - lfb * lga = (lgb - lga) * lfb - (lfb - lfa) * lgb\\n \",\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\thha : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga)),\\thhb : filter.tendsto h (𝓝[set.Iio b] b) (𝓝 (lgb * lfa - lfb * lga))\\t⊢ ∃ (c : ℝ) (H : c ∈ set.Ioo a b), (lgb - lga) * f' c = (lfb - lfa) * g' c\\n \",\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\thha : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga)),\\thhb : filter.tendsto h (𝓝[set.Iio b] b) (𝓝 (lgb * lfa - lfb * lga)),\\th' : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f' x - (lfb - lfa) * g' x\\t⊢ ∃ (c : ℝ) (H : c ∈ set.Ioo a b), (lgb - lga) * f' c = (lfb - lfa) * g' c\\n \",\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\thha : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga)),\\thhb : filter.tendsto h (𝓝[set.Iio b] b) (𝓝 (lgb * lfa - lfb * lga)),\\th' : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f' x - (lfb - lfa) * g' x\\t⊢ ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at h (h' x) x\\n \",\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\thha : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga)),\\thhb : filter.tendsto h 
(𝓝[set.Iio b] b) (𝓝 (lgb * lfa - lfb * lga)),\\th' : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f' x - (lfb - lfa) * g' x,\\tx : ℝ,\\thx : x ∈ set.Ioo a b\\t⊢ has_deriv_at h (h' x) x\\n \",\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\thha : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga)),\\thhb : filter.tendsto h (𝓝[set.Iio b] b) (𝓝 (lgb * lfa - lfb * lga)),\\th' : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f' x - (lfb - lfa) * g' x\\t⊢ ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at h (h' x) x\\t\\tf f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\thha : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga)),\\thhb : filter.tendsto h (𝓝[set.Iio b] b) (𝓝 (lgb * lfa - lfb * lga)),\\th' : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f' x - (lfb - lfa) * g' x,\\thhh' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at h (h' x) x\\t⊢ ∃ (c : ℝ) (H : c ∈ set.Ioo a b), (lgb - lga) * f' c = (lfb - lfa) * g' c\\n \",\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\thha : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga)),\\thhb : filter.tendsto h (𝓝[set.Iio b] b) (𝓝 (lgb * lfa - lfb * lga)),\\th' : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f' x - (lfb - lfa) * g' x,\\thhh' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at h (h' x) x\\t⊢ ∃ (c : ℝ) (H : c ∈ set.Ioo a b), (lgb - lga) * f' c = (lfb - lfa) * g' c\\n \",\n", + " \"<|startoftext|> f f' : ℝ → ℝ,\\ta b : ℝ,\\thab : a < b,\\tg g' : ℝ → ℝ,\\tlfa lga lfb lgb : ℝ,\\thff' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at f (f' x) x,\\thgg' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at g (g' x) x,\\thfa : filter.tendsto f (𝓝[set.Ioi a] a) (𝓝 lfa),\\thga : filter.tendsto g (𝓝[set.Ioi a] a) (𝓝 lga),\\thfb : filter.tendsto f (𝓝[set.Iio b] b) (𝓝 lfb),\\thgb : filter.tendsto g (𝓝[set.Iio b] b) (𝓝 lgb),\\th : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f x - (lfb - lfa) * g x,\\thha : filter.tendsto h (𝓝[set.Ioi a] a) (𝓝 (lgb * lfa - lfb * lga)),\\thhb : filter.tendsto h (𝓝[set.Iio b] b) (𝓝 (lgb * lfa - lfb * lga)),\\th' : ℝ → ℝ := λ (x : ℝ), (lgb - lga) * f' x - (lfb - lfa) * g' x,\\thhh' : ∀ (x : ℝ), x ∈ set.Ioo a b → has_deriv_at h (h' x) x,\\tc : ℝ,\\tcmem : c ∈ set.Ioo a b,\\thc : (λ (x : ℝ), h' x) c = 0\\t⊢ ∃ (c : ℝ) (H : c ∈ set.Ioo a b), (lgb - lga) * f' c = (lfb - lfa) * g' 
c\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀\\t⊢ ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀\\t⊢ metric.ball x₀ ε ⊆ {x : H | (λ (x : H), ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥) x}\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 
< ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tx : H,\\tx_in : x ∈ metric.ball x₀ ε\\t⊢ x ∈ {x : H | (λ (x : H), ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥) x}\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tx : H,\\tx_in : x ∈ metric.ball x₀ ε\\t⊢ measure_theory.integrable (λ (a : α), F x a) μ\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) 
μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tx : H,\\tx_in : x ∈ metric.ball x₀ ε\\t⊢ measure_theory.integrable (λ (a : α), F x₀ a) μ\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tx : H,\\tx_in : x ∈ metric.ball x₀ ε\\t⊢ measure_theory.integrable (λ (a : α), F x a - F x₀ a) μ\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tx : H,\\tx_in : x ∈ metric.ball x₀ ε\\t⊢ measure_theory.integrable (λ (a : α), ⇑(F' a) (x - x₀)) μ\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) 
μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀\\t⊢ ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ has_fderiv_at (λ (x : H), ∫ (a : α), F x a ∂μ) (∫ (a : α), F' a ∂μ) x₀\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ 
metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ has_fderiv_at (λ (x : H), ∫ (a : α), F x a ∂μ) (∫ (a : α), F' a ∂μ) x₀\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ ∫ (a : α), ∥x₀ - x₀∥⁻¹ • (F x₀ a - F x₀ a - ⇑(F' a) (x₀ - x₀)) ∂μ = 0\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ filter.tendsto (λ (e : H), ∫ (a : α), ∥e - x₀∥⁻¹ • (F e a - F x₀ a - ⇑(F' a) (e - x₀)) ∂μ) (𝓝 x₀) (𝓝 (∫ (a 
: α), ∥x₀ - x₀∥⁻¹ • (F x₀ a - F x₀ a - ⇑(F' a) (x₀ - x₀)) ∂μ))\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ (𝓝 x₀).is_countably_generated\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ ∀ᶠ (n : H) in 𝓝 x₀, ae_measurable (λ (a : α), ∥n - x₀∥⁻¹ • (F n a - F x₀ a - ⇑(F' a) (n - x₀))) μ\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group 
H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ ∀ (a : H), a ∈ metric.ball x₀ ε → ae_measurable (λ (a_1 : α), ∥a - x₀∥⁻¹ • (F a a_1 - F x₀ a_1 - ⇑(F' a_1) (a - x₀))) μ\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\tx_in : x ∈ metric.ball x₀ ε\\t⊢ ae_measurable (λ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀))) μ\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : 
ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\tx_in : x ∈ metric.ball x₀ ε\\t⊢ ae_measurable (λ (i : α), F x i - F x₀ i - ⇑(F' i) (x - x₀)) μ\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ ∀ᶠ (n : H) in 𝓝 x₀, ae_measurable (λ (a : α), ∥n - x₀∥⁻¹ • (F n a - F x₀ a - ⇑(F' a) (n - x₀))) μ\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : 
measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ ae_measurable (λ (a : α), ∥x₀ - x₀∥⁻¹ • (F x₀ a - F x₀ a - ⇑(F' a) (x₀ - x₀))) μ\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ ∀ᶠ (n : H) in 𝓝 x₀, ∀ᵐ (a : α) ∂μ, ∥∥n - x₀∥⁻¹ • (F n a - F x₀ a - ⇑(F' a) (n - x₀))∥ ≤ ?m_1 a\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ measure_theory.integrable ?m_1 
μ\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ ∀ᵐ (a : α) ∂μ, filter.tendsto (λ (n : H), ∥n - x₀∥⁻¹ • (F n a - F x₀ a - ⇑(F' a) (n - x₀))) (𝓝 x₀) (𝓝 (∥x₀ - x₀∥⁻¹ • (F x₀ a - F x₀ a - ⇑(F' a) (x₀ - x₀))))\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ α → ℝ\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology 
(H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ ae_measurable (λ (a : α), ∥x₀ - x₀∥⁻¹ • (F x₀ a - F x₀ a - ⇑(F' a) (x₀ - x₀))) μ\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ ∀ᶠ (n : H) in 𝓝 x₀, ∀ᵐ (a : α) ∂μ, ∥∥n - x₀∥⁻¹ • (F n a - F x₀ a - ⇑(F' a) (n - x₀))∥ ≤ ?m_1 a\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, 
has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ measure_theory.integrable ?m_1 μ\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ ∀ᵐ (a : α) ∂μ, filter.tendsto (λ (n : H), ∥n - x₀∥⁻¹ • (F n a - F x₀ a - ⇑(F' a) (n - x₀))) (𝓝 x₀) (𝓝 (∥x₀ - x₀∥⁻¹ • (F x₀ a - F x₀ a - ⇑(F' a) (x₀ - x₀))))\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : 
α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ α → ℝ\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ ae_measurable (λ (a : α), ∥x₀ - x₀∥⁻¹ • (F x₀ a - F x₀ a - ⇑(F' a) (x₀ - x₀))) μ\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ ∀ᶠ (n : H) in 𝓝 x₀, ∀ᵐ (a : α) ∂μ, ∥∥n - x₀∥⁻¹ • (F n a - F x₀ a - ⇑(F' a) (n - x₀))∥ ≤ ?m_1 a\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : 
topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ metric.ball x₀ ε ⊆ {x : H | (λ (n : H), ∀ᵐ (a : α) ∂μ, ∥∥n - x₀∥⁻¹ • (F n a - F x₀ a - ⇑(F' a) (n - x₀))∥ ≤ ?m_1 a) x}\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε\\t⊢ x ∈ {x : H | (λ (n : H), ∀ᵐ (a : α) ∂μ, ∥∥n - x₀∥⁻¹ • (F n a - F x₀ a - ⇑(F' a) (n - x₀))∥ ≤ ?m_1 a) x}\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : 
α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε\\t⊢ ∀ (x_1 : α), (λ (x : α), has_fderiv_at (λ (x_1 : H), F x_1 x) (F' x) x₀ ∧ lipschitz_on_with (⇑real.nnabs (bound x)) (λ (x_1 : H), F x_1 x) (metric.ball x₀ ε)) x_1 → ∥∥x - x₀∥⁻¹ • (F x x_1 - F x₀ x_1 - ⇑(F' x_1) (x - x₀))∥ ≤ ?m_1 x_1\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε)\\t⊢ ∥∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀))∥ ≤ ?m_1 a\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas 
: ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε)\\t⊢ ∥∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀))∥ ≤ b a + ∥F' a∥\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε)\\t⊢ ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : 
measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε)\\t⊢ ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥\\t⊢ ∥∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀))∥ ≤ b a + ∥F' a∥\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), 
F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∀ (x : H), x ∈ metric.ball x₀ ε → ∀ (y : H), y ∈ metric.ball x₀ ε → ∥F x a - F y a∥ ≤ ↑(⇑real.nnabs (bound a)) * ∥x - y∥\\t⊢ ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥\\t⊢ ∥∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀))∥ ≤ b a + ∥F' a∥\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound 
μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥\\t⊢ ∥∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀))∥ = ∥∥x - x₀∥⁻¹ • (F x a - F x₀ a) - ∥x - x₀∥⁻¹ • ⇑(F' a) (x - x₀)∥\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥\\t⊢ ∥∥x - x₀∥⁻¹ • (F x a - F x₀ a)∥ + ∥∥x - x₀∥⁻¹ • ⇑(F' a) (x - x₀)∥ = ∥x - x₀∥⁻¹ * ∥F x a - F x₀ a∥ + ∥x - x₀∥⁻¹ * ∥⇑(F' a) (x - x₀)∥\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a 
: α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥\\t⊢ ∥∥x - x₀∥⁻¹ • (F x a - F x₀ a)∥ + ∥∥x - x₀∥⁻¹ • ⇑(F' a) (x - x₀)∥ = ∥x - x₀∥⁻¹ * ∥F x a - F x₀ a∥ + ∥x - x₀∥⁻¹ * ∥⇑(F' a) (x - x₀)∥\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥\\t⊢ 0 ≤ ∥x - x₀∥⁻¹\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - 
x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥\\t⊢ ∥x - x₀∥⁻¹ * ∥F x a - F x₀ a∥ ≤ ∥x - x₀∥⁻¹ * (b a * ∥x - x₀∥)\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥\\t⊢ ∥x - x₀∥⁻¹ * ∥⇑(F' a) (x - x₀)∥ ≤ ∥x - x₀∥⁻¹ * (∥F' a∥ * ∥x - x₀∥)\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable 
(F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥\\t⊢ ∥x - x₀∥⁻¹ * (b a * ∥x - x₀∥) + ∥x - x₀∥⁻¹ * (∥F' a∥ * ∥x - x₀∥) ≤ b a + ∥F' a∥\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥\\t⊢ ∥x - x₀∥⁻¹ * ∥⇑(F' a) (x - x₀)∥ ≤ ∥x - x₀∥⁻¹ * (∥F' a∥ * ∥x - x₀∥)\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a 
∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥\\t⊢ ∥x - x₀∥⁻¹ * (b a * ∥x - x₀∥) + ∥x - x₀∥⁻¹ * (∥F' a∥ * ∥x - x₀∥) ≤ b a + ∥F' a∥\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥\\t⊢ ∥x - x₀∥⁻¹ * (b a * ∥x - x₀∥) + ∥x - x₀∥⁻¹ * (∥F' a∥ * ∥x - x₀∥) ≤ b a + ∥F' a∥\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - 
⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥,\\th : ∥x - x₀∥ = 0\\t⊢ ∥x - x₀∥⁻¹ * (b a * ∥x - x₀∥) + ∥x - x₀∥⁻¹ * (∥F' a∥ * ∥x - x₀∥) ≤ b a + ∥F' a∥\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥,\\th : ¬∥x - x₀∥ = 0\\t⊢ ∥x - x₀∥⁻¹ * (b a * ∥x - x₀∥) + ∥x - x₀∥⁻¹ * (∥F' a∥ * ∥x - x₀∥) ≤ b a + ∥F' a∥\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : 
has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥,\\th : ∥x - x₀∥ = 0\\t⊢ ∥x - x₀∥⁻¹ * (b a * ∥x - x₀∥) + ∥x - x₀∥⁻¹ * (∥F' a∥ * ∥x - x₀∥) ≤ b a + ∥F' a∥\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ ∀ᶠ (n : H) in 𝓝 x₀, ∀ᵐ (a : α) ∂μ, ∥∥n - x₀∥⁻¹ • (F n a - F x₀ a - ⇑(F' a) (n - x₀))∥ ≤ ?m_1 a\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ measure_theory.integrable ?m_1 μ\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space 
E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ ∀ᵐ (a : α) ∂μ, filter.tendsto (λ (n : H), ∥n - x₀∥⁻¹ • (F n a - F x₀ a - ⇑(F' a) (n - x₀))) (𝓝 x₀) (𝓝 (∥x₀ - x₀∥⁻¹ • (F x₀ a - F x₀ a - ⇑(F' a) (x₀ - x₀))))\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ α → ℝ\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : 
α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥,\\tx : H,\\thx : x ∈ metric.ball x₀ ε,\\ta : α,\\tha_deriv : has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tha_bound : ∥F x a - F x₀ a∥ ≤ b a * ∥x - x₀∥,\\th : ¬∥x - x₀∥ = 0\\t⊢ ∥x - x₀∥⁻¹ * (b a * ∥x - x₀∥) + ∥x - x₀∥⁻¹ * (∥F' a∥ * ∥x - x₀∥) ≤ b a + ∥F' a∥\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ measure_theory.integrable (λ (a : α), b a + ∥F' a∥) μ\\t\\tα : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → 
ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ ∀ᵐ (a : α) ∂μ, filter.tendsto (λ (n : H), ∥n - x₀∥⁻¹ • (F n a - F x₀ a - ⇑(F' a) (n - x₀))) (𝓝 x₀) (𝓝 (∥x₀ - x₀∥⁻¹ • (F x₀ a - F x₀ a - ⇑(F' a) (x₀ - x₀))))\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a : α), F' a ∂μ) (x - x₀)∥ = ∥∫ (a : α), ∥x - x₀∥⁻¹ • (F x a - F x₀ a - ⇑(F' a) (x - x₀)) ∂μ∥\\t⊢ measure_theory.integrable (λ (a : α), b a + ∥F' a∥) μ\\n \",\n", + " \"<|startoftext|> α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\thF_int' : ∀ (x : H), x ∈ metric.ball x₀ ε → measure_theory.integrable (F x) μ,\\thF'_int : measure_theory.integrable F' μ,\\th_ball : metric.ball x₀ ε ∈ 𝓝 x₀,\\tthis : ∀ᶠ (x : H) in 𝓝 x₀, ∥x - x₀∥⁻¹ * ∥∫ (a : α), F x a ∂μ - ∫ (a : α), F x₀ a ∂μ - (⇑∫ (a 
[Notebook output omitted: this cell prints the list of Lean tactic-state strings used as training examples. Each entry begins with the "<|startoftext|>" token, hypotheses are separated by "\t", and the goal follows "⊢", e.g. "<|startoftext|> E : Type u_1,\t_inst_1 : inner_product_space ℝ E,\tc : E,\tf : times_cont_diff_bump_of_inner c\t⊢ function.support ⇑f = metric.ball c f.R". The states shown come from mathlib proofs such as differentiating a parametric integral under a dominated Lipschitz bound, the support of smooth bump functions, norm-limit lemmas for filters, and tangent_cone_at of a product set.]
| (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ y + (z - y) ∈ t\\n ',\n", + " '<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ ∥c n • (z - y)∥ < (1 / 2) ^ n\\n ',\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v)\\t⊢ ∀ (n : ℕ), ∃ (d' : F), y + d' ∈ t ∧ ∥c n • d'∥ < (1 / 2) ^ n\\t\\t𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tthis : ∀ (n : ℕ), ∃ (d' : F), y + d' ∈ t ∧ ∥c n • d'∥ < (1 / 2) ^ n\\t⊢ ⇑(linear_map.inl 𝕜 E F) v ∈ tangent_cone_at 𝕜 (s.prod t) (x, y)\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tn : ℕ,\\tz : F,\\thz : z ∈ {x : F | (λ (y_1 : F), ∥c n • (y_1 - y)∥ < (1 / 2) ^ n) x},\\thzt : z ∈ t\\t⊢ ∃ (d' : F), y + d' ∈ t ∧ ∥c n • d'∥ < (1 / 2) ^ n\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\tthis : ∀ (n : ℕ), ∃ (d' : F), y + d' ∈ t ∧ ∥c n • d'∥ < (1 / 2) ^ n\\t⊢ ⇑(linear_map.inl 𝕜 E F) v ∈ tangent_cone_at 𝕜 (s.prod t) (x, y)\\n \",\n", + " \"<|startoftext|> 𝕜 : Type u_1,\\t_inst_1 : nondiscrete_normed_field 𝕜,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space 𝕜 E,\\tF : Type u_3,\\t_inst_4 : normed_group F,\\t_inst_5 : normed_space 𝕜 F,\\tx : E,\\ts : set 
E,\\tt : set F,\\ty : F,\\tht : y ∈ closure t,\\tv : E,\\tc : ℕ → 𝕜,\\td : ℕ → E,\\thd : ∀ᶠ (n : ℕ) in filter.at_top, x + d n ∈ s,\\thc : filter.tendsto (λ (n : ℕ), ∥c n∥) filter.at_top filter.at_top,\\thy : filter.tendsto (λ (n : ℕ), c n • d n) filter.at_top (𝓝 v),\\td' : ℕ → F,\\thd' : ∀ (n : ℕ), y + d' n ∈ t ∧ ∥c n • d' n∥ < (1 / 2) ^ n\\t⊢ ⇑(linear_map.inl 𝕜 E F) v ∈ tangent_cone_at 𝕜 (s.prod t) (x, y)\\n \",\n", + " ...]" + ] + }, + "execution_count": 41, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "hard[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 248, + "id": "6a7c299a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " derangements\n", + "\n", + " substI F\n", + "\n", + " apply funext\n", + "\n", + " skip\n", + "\n", + " rw id_map\n", + "\n", + " refl\n", + "\n", + " dest : id ∘ funext rfl = funek\n", + "\n", + " refine funext (λ X, _)\n", + "\n", + " refl\n", + "\n", + " dsimp [map]\n", + "\n" + ] + } + ], + "source": [ + "for i in gen_text:\n", + " print(i.split(\"\")[-1].replace(\"<|endoftext|>\",\"\"))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2ce2de18", + "metadata": {}, + "outputs": [], + "source": [ + "1+1" + ] + }, + { + "cell_type": "code", + "execution_count": 103, + "id": "e9433d2c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[\"<|startoftext|> GOAL α : Type u,\\tb : buffer α,\\ti : ℕ,\\th : i < b.size,\\tv : α\\t⊢ b.write ⟨i, h⟩ v = b.write' i v\\n PROOFSTEP refl\\n<|endoftext|>\"]" + ] + }, + "execution_count": 103, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "gen_text = tokenizer.batch_decode(gen_tokens)\n", + "gen_text" + ] + }, + { + "cell_type": "code", + "execution_count": 140, + "id": "4e1f3095", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "files_upload/data_test.jsonl\r\n" + ] + } + ], + "source": [ + "ls files_upload/data_test.jsonl" + ] + }, + { + "cell_type": "code", + "execution_count": 143, + "id": "f0840f60", + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "\n", + "with open('files_upload/data_test.jsonl', 'r') as json_file:\n", + " json_list = [json.loads(i) for i in list(json_file)]" + ] + }, + { + "cell_type": "code", + "execution_count": 144, + "id": "7391d7b1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'prompt': \"GOAL α : Type u,\\t_inst_1 : inhabited α,\\tb : buffer α,\\ti : ℕ,\\th : i < b.size\\t⊢ b.read ⟨i, h⟩ = b.read' i\\n PROOFSTEP \",\n", + " 'completion': \"cases b; unfold read read'; simp [array.read_eq_read']\\n\"}" + ] + }, + "execution_count": 144, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "json_list[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "91f9f0d3", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/models/gpt_neo/neo_process.ipynb b/src/models/gpt_neo/neo_process.ipynb new file mode 100644 index 0000000..9bca1d8 --- /dev/null +++ 
b/src/models/gpt_neo/neo_process.ipynb @@ -0,0 +1,10267 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 40, + "id": "7c5ec191", + "metadata": {}, + "outputs": [], + "source": [ + "import glob" + ] + }, + { + "cell_type": "code", + "execution_count": 54, + "id": "91c79cf6", + "metadata": {}, + "outputs": [], + "source": [ + "from transformers import GPT2Tokenizer, GPTNeoForCausalLM,GPTNeoModel, AutoModelForCausalLM" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "id": "212de722", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n" + ] + } + ], + "source": [ + "tokenizer = GPT2Tokenizer.from_pretrained(\"EleutherAI/gpt-neo-125M\", \n", + " bos_token=\"<|startoftext|>\",\n", + " eos_token=\"<|endoftext|>\",\n", + " pad_token=\"<|pad|>\",truncation=True,padding=True)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "id": "c865648c", + "metadata": {}, + "outputs": [], + "source": [ + "# Download the pre-trained GPT-Neo model and transfer it to the GPU\n", + "model = GPTNeoForCausalLM.from_pretrained(\"results/checkpoint-140000/\")" + ] + }, + { + "cell_type": "code", + "execution_count": 102, + "id": "9e57033d", + "metadata": {}, + "outputs": [], + "source": [ + "hard_lenngth = [ (i , len(i[\"prompt\"].split())) for i in hard]" + ] + }, + { + "cell_type": "code", + "execution_count": 104, + "id": "d9bc5908", + "metadata": {}, + "outputs": [], + "source": [ + "sorted_by_second = sorted(hard_lenngth, key=lambda tup: tup[1])" + ] + }, + { + "cell_type": "code", + "execution_count": 113, + "id": "8e850d48", + "metadata": {}, + "outputs": [], + "source": [ + "hard_ones = [i[0] for i in sorted_by_second]" + ] + }, + { + "cell_type": "code", + "execution_count": 114, + "id": "a4981fc1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[{'prompt': 'GOAL ⊢ fourier_subalgebra.separates_points\\n PROOFSTEP ',\n", + " 'completion': ' intros x y hxy\\n'},\n", + " {'prompt': 'GOAL ⊢ 0.impartial\\n PROOFSTEP ',\n", + " 'completion': ' rw impartial_def\\n'},\n", + " {'prompt': 'GOAL ⊢ category_theory.is_equivalence Fintype.skeleton.incl\\n PROOFSTEP ',\n", + " 'completion': ' apply_instance\\n'},\n", + " {'prompt': 'GOAL ⊢ primrec G\\n PROOFSTEP ',\n", + " 'completion': ' have a := (primrec.of_nat (ℕ × code)).comp list_length\\n'},\n", + " {'prompt': 'GOAL ⊢ is_lawful_bifunctor sum\\n PROOFSTEP ',\n", + " 'completion': ' refine { .. }; intros; cases x; refl\\n'},\n", + " {'prompt': 'GOAL ⊢ is_lawful_bifunctor sum\\n PROOFSTEP ',\n", + " 'completion': ' refine { .. }; intros; cases x\\n'},\n", + " {'prompt': 'GOAL ⊢ is_lawful_bifunctor sum\\n PROOFSTEP ',\n", + " 'completion': ' refine { .. }; intros\\n'},\n", + " {'prompt': 'GOAL ⊢ is_lawful_bifunctor sum\\n PROOFSTEP ',\n", + " 'completion': ' refine { .. 
}\\n'},\n", + " {'prompt': 'GOAL ⊢ comm_monoid pos_num\\n PROOFSTEP ',\n", + " 'completion': ' refine_struct {mul := (*), one := (1 : pos_num), npow := @npow_rec _ ⟨1⟩ ⟨(*)⟩}\\n'},\n", + " {'prompt': 'GOAL ⊢ comm_monoid pos_num\\n PROOFSTEP ',\n", + " 'completion': ' refine_struct {mul := (*), one := (1 : pos_num), npow := @npow_rec _ ⟨1⟩ ⟨(*)⟩}; try { intros, refl }; transfer\\n'},\n", + " {'prompt': 'GOAL ⊢ comm_monoid pos_num\\n PROOFSTEP ',\n", + " 'completion': ' refine_struct {mul := (*), one := (1 : pos_num), npow := @npow_rec _ ⟨1⟩ ⟨(*)⟩}; try { intros, refl }\\n'},\n", + " {'prompt': 'GOAL ⊢ has_sub prime_multiset\\n PROOFSTEP ',\n", + " 'completion': ' dsimp [prime_multiset]\\n'},\n", + " {'prompt': 'GOAL ⊢ distrib_lattice ℝ\\n PROOFSTEP ',\n", + " 'completion': ' apply_instance\\n'},\n", + " {'prompt': 'GOAL ⊢ ring ℝ\\n PROOFSTEP ', 'completion': ' apply_instance\\n'},\n", + " {'prompt': 'GOAL ⊢ 1.arg = 0\\n PROOFSTEP ',\n", + " 'completion': ' simp [arg, zero_le_one]\\n'},\n", + " {'prompt': 'GOAL ⊢ 0.factors = list.nil\\n PROOFSTEP ',\n", + " 'completion': ' exact factors_zero\\n'},\n", + " {'prompt': 'GOAL ⊢ 1.factors = list.nil\\n PROOFSTEP ',\n", + " 'completion': ' exact factors_one\\n'},\n", + " {'prompt': 'GOAL ⊢ has_sub (multiset nat.primes)\\n PROOFSTEP ',\n", + " 'completion': ' apply_instance\\n'},\n", + " {'prompt': 'GOAL ⊢ -1 < ψ\\n PROOFSTEP ',\n", + " 'completion': ' rw [neg_lt, ← inv_gold]\\n'},\n", + " {'prompt': 'GOAL ⊢ φ⁻¹ < 1\\n PROOFSTEP ',\n", + " 'completion': ' exact inv_lt_one one_lt_gold\\n'},\n", + " {'prompt': 'GOAL ⊢ punit.star = punit.star\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL ⊢ orthonormal ℂ (fourier_Lp 2)\\n PROOFSTEP ',\n", + " 'completion': ' rw orthonormal_iff_ite\\n'},\n", + " {'prompt': 'GOAL ⊢ real.tan 0 = 0\\n PROOFSTEP ',\n", + " 'completion': ' simp [tan]\\n'},\n", + " {'prompt': 'GOAL ⊢ (int.neg_of_nat 0).nat_abs = 0\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL ⊢ nat.prime 0 → false\\n PROOFSTEP ',\n", + " 'completion': ' dec_trivial\\n'},\n", + " {'prompt': 'GOAL ⊢ 1.divisors_antidiagonal = {(1, 1)}\\n PROOFSTEP ',\n", + " 'completion': ' ext\\n'},\n", + " {'prompt': 'GOAL ⊢ real.exp = exp ℝ ℝ\\n PROOFSTEP ',\n", + " 'completion': ' refine funext (λ x, _)\\n'},\n", + " {'prompt': 'GOAL h : nat.prime 0\\t⊢ false\\n PROOFSTEP ',\n", + " 'completion': ' revert h\\n'},\n", + " {'prompt': 'GOAL k : ℕ\\t⊢ (σ k).is_multiplicative\\n PROOFSTEP ',\n", + " 'completion': ' rw [← zeta_mul_pow_eq_sigma]\\n'},\n", + " {'prompt': 'GOAL d : ℤ\\t⊢ comm_semiring (ℤ√d)\\n PROOFSTEP ',\n", + " 'completion': ' apply_instance\\n'},\n", + " {'prompt': 'GOAL z : ↥circle\\t⊢ ↑z⁻¹ = (↑z)⁻¹\\n PROOFSTEP ',\n", + " 'completion': ' rw coe_inv_circle_eq_conj\\n'},\n", + " {'prompt': 'GOAL ⊢ nat.of_digits 0 (0.digits 0) = 0\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL ⊢ nat.of_digits 1 (1.digits 0) = 0\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': \"GOAL n : num\\t⊢ ↑(n.succ'.pred') = ↑n\\n PROOFSTEP \",\n", + " 'completion': \" rw [pred'_to_nat, succ'_to_nat, nat.add_one, nat.pred_succ]\\n\"},\n", + " {'prompt': 'GOAL ⊢ rat.mk_pnat 1 ⟨1, _⟩ = 1\\n PROOFSTEP ',\n", + " 'completion': ' rw mk_pnat\\n'},\n", + " {'prompt': 'GOAL ⊢ charted_space (euclidean_half_space 1) ↥(set.Icc 0 1)\\n PROOFSTEP ',\n", + " 'completion': ' apply_instance\\n'},\n", + " {'prompt': 'GOAL y : unit\\t⊢ punit.star = y\\n PROOFSTEP ',\n", + " 'completion': ' cases 
y\\n'},\n", + " {'prompt': 'GOAL y : punit\\t⊢ punit.star = y\\n PROOFSTEP ',\n", + " 'completion': ' cases y\\n'},\n", + " {'prompt': 'GOAL r : ℝ\\t⊢ ↑0 < ⊤\\n PROOFSTEP ',\n", + " 'completion': ' simp [zero_lt_one]\\n'},\n", + " {'prompt': 'GOAL z : ↥circle\\t⊢ ⇑complex.conj ↑z = (↑z)⁻¹\\n PROOFSTEP ',\n", + " 'completion': ' apply eq_inv_of_mul_right_eq_one\\n'},\n", + " {'prompt': 'GOAL z : ↥circle\\t⊢ ↑(⇑complex.norm_sq ↑z) = 1\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL x : ℝ\\t⊢ (complex.sin ↑x).im = 0\\n PROOFSTEP ',\n", + " 'completion': ' rw [← of_real_sin_of_real_re, of_real_im]\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ (int.neg_of_nat n).nat_abs = n\\n PROOFSTEP ',\n", + " 'completion': ' cases n; refl\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ (int.neg_of_nat n.succ).nat_abs = n.succ\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ (int.neg_of_nat n).nat_abs = n\\n PROOFSTEP ',\n", + " 'completion': ' cases n\\n'},\n", + " {'prompt': 'GOAL i : ℤ\\t⊢ i.nat_abs.lcm i.nat_abs = i.nat_abs\\n PROOFSTEP ',\n", + " 'completion': ' apply nat.lcm_self\\n'},\n", + " {'prompt': 'GOAL i : ℤ\\t⊢ i.lcm i = i.nat_abs\\n PROOFSTEP ',\n", + " 'completion': ' rw int.lcm\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ list.pairwise has_lt.lt (list.range n)\\n PROOFSTEP ',\n", + " 'completion': \" simp only [range_eq_range', pairwise_lt_range']\\n\"},\n", + " {'prompt': 'GOAL b : ℕ\\t⊢ b.digits 0 = list.nil\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL ⊢ 0.factors = list.nil\\t\\t⊢ 1.factors = list.nil\\n PROOFSTEP ',\n", + " 'completion': ' { exact factors_zero }\\n'},\n", + " {'prompt': 'GOAL p : nat.primes\\t⊢ ↑p.factor_multiset = prime_multiset.of_prime p\\n PROOFSTEP ',\n", + " 'completion': ' apply factor_multiset_equiv.symm.injective\\n'},\n", + " {'prompt': 'GOAL p : nat.primes\\t⊢ ↑p.factor_multiset.prod = (prime_multiset.of_prime p).prod\\n PROOFSTEP ',\n", + " 'completion': ' rw[(p : ℕ+).prod_factor_multiset, prime_multiset.prod_of_prime]\\n'},\n", + " {'prompt': 'GOAL k : ℕ\\t⊢ (ζ * nat.arithmetic_function.pow k).is_multiplicative\\n PROOFSTEP ',\n", + " 'completion': ' apply ((is_multiplicative_zeta).mul is_multiplicative_pow)\\n'},\n", + " {'prompt': 'GOAL x y : unit\\t⊢ x = y\\n PROOFSTEP ',\n", + " 'completion': ' cases x\\n'},\n", + " {'prompt': 'GOAL x y : punit\\t⊢ x = y\\n PROOFSTEP ',\n", + " 'completion': ' cases x\\n'},\n", + " {'prompt': 'GOAL h : decidable ↥bool.ff\\t⊢ decidable.to_bool ↥bool.ff = bool.ff\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL h : decidable ↥bool.tt\\t⊢ decidable.to_bool ↥bool.tt = bool.tt\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL b : ℕ\\t⊢ 0 / b = 0\\n PROOFSTEP ', 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL a : ℕ\\t⊢ a / 1 = a\\n PROOFSTEP ', 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ (1 + n).pred = n\\n PROOFSTEP ',\n", + " 'completion': ' rw [add_comm, add_one, pred_succ]\\n'},\n", + " {'prompt': 'GOAL a b : ℝ\\t⊢ decidable (a = b)\\n PROOFSTEP ',\n", + " 'completion': ' apply_instance\\n'},\n", + " {'prompt': 'GOAL f : circle_deg1_lift\\t⊢ f.transnum_aux_seq 0 = ⇑f 0\\n PROOFSTEP ',\n", + " 'completion': ' simp [transnum_aux_seq]\\n'},\n", + " {'prompt': 'GOAL p : ℕ\\t⊢ padic_val_rat p 1 = 0\\n PROOFSTEP ',\n", + " 'completion': ' unfold padic_val_rat; split_ifs; simp *\\n'},\n", + " {'prompt': 'GOAL p : ℕ\\t⊢ padic_val_rat p 1 = 0\\n PROOFSTEP ',\n", + " 
'completion': ' unfold padic_val_rat; split_ifs\\n'},\n", + " {'prompt': 'GOAL p : ℕ\\t⊢ padic_val_rat p 1 = 0\\n PROOFSTEP ',\n", + " 'completion': ' unfold padic_val_rat\\n'},\n", + " {'prompt': 'GOAL c : cardinal\\t⊢ c + 1 ≤ c.succ\\n PROOFSTEP ',\n", + " 'completion': ' refine quot.induction_on c (λ α, _) (lt_succ_self c)\\n'},\n", + " {'prompt': 'GOAL a : cardinal\\t⊢ a * 1 = a\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL b : cardinal\\t⊢ 0 * b = 0\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL a b : ℚ\\t⊢ totally_bounded (set.Icc a b)\\n PROOFSTEP ',\n", + " 'completion': ' have := totally_bounded_preimage uniform_embedding_of_rat (totally_bounded_Icc a b)\\n'},\n", + " {'prompt': 'GOAL z : ↥circle\\t⊢ ↑z * ⇑complex.conj ↑z = 1\\n PROOFSTEP ',\n", + " 'completion': ' rw [mul_comm, ← complex.norm_sq_eq_conj_mul_self]\\n'},\n", + " {'prompt': 'GOAL ⊢ real.sin (π / 4) = real.sqrt 2 / 2\\n PROOFSTEP ',\n", + " 'completion': ' transitivity sin (π / 2 ^ 2)\\n'},\n", + " {'prompt': 'GOAL u : list unit\\t⊢ (list.repeat () u.length).length = u.length\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ nat.of_digits 0 (0.digits n) = n\\n PROOFSTEP ',\n", + " 'completion': ' cases n with n\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ nat.of_digits 0 (0.digits n.succ) = n.succ\\n PROOFSTEP ',\n", + " 'completion': ' change of_digits 0 [n+1] = n+1\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ nat.of_digits 1 (1.digits n) = n\\n PROOFSTEP ',\n", + " 'completion': ' induction n with n ih\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ n.coprime 0 ↔ n = 1\\n PROOFSTEP ',\n", + " 'completion': ' simp [coprime]\\n'},\n", + " {'prompt': 'GOAL ⊢ ∀ (a : pos_num), 1 * a = a\\n PROOFSTEP ',\n", + " 'completion': ' try { intros, refl }\\n'},\n", + " {'prompt': 'GOAL ⊢ ∀ (a : pos_num), a * 1 = a\\n PROOFSTEP ',\n", + " 'completion': ' try { intros, refl }\\n'},\n", + " {'prompt': 'GOAL ⊢ ∀ (x : pos_num), npow_rec 0 x = 1\\n PROOFSTEP ',\n", + " 'completion': ' try { intros, refl }\\n'},\n", + " {'prompt': 'GOAL ⊢ ∀ (a : pos_num), 1 * a = a\\n PROOFSTEP ',\n", + " 'completion': ' transfer\\n'},\n", + " {'prompt': 'GOAL p : nat.primes\\t⊢ ⇑(pnat.factor_multiset_equiv.symm) ↑p.factor_multiset = ⇑(pnat.factor_multiset_equiv.symm) (prime_multiset.of_prime p)\\n PROOFSTEP ',\n", + " 'completion': ' change (p : ℕ+).factor_multiset.prod = (prime_multiset.of_prime p).prod\\n'},\n", + " {'prompt': 'GOAL ⊢ ⇑equiv.perm.sign (fin_rotate (0 + 1)) = (-1) ^ 0\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': \"GOAL f : stieltjes_function,\\tc : ℝ\\t⊢ f.outer.caratheodory.measurable_set' (set.Ioi c)\\n PROOFSTEP \",\n", + " 'completion': ' apply outer_measure.of_function_caratheodory (λ t, _)\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ tactic.norm_fin.normalize_fin (n + 1) 0 0\\n PROOFSTEP ',\n", + " 'completion': ' rw normalize_fin\\n'},\n", + " {'prompt': 'GOAL x : ℝ\\t⊢ real.exp x = exp ℝ ℝ x\\n PROOFSTEP ',\n", + " 'completion': \" rw [real.exp, complex.exp_eq_exp_ℂ_ℂ, ← exp_ℝ_ℂ_eq_exp_ℂ_ℂ, exp_eq_tsum, exp_eq_tsum_field, ← re_to_complex, ← re_clm_apply, re_clm.map_tsum (exp_series_summable' (x : ℂ))]\\n\"},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : primcodable α\\t⊢ primrec ulower.down\\n PROOFSTEP ',\n", + " 'completion': ' letI : ∀ a, decidable (a ∈ set.range (encode : α → ℕ)) := decidable_range_encode _\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : primcodable α\\t⊢ primrec ulower.down\\n PROOFSTEP 
',\n", + " 'completion': ' letI : ∀ a, decidable (a ∈ set.range (encode : α → ℕ)) := decidable_range_encode _; exact subtype_mk primrec.encode\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ ⇑(fin_congr _) (⇑fin_add_flip ⟨n, _⟩) = 0\\n PROOFSTEP ',\n", + " 'completion': ' rw fin_add_flip_apply_mk_right\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : decidable_eq α\\t⊢ function.surjective list.to_finset\\n PROOFSTEP ',\n", + " 'completion': ' intro s\\n'},\n", + " {'prompt': 'GOAL b n : ℕ\\t⊢ nat.of_digits b (b.digits n) = n\\n PROOFSTEP ',\n", + " 'completion': ' cases b with b\\n'},\n", + " {'prompt': 'GOAL n b : ℕ\\t⊢ nat.of_digits b.succ (b.succ.digits n) = n\\n PROOFSTEP ',\n", + " 'completion': ' cases b with b\\n'},\n", + " {'prompt': 'GOAL n b : ℕ\\t⊢ nat.of_digits b.succ.succ (b.succ.succ.digits n) = n\\n PROOFSTEP ',\n", + " 'completion': ' apply nat.strong_induction_on n _\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\th : nat.prime n\\t⊢ n ≠ 0\\n PROOFSTEP ',\n", + " 'completion': ' rintro rfl\\n'},\n", + " {'prompt': 'GOAL decidable_prime : decidable_pred pos_num.prime,\\tn : pos_num\\t⊢ 2 ≤ ↑(n.bit0)\\n PROOFSTEP ',\n", + " 'completion': ' exact bit0_le_bit0.2 (to_nat_pos _)\\n'},\n", + " {'prompt': 'GOAL decidable_prime : decidable_pred pos_num.prime,\\tn : pos_num\\t⊢ 2 ≤ ↑(n.bit1)\\n PROOFSTEP ',\n", + " 'completion': ' exact nat.bit0_le_bit1_iff.2 (to_nat_pos _)\\n'},\n", + " {'prompt': 'GOAL a : ℝ≥0∞\\t⊢ 1⁻¹ ≤ a ↔ 1 ≤ a\\n PROOFSTEP ',\n", + " 'completion': ' rw inv_one\\n'},\n", + " {'prompt': 'GOAL a : ℝ≥0∞\\t⊢ a ≤ 1⁻¹ ↔ a ≤ 1\\n PROOFSTEP ',\n", + " 'completion': ' rw inv_one\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : semiring α\\t⊢ is_semiring_hom id\\n PROOFSTEP ',\n", + " 'completion': ' refine {..}\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : semiring α\\t⊢ is_semiring_hom id\\n PROOFSTEP ',\n", + " 'completion': ' refine {..}; intros; refl\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : semiring α\\t⊢ is_semiring_hom id\\n PROOFSTEP ',\n", + " 'completion': ' refine {..}; intros\\n'},\n", + " {'prompt': 'GOAL G : Type u_2,\\t_inst_2 : group G\\t⊢ unique ↥{vector.nil}\\n PROOFSTEP ',\n", + " 'completion': ' exact set.unique_singleton vector.nil\\n'},\n", + " {'prompt': 'GOAL m : ℕ\\t⊢ ↑(m * 0) = ↑m * ↑0\\n PROOFSTEP ',\n", + " 'completion': ' simp only [nat.cast_zero, nat.mul_zero, mul_zero]\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ 0.val = 0 % (n + 1)\\n PROOFSTEP ',\n", + " 'completion': ' norm_num\\n'},\n", + " {'prompt': 'GOAL has_reflect : _root_.has_reflect tactic.rcases_patt,\\tl : listΠ tactic.rcases_patt\\t⊢ reflected l\\n PROOFSTEP ',\n", + " 'completion': ' haveI := has_reflect; exact list.reflect l\\n'},\n", + " {'prompt': 'GOAL has_reflect : _root_.has_reflect tactic.rcases_patt,\\tl : listΠ tactic.rcases_patt\\t⊢ reflected l\\n PROOFSTEP ',\n", + " 'completion': ' haveI := has_reflect\\n'},\n", + " {'prompt': 'GOAL has_reflect : _root_.has_reflect tactic.rcases_patt,\\tl : listΣ tactic.rcases_patt\\t⊢ reflected l\\n PROOFSTEP ',\n", + " 'completion': ' haveI := has_reflect; exact list.reflect l\\n'},\n", + " {'prompt': 'GOAL has_reflect : _root_.has_reflect tactic.rcases_patt,\\tl : listΣ tactic.rcases_patt\\t⊢ reflected l\\n PROOFSTEP ',\n", + " 'completion': ' haveI := has_reflect\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : topological_space α\\t⊢ is_open ∅\\n PROOFSTEP ',\n", + " 'completion': ' exact is_open_empty\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : topological_space α\\t⊢ is_closed set.univ\\n 
PROOFSTEP ',\n", + " 'completion': ' rw [← is_open_compl_iff, compl_univ]\\n'},\n", + " {'prompt': 'GOAL V : Type u_5,\\t_inst_1 : semi_normed_group V\\t⊢ add_comm_group (uniform_space.completion V)\\n PROOFSTEP ',\n", + " 'completion': ' apply_instance\\n'},\n", + " {'prompt': 'GOAL V : Type u_5,\\t_inst_1 : semi_normed_group V\\t⊢ metric_space (uniform_space.completion V)\\n PROOFSTEP ',\n", + " 'completion': ' apply_instance\\n'},\n", + " {'prompt': 'GOAL ⊢ real.sin (π / 2 ^ 2) = real.sqrt 2 / 2\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL h : -1 ≠ 1\\t⊢ real.arcsin (-1) = -(π / 2)\\n PROOFSTEP ',\n", + " 'completion': ' simp [arcsin_of_le_neg_one] { contextual := tt }\\n'},\n", + " {'prompt': 'GOAL b : bool,\\th : decidable ↥b\\t⊢ decidable.to_bool ↥b = b\\n PROOFSTEP ',\n", + " 'completion': ' cases b\\n'},\n", + " {'prompt': 'GOAL b : bool,\\th : decidable ↥b\\t⊢ decidable.to_bool ↥b = b\\n PROOFSTEP ',\n", + " 'completion': ' cases b; refl\\n'},\n", + " {'prompt': 'GOAL α : Type ?,\\t_inst_1 : decidable_eq α\\t⊢ decidable_eq (buffer α)\\n PROOFSTEP ',\n", + " 'completion': ' tactic.mk_dec_eq_instance\\n'},\n", + " {'prompt': \"GOAL z : ℂ\\t⊢ complex.abs z ≤ abs' z.re + abs' z.im\\n PROOFSTEP \",\n", + " 'completion': ' simpa [re_add_im] using abs_add z.re (z.im * I)\\n'},\n", + " {'prompt': 'GOAL x : ℝ\\t⊢ real.tan x = real.sin x / real.cos x\\n PROOFSTEP ',\n", + " 'completion': ' rw [← of_real_inj, of_real_tan, tan_eq_sin_div_cos, of_real_div, of_real_sin, of_real_cos]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : infinite α\\t⊢ function.injective (nat_embedding_aux α)\\n PROOFSTEP ',\n", + " 'completion': ' rintro m n h\\n'},\n", + " {'prompt': 'GOAL x y : ℕ\\t⊢ ↑x = ↑y ↔ x = y\\n PROOFSTEP ',\n", + " 'completion': ' simpa only [← some_eq_coe] using part.some_inj\\n'},\n", + " {'prompt': 'GOAL a b : pos_num\\t⊢ a * b = b * a\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL u v : prime_multiset\\t⊢ (u + v).prod = u.prod * v.prod\\n PROOFSTEP ',\n", + " 'completion': ' change (coe_pnat_monoid_hom (u + v)).prod = _\\n'},\n", + " {'prompt': 'GOAL n m : ℕ+\\t⊢ (n * m).factor_multiset = n.factor_multiset + m.factor_multiset\\n PROOFSTEP ',\n", + " 'completion': ' let u := factor_multiset n\\n'},\n", + " {'prompt': 'GOAL a : ℝ≥0∞,\\th : a * ⊤ = 1\\t⊢ false\\n PROOFSTEP ',\n", + " 'completion': ' simpa [left_ne_zero_of_mul_eq_one h] using h\\n'},\n", + " {'prompt': 'GOAL a b : ℝ≥0\\t⊢ ↑a.to_real = ↑b.to_real ↔ ↑a = ↑b\\n PROOFSTEP ',\n", + " 'completion': ' simp only [coe_eq_coe, nnreal.coe_eq, coe_to_real]\\n'},\n", + " {'prompt': 'GOAL r : ℝ≥0,\\thr : r = 0\\t⊢ ↑r⁻¹ ≤ (↑r)⁻¹\\n PROOFSTEP ',\n", + " 'completion': ' simp only [hr, inv_zero, coe_zero, le_top]\\n'},\n", + " {'prompt': 'GOAL r : ℝ≥0,\\thr : ¬r = 0\\t⊢ ↑r⁻¹ ≤ (↑r)⁻¹\\n PROOFSTEP ',\n", + " 'completion': ' simp only [coe_inv hr, le_refl]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts : wseq α\\t⊢ s.think ~ s\\n PROOFSTEP ',\n", + " 'completion': ' apply think_equiv\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : measurable_space α\\t⊢ 0.total_variation = 0\\n PROOFSTEP ',\n", + " 'completion': ' simp [total_variation, to_jordan_decomposition_zero]\\n'},\n", + " {'prompt': 'GOAL pqr : multiset ℕ+\\t⊢ ADE_inequality.admissible pqr → 1 < ADE_inequality.sum_inv pqr\\n PROOFSTEP ',\n", + " 'completion': ' rw [admissible]\\n'},\n", + " {'prompt': 'GOAL d : ℤ,\\tz : ℤ√d\\t⊢ (bit1 z).im = bit0 z.im\\n PROOFSTEP ',\n", + " 'completion': ' simp 
[bit1]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tB : filter_basis α\\t⊢ B.sets ⊆ B.filter.sets\\n PROOFSTEP ',\n", + " 'completion': ' apply mem_filter_of_mem\\n'},\n", + " {'prompt': 'GOAL a b : cardinal\\t⊢ a < b.succ ↔ a ≤ b\\n PROOFSTEP ',\n", + " 'completion': ' rw [← not_le, succ_le, not_lt]\\n'},\n", + " {'prompt': 'GOAL c : cardinal\\t⊢ bit0 c < ω ↔ c < ω\\n PROOFSTEP ',\n", + " 'completion': ' simp [bit0, add_lt_omega_iff]\\n'},\n", + " {'prompt': 'GOAL x : pgame,\\tS : x.short,\\ti : x.left_moves\\t⊢ (x.move_left i).short\\n PROOFSTEP ',\n", + " 'completion': ' casesI S with _ _ _ _ L _ _ _\\n'},\n", + " {'prompt': 'GOAL s : set ordinal,\\ths : s.nonempty\\t⊢ has_Inf.Inf s ∈ s\\n PROOFSTEP ',\n", + " 'completion': ' rw Inf_eq_omin hs\\n'},\n", + " {'prompt': 'GOAL m n : ℕ\\t⊢ ↑(m * n) = ↑m * ↑n\\n PROOFSTEP ',\n", + " 'completion': ' induction n with n IH; [simp only [nat.cast_zero, nat.mul_zero, mul_zero], rw [nat.mul_succ, nat.cast_add, IH, nat.cast_succ, mul_add_one]]\\n'},\n", + " {'prompt': 'GOAL m n : ℕ\\t⊢ ↑(m * n) = ↑m * ↑n\\n PROOFSTEP ',\n", + " 'completion': ' induction n with n IH\\n'},\n", + " {'prompt': 'GOAL x : pgame,\\to : x.numeric,\\tj : x.right_moves\\t⊢ (x.move_right j).numeric\\n PROOFSTEP ',\n", + " 'completion': ' cases x with xl xr xL xR\\n'},\n", + " {'prompt': 'GOAL x y : ℍ\\t⊢ ⇑is_R_or_C.conj (has_inner.inner y x) = has_inner.inner x y\\n PROOFSTEP ',\n", + " 'completion': ' simp [inner_def, mul_comm]\\n'},\n", + " {'prompt': 'GOAL X : Type u\\t⊢ category_theory.discrete.functor (𝟙 X) = 𝟙 (category_theory.Cat.of (category_theory.discrete X))\\n PROOFSTEP ',\n", + " 'completion': ' apply functor.ext\\n'},\n", + " {'prompt': 'GOAL b : bool,\\th : decidable ↥b\\t⊢ decidable.to_bool ↥b = decidable.to_bool ↥b\\n PROOFSTEP ',\n", + " 'completion': ' congr\\n'},\n", + " {'prompt': 'GOAL x : ℂ\\t⊢ complex.tan (x * complex.I) = complex.tanh x * complex.I\\n PROOFSTEP ',\n", + " 'completion': ' rw [tan, sin_mul_I, cos_mul_I, mul_div_right_comm, tanh_eq_sinh_div_cosh]\\n'},\n", + " {'prompt': 'GOAL x : ℂ\\t⊢ x + 2 * x = 3 * x\\n PROOFSTEP ',\n", + " 'completion': ' ring\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ ⇑(fin_rotate (n + 1)) ⟨n, _⟩ = ⟨0, _⟩\\n PROOFSTEP ',\n", + " 'completion': ' dsimp [fin_rotate]\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\ti : fin (n + 2)\\t⊢ ↑(i.cast_pred) ≤ ↑i\\n PROOFSTEP ',\n", + " 'completion': ' rcases i.le_last.eq_or_lt with rfl|h\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ ↑((fin.last (n + 1)).cast_pred) ≤ ↑(fin.last (n + 1))\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\th : n = 0\\t⊢ 1 = bit1 n\\n PROOFSTEP ',\n", + " 'completion': ' subst h\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ nat.of_digits 0 [n + 1] = n + 1\\n PROOFSTEP ',\n", + " 'completion': ' dsimp [of_digits]\\n'},\n", + " {'prompt': 'GOAL b n : ℕ\\t⊢ b.digits n = list.nil ↔ n = 0\\n PROOFSTEP ',\n", + " 'completion': ' split\\n'},\n", + " {'prompt': 'GOAL b n : ℕ\\t⊢ b.digits n = list.nil → n = 0\\n PROOFSTEP ',\n", + " 'completion': ' intro h\\n'},\n", + " {'prompt': 'GOAL b n : ℕ\\t⊢ n = 0 → b.digits n = list.nil\\n PROOFSTEP ',\n", + " 'completion': ' rintro rfl\\n'},\n", + " {'prompt': 'GOAL h : 0.factors = list.nil\\t⊢ 0 = 0 ∨ 0 = 1\\n PROOFSTEP ',\n", + " 'completion': ' exact or.inl rfl\\n'},\n", + " {'prompt': 'GOAL h : 1.factors = list.nil\\t⊢ 1 = 0 ∨ 1 = 1\\n PROOFSTEP ',\n", + " 'completion': ' exact or.inr rfl\\n'},\n", + " {'prompt': 'GOAL m n : ℕ\\t⊢ m.psub n = option.none ↔ m < n\\n PROOFSTEP ',\n", + " 
'completion': ' cases s : psub m n; simp [eq_comm]\\n'},\n", + " {'prompt': 'GOAL m n : ℕ\\t⊢ m.psub n = option.none ↔ m < n\\n PROOFSTEP ',\n", + " 'completion': ' cases s : psub m n\\n'},\n", + " {'prompt': 'GOAL ⊢ ∀ (a b : pos_num), a * b = b * a\\n PROOFSTEP ',\n", + " 'completion': ' try { intros, refl }\\n'},\n", + " {'prompt': 'GOAL ⊢ ∀ (a b : pos_num), a * b = b * a\\n PROOFSTEP ',\n", + " 'completion': ' transfer\\n'},\n", + " {'prompt': 'GOAL ⊢ ∀ (a b : pos_num), a * b = b * a\\n PROOFSTEP ',\n", + " 'completion': ' intros\\n'},\n", + " {'prompt': 'GOAL decidable_prime : decidable_pred pos_num.prime,\\tn : pos_num\\t⊢ n.bit0.prime ↔ n = 1\\n PROOFSTEP ',\n", + " 'completion': ' refine nat.prime_def_min_fac.trans ((and_iff_right _).trans $ eq_comm.trans _)\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tt : ordnode α,\\th : t.dual.sized\\t⊢ t.sized\\n PROOFSTEP ',\n", + " 'completion': ' rw ← dual_dual t\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tt : ordnode α,\\th : t.dual.sized\\t⊢ t.sized\\n PROOFSTEP ',\n", + " 'completion': ' rw ← dual_dual t; exact h.dual\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tt : ordnode α,\\th : t.dual.sized\\t⊢ t.dual.dual.sized\\n PROOFSTEP ',\n", + " 'completion': ' exact h.dual\\n'},\n", + " {'prompt': 'GOAL u v : prime_multiset\\t⊢ (⇑prime_multiset.coe_pnat_monoid_hom (u + v)).prod = u.prod * v.prod\\n PROOFSTEP ',\n", + " 'completion': ' rw coe_pnat_monoid_hom.map_add\\n'},\n", + " {'prompt': 'GOAL a b : ereal,\\th : -a = -b\\t⊢ a = b\\n PROOFSTEP ',\n", + " 'completion': ' rw [←ereal.neg_neg a, h, ereal.neg_neg b]\\n'},\n", + " {'prompt': 'GOAL x : ℝ*\\t⊢ x.infinite_neg ↔ ∀ (r : ℝ), x < ↑r\\n PROOFSTEP ',\n", + " 'completion': ' rw iff_eq_eq\\n'},\n", + " {'prompt': 'GOAL x : ℝ*\\t⊢ x.infinite_neg ↔ ∀ (r : ℝ), x < ↑r\\n PROOFSTEP ',\n", + " 'completion': ' rw iff_eq_eq; refl\\n'},\n", + " {'prompt': 'GOAL x : ℝ*\\t⊢ x.infinite_neg = ∀ (r : ℝ), x < ↑r\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts : wseq α\\t⊢ (wseq.ret s).join ~ s\\n PROOFSTEP ',\n", + " 'completion': ' simp [ret]; apply think_equiv\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts : wseq α\\t⊢ (wseq.ret s).join ~ s\\n PROOFSTEP ',\n", + " 'completion': ' simp [ret]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : semiring α\\t⊢ id 0 = 0\\n PROOFSTEP ',\n", + " 'completion': ' intros\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : semiring α\\t⊢ id 1 = 1\\n PROOFSTEP ',\n", + " 'completion': ' intros\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : semiring α\\t⊢ id 0 = 0\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : semiring α\\t⊢ id 1 = 1\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL G : Type u_2,\\t_inst_2 : group G\\t⊢ unique ↥(equiv.perm.vectors_prod_eq_one G 0)\\n PROOFSTEP ',\n", + " 'completion': ' rw zero_eq\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\th : 2 ≤ n\\t⊢ (fin_rotate n).cycle_type = {n}\\n PROOFSTEP ',\n", + " 'completion': ' obtain ⟨m, rfl⟩ := exists_add_of_le h\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ ⇑equiv.perm.sign (fin_rotate (n + 1)) = (-1) ^ n\\n PROOFSTEP ',\n", + " 'completion': ' induction n with n ih\\n'},\n", + " {'prompt': 'GOAL x y z : ℤ,\\th : pythagorean_triple x y z\\t⊢ h.is_classified\\n PROOFSTEP ',\n", + " 'completion': ' by_cases h0 : int.gcd x y = 0\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tB : filter_basis α\\t⊢ filter.generate B.sets = B.filter\\n PROOFSTEP ',\n", + " 'completion': ' 
apply le_antisymm\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tB : filter_basis α\\t⊢ filter.generate B.sets ≤ B.filter\\n PROOFSTEP ',\n", + " 'completion': ' intros U U_in\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tB : filter_basis α\\t⊢ B.filter ≤ filter.generate B.sets\\n PROOFSTEP ',\n", + " 'completion': ' rw sets_iff_generate\\n'},\n", + " {'prompt': 'GOAL s : set ordinal,\\ths : s.nonempty\\t⊢ ordinal.omin s hs ∈ s\\n PROOFSTEP ',\n", + " 'completion': ' exact omin_mem _ hs\\n'},\n", + " {'prompt': 'GOAL o : onote,\\tb h : ordinal\\t⊢ 0.repr < ω ^ h\\n PROOFSTEP ',\n", + " 'completion': ' exact power_pos _ omega_pos\\n'},\n", + " {'prompt': 'GOAL ⊢ continuous_at (λ (p : ereal × ereal), p.fst + p.snd) (⊤, ⊤)\\n PROOFSTEP ',\n", + " 'completion': ' simp only [continuous_at, tendsto_nhds_top_iff_real, top_add, nhds_prod_eq]\\n'},\n", + " {'prompt': 'GOAL x y : ↥circle,\\thxy : x ≠ y\\t⊢ fourier 1 ∈ ↑fourier_subalgebra\\n PROOFSTEP ',\n", + " 'completion': ' exact subset_adjoin ⟨1, rfl⟩\\n'},\n", + " {'prompt': 'GOAL z : ℂ\\t⊢ ↑(z.im) = (z - ⇑complex.conj z) / (2 * complex.I)\\n PROOFSTEP ',\n", + " 'completion': \" simp only [sub_conj, of_real_mul, of_real_one, of_real_bit0, mul_right_comm, mul_div_cancel_left _ (mul_ne_zero two_ne_zero' I_ne_zero : 2 * I ≠ 0)]\\n\"},\n", + " {'prompt': 'GOAL ⊢ 1 / 10 ^ 10 + 2244083 / 825552 < 13591409143 / 5000000000\\n PROOFSTEP ',\n", + " 'completion': ' norm_num\\n'},\n", + " {'prompt': 'GOAL a b : ℤ\\t⊢ (a + b) % b = a % b\\n PROOFSTEP ',\n", + " 'completion': ' have := add_mul_mod_self_left a b 1; rwa mul_one at this\\n'},\n", + " {'prompt': 'GOAL a b : ℤ\\t⊢ (a + b) % b = a % b\\n PROOFSTEP ',\n", + " 'completion': ' have := add_mul_mod_self_left a b 1\\n'},\n", + " {'prompt': 'GOAL m n : ℕ\\t⊢ list.range m <+ list.range n ↔ m ≤ n\\n PROOFSTEP ',\n", + " 'completion': \" simp only [range_eq_range', range'_sublist_right]\\n\"},\n", + " {'prompt': 'GOAL α : Type u,\\tl : list α\\t⊢ list.map prod.fst l.revzip = l\\n PROOFSTEP ',\n", + " 'completion': ' rw [← unzip_left, unzip_revzip]\\n'},\n", + " {'prompt': 'GOAL a : ℕ,\\tᾰ : a / 1 = a\\t⊢ 1 = 1\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL b n : ℕ,\\th : b.digits n = list.nil\\t⊢ n = 0\\n PROOFSTEP ',\n", + " 'completion': ' have : of_digits b (digits b n) = of_digits b []\\n'},\n", + " {'prompt': 'GOAL m n : ℕ,\\ts : m.psub n = option.none\\t⊢ m < n\\n PROOFSTEP ',\n", + " 'completion': ' refine lt_of_not_ge (λ h, _)\\n'},\n", + " {'prompt': 'GOAL m n : ℕ,\\ts : m.psub n = option.none\\t⊢ m < n\\n PROOFSTEP ',\n", + " 'completion': ' show m < n\\n'},\n", + " {'prompt': 'GOAL u v : prime_multiset\\t⊢ (⇑prime_multiset.coe_pnat_monoid_hom u + ⇑prime_multiset.coe_pnat_monoid_hom v).prod = u.prod * v.prod\\n PROOFSTEP ',\n", + " 'completion': ' exact multiset.prod_add _ _\\n'},\n", + " {'prompt': 'GOAL a : ℝ≥0∞,\\th : a * ⊤ = 1\\t⊢ a = ⊤⁻¹\\n PROOFSTEP ',\n", + " 'completion': ' have : false\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts : seq α\\t⊢ s.head = prod.fst <$> s.destruct\\n PROOFSTEP ',\n", + " 'completion': ' unfold destruct head; cases nth s 0; refl\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts : seq α\\t⊢ s.head = prod.fst <$> s.destruct\\n PROOFSTEP ',\n", + " 'completion': ' unfold destruct head; cases nth s 0\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts : seq α\\t⊢ s.head = prod.fst <$> s.destruct\\n PROOFSTEP ',\n", + " 'completion': ' unfold destruct head\\n'},\n", + " {'prompt': 'GOAL R : Type u_1,\\t_inst_1 : comm_ring R\\t⊢ ↑ζ * ↑μ = 1\\n 
PROOFSTEP ',\n", + " 'completion': ' rw [mul_comm, coe_moebius_mul_coe_zeta]\\n'},\n", + " {'prompt': 'GOAL a : ℕ × ℕ\\t⊢ a ∈ 1.divisors_antidiagonal ↔ a ∈ {(1, 1)}\\n PROOFSTEP ',\n", + " 'completion': ' simp [nat.mul_eq_one_iff, prod.ext_iff]\\n'},\n", + " {'prompt': 'GOAL R : Type u_5,\\t_inst_5 : integral_domain R\\t⊢ primitive_roots 1 R = {1}\\n PROOFSTEP ',\n", + " 'completion': ' apply finset.eq_singleton_iff_unique_mem.2\\n'},\n", + " {'prompt': 'GOAL R : Type u_5,\\t_inst_5 : integral_domain R\\t⊢ 1 ∈ primitive_roots 1 R\\n PROOFSTEP ',\n", + " 'completion': ' simp only [is_primitive_root.one_right_iff, mem_primitive_roots zero_lt_one]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : comm_cancel_monoid_with_zero α\\t⊢ wf_dvd_monoid α → well_founded has_lt.lt\\n PROOFSTEP ',\n", + " 'completion': ' apply wf_dvd_monoid.well_founded_associates\\n'},\n", + " {'prompt': 'GOAL a : cardinal,\\tb : cardinal\\t⊢ a.lift = b.lift ↔ a.lift.lift = b.lift.lift\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL c : cardinal,\\th : ω ≤ c\\t⊢ c + c ≤ c\\n PROOFSTEP ',\n", + " 'completion': \" simpa only [nat.cast_bit0, nat.cast_one, mul_eq_self h, two_mul] using mul_le_mul_right' ((nat_lt_omega 2).le.trans h) c\\n\"},\n", + " {'prompt': 'GOAL i j : ℤ,\\th : ¬i = j\\t⊢ -i + j ≠ 0\\n PROOFSTEP ',\n", + " 'completion': ' rw add_comm\\n'},\n", + " {'prompt': 'GOAL i j : ℤ,\\th : ¬i = j\\t⊢ j + -i ≠ 0\\n PROOFSTEP ',\n", + " 'completion': ' exact sub_ne_zero.mpr (ne.symm h)\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\tk : fin (n + 1)\\t⊢ k + 0 = k\\n PROOFSTEP ',\n", + " 'completion': ' simp [eq_iff_veq, add_def, mod_eq_of_lt (is_lt k)]\\n'},\n", + " {'prompt': 'GOAL n m : ℕ,\\th : n.succ ≤ m.succ\\t⊢ ⇑(fin.cast_le h) 0 = 0\\n PROOFSTEP ',\n", + " 'completion': ' simp [eq_iff_veq]\\n'},\n", + " {'prompt': 'GOAL a b : ℕ,\\tᾰ : a / b.succ.succ = a\\t⊢ a = 0\\n PROOFSTEP ',\n", + " 'completion': ' have : a / (b + 2) ≤ a / 2 := div_le_div_left (by simp) dec_trivial\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ ↑n + 1 + 0 * 0 = n + 1\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ ¬n % 2 = 1 ↔ n % 2 = 0\\n PROOFSTEP ',\n", + " 'completion': ' cases mod_two_eq_zero_or_one n with h h; simp [h]\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ ¬n % 2 = 1 ↔ n % 2 = 0\\n PROOFSTEP ',\n", + " 'completion': ' cases mod_two_eq_zero_or_one n with h h\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ n.factors = list.nil → n = 0 ∨ n = 1\\n PROOFSTEP ',\n", + " 'completion': ' intro h\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ n = 0 ∨ n = 1 → n.factors = list.nil\\n PROOFSTEP ',\n", + " 'completion': ' intro h\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ n.factors = list.nil ↔ n = 0 ∨ n = 1\\n PROOFSTEP ',\n", + " 'completion': ' split\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ n.factors = list.nil ↔ n = 0 ∨ n = 1\\n PROOFSTEP ',\n", + " 'completion': ' split; intro h\\n'},\n", + " {'prompt': 'GOAL decidable_prime : decidable_pred pos_num.prime,\\tn : pos_num\\t⊢ ↑(n.bit0) = ↑(n.bit0).min_fac ↔ n = 1\\n PROOFSTEP ',\n", + " 'completion': ' rw [← min_fac_to_nat, to_nat_inj]\\n'},\n", + " {'prompt': 'GOAL decidable_prime : decidable_pred pos_num.prime,\\tn : pos_num\\t⊢ n.bit0 = n.bit0.min_fac ↔ n = 1\\n PROOFSTEP ',\n", + " 'completion': ' exact ⟨bit0.inj, congr_arg _⟩\\n'},\n", + " {'prompt': 'GOAL decidable_prime : decidable_pred pos_num.prime,\\tn : pos_num\\t⊢ n.bit1.prime ↔ n.bit1.min_fac_aux ↑n 1 = n.bit1\\n PROOFSTEP ',\n", + " 'completion': ' refine nat.prime_def_min_fac.trans 
((and_iff_right _).trans _)\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\th : 1 < 0\\t⊢ false\\n PROOFSTEP ',\n", + " 'completion': ' exact nat.not_lt_zero 1 h\\n'},\n", + " {'prompt': 'GOAL ⊢ {num := 1, denom := 1, pos := _, cop := _} = 1\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL a b : ℝ≥0∞,\\th : a * b = 1\\t⊢ a = b⁻¹\\n PROOFSTEP ',\n", + " 'completion': ' rcases eq_or_ne b ∞ with rfl|hb\\n'},\n", + " {'prompt': 'GOAL a : ℝ≥0∞,\\tb : ℝ≥0,\\th : a ≤ ↑b\\t⊢ ↑(a.to_nnreal) ≤ ↑b\\n PROOFSTEP ',\n", + " 'completion': ' have : ↑a.to_nnreal = a := ennreal.coe_to_nnreal (lt_of_le_of_lt h coe_lt_top).ne\\n'},\n", + " {'prompt': 'GOAL x y : ℝ≥0\\t⊢ ⇑nnreal.sqrt (x * y) = ⇑nnreal.sqrt x * ⇑nnreal.sqrt y\\n PROOFSTEP ',\n", + " 'completion': ' rw [sqrt_eq_iff_sq_eq, mul_mul_mul_comm, mul_self_sqrt, mul_self_sqrt]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : encodable α,\\ts : set α\\t⊢ encodable ↥s\\n PROOFSTEP ',\n", + " 'completion': ' apply_instance\\n'},\n", + " {'prompt': 'GOAL l : list char,\\ts : string\\t⊢ l.as_string = s ↔ l = s.to_list\\n PROOFSTEP ',\n", + " 'completion': ' rw [←as_string_inv_to_list s, list.as_string_inj, as_string_inv_to_list s]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tf g : equiv.perm α\\t⊢ f.disjoint g → g.disjoint f\\n PROOFSTEP ',\n", + " 'completion': ' simp only [disjoint, or.comm, imp_self]\\n'},\n", + " {'prompt': 'GOAL x y : clifford_algebra clifford_algebra_complex.Q\\t⊢ ⇑clifford_algebra_complex.equiv (x * y) = ⇑clifford_algebra_complex.equiv (y * x)\\n PROOFSTEP ',\n", + " 'completion': ' rw [alg_equiv.map_mul, mul_comm, alg_equiv.map_mul]\\n'},\n", + " {'prompt': 'GOAL a : ℝ,\\tn : ℕ\\t⊢ ↑n = ⇑measure_theory.measure_space.volume (set.Ioo a (a + ↑n))\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': \"GOAL pqr : multiset ℕ+,\\tH : ADE_inequality.E' 3 = pqr\\t⊢ 1 < ADE_inequality.sum_inv pqr\\n PROOFSTEP \",\n", + " 'completion': \" rw [← H, E', sum_inv_pqr]\\n\"},\n", + " {'prompt': \"GOAL pqr : multiset ℕ+,\\tH : ADE_inequality.E' 4 = pqr\\t⊢ 1 < ADE_inequality.sum_inv pqr\\n PROOFSTEP \",\n", + " 'completion': \" rw [← H, E', sum_inv_pqr]\\n\"},\n", + " {'prompt': \"GOAL pqr : multiset ℕ+,\\tH : ADE_inequality.E' 5 = pqr\\t⊢ 1 < ADE_inequality.sum_inv pqr\\n PROOFSTEP \",\n", + " 'completion': \" rw [← H, E', sum_inv_pqr]\\n\"},\n", + " {'prompt': 'GOAL d : ℤ,\\tm n : ℕ\\t⊢ ↑m = ↑n → m = n\\n PROOFSTEP ',\n", + " 'completion': ' simp [ext]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : complete_lattice α,\\th : complete_lattice.is_Sup_finite_compact α\\t⊢ complete_lattice.is_sup_closed_compact α\\n PROOFSTEP ',\n", + " 'completion': ' intros s hne hsc\\n'},\n", + " {'prompt': 'GOAL α : Type ?,\\tA B : semilattice_sup α,\\tx y : α\\t⊢ Prop\\n PROOFSTEP ',\n", + " 'completion': ' haveI := A\\n'},\n", + " {'prompt': 'GOAL α : Type ?,\\tA B : semilattice_sup α,\\tx y : α\\t⊢ Prop\\n PROOFSTEP ',\n", + " 'completion': ' haveI := A; exact x ≤ y\\n'},\n", + " {'prompt': 'GOAL a b : ℤ\\t⊢ ↑(a.gcd b) * ↑(a.lcm b) = ⇑normalize (a * b)\\n PROOFSTEP ',\n", + " 'completion': ' rw [← int.coe_nat_mul, gcd_mul_lcm, coe_nat_abs_eq_normalize]\\n'},\n", + " {'prompt': 'GOAL i : ℤ\\t⊢ 0 < i → omega.symmod i (i + 1) = -1\\n PROOFSTEP ',\n", + " 'completion': ' intro h1\\n'},\n", + " {'prompt': 'GOAL has_reflect : _root_.has_reflect tactic.rcases_patt,\\tl : listΠ tactic.rcases_patt,\\t_inst : _root_.has_reflect tactic.rcases_patt\\t⊢ reflected l\\n PROOFSTEP ',\n", + " 
'completion': ' exact list.reflect l\\n'},\n", + " {'prompt': 'GOAL has_reflect : _root_.has_reflect tactic.rcases_patt,\\tl : listΣ tactic.rcases_patt,\\t_inst : _root_.has_reflect tactic.rcases_patt\\t⊢ reflected l\\n PROOFSTEP ',\n", + " 'completion': ' exact list.reflect l\\n'},\n", + " {'prompt': 'GOAL X : Type u_1,\\t_inst_1 : topological_space X,\\t_inst_2 : (filter.cocompact X).ne_bot\\t⊢ dense_range coe\\n PROOFSTEP ',\n", + " 'completion': ' rw [dense_range, ← compl_infty]\\n'},\n", + " {'prompt': 'GOAL X : Type u_1,\\t_inst_1 : topological_space X,\\t_inst_2 : (filter.cocompact X).ne_bot\\t⊢ dense {∞}ᶜ\\n PROOFSTEP ',\n", + " 'completion': ' exact dense_compl_singleton _\\n'},\n", + " {'prompt': 'GOAL ⊢ real.cos (π / 16) = real.sqrt (2 + real.sqrt (2 + real.sqrt 2)) / 2\\n PROOFSTEP ',\n", + " 'completion': ' transitivity cos (π / 2 ^ 4)\\n'},\n", + " {'prompt': 'GOAL h : -1 ≠ 1\\t⊢ 1 / real.sqrt (1 - (-1) ^ 2) = 0\\n PROOFSTEP ',\n", + " 'completion': ' simp [arcsin_of_le_neg_one] { contextual := tt }\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tb : buffer α,\\ta : α\\t⊢ b.size < (b.push_back a).size\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL x : ℂ\\t⊢ complex.sinh (2 * x) = 2 * complex.sinh x * complex.cosh x\\n PROOFSTEP ',\n", + " 'completion': ' rw [two_mul, sinh_add]\\n'},\n", + " {'prompt': 'GOAL K : Type u_1,\\t_inst_1 : is_R_or_C K,\\tz : K\\t⊢ ∥⇑is_R_or_C.conj z∥ = ∥z∥\\n PROOFSTEP ',\n", + " 'completion': ' simp only [←sqrt_norm_sq_eq_norm, norm_sq_conj]\\n'},\n", + " {'prompt': 'GOAL K : Type u_1,\\t_inst_1 : is_R_or_C K,\\tq : ℚ\\t⊢ ⇑is_R_or_C.re ↑q = ↑q\\n PROOFSTEP ',\n", + " 'completion': ' rw [← of_real_rat_cast, of_real_re]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : decidable_eq α,\\ts : finset α\\t⊢ s.val.to_finset = s\\n PROOFSTEP ',\n", + " 'completion': ' ext\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tl : list α\\t⊢ list.nil ~r l ↔ list.nil = l\\n PROOFSTEP ',\n", + " 'completion': ' rw [is_rotated_comm, is_rotated_nil_iff, eq_comm]\\n'},\n", + " {'prompt': 'GOAL a b c : ℕ,\\th : a + b ≤ c\\t⊢ a ≤ c\\n PROOFSTEP ',\n", + " 'completion': ' refine le_trans _ h\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\th : n.factors = list.nil\\t⊢ n = 0 ∨ n = 1\\n PROOFSTEP ',\n", + " 'completion': ' rcases n with (_ | _ | n)\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\th : n.succ.succ.factors = list.nil\\t⊢ n.succ.succ = 0 ∨ n.succ.succ = 1\\n PROOFSTEP ',\n", + " 'completion': ' rw factors at h\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\th : n = 0 ∨ n = 1\\t⊢ n.factors = list.nil\\n PROOFSTEP ',\n", + " 'completion': ' rcases h with (rfl | rfl)\\n'},\n", + " {'prompt': 'GOAL m n val : ℕ,\\ts : m.psub n = option.some val\\t⊢ n ≤ m\\n PROOFSTEP ',\n", + " 'completion': ' rw ← psub_eq_some.1 s\\n'},\n", + " {'prompt': 'GOAL m n val : ℕ,\\ts : m.psub n = option.some val\\t⊢ n ≤ m\\n PROOFSTEP ',\n", + " 'completion': ' show n ≤ m\\n'},\n", + " {'prompt': 'GOAL size_eq_nat_size : ∀ (n : pos_num), ↑(n.size) = n.nat_size,\\tn : pos_num\\t⊢ ↑(n.bit0.size) = n.bit0.nat_size\\n PROOFSTEP ',\n", + " 'completion': ' rw [size, succ_to_nat, nat_size, size_eq_nat_size]\\n'},\n", + " {'prompt': 'GOAL size_eq_nat_size : ∀ (n : pos_num), ↑(n.size) = n.nat_size,\\tn : pos_num\\t⊢ ↑(n.bit1.size) = n.bit1.nat_size\\n PROOFSTEP ',\n", + " 'completion': ' rw [size, succ_to_nat, nat_size, size_eq_nat_size]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : semiring α,\\tn : num\\t⊢ ↑(n.bit1) = bit1 ↑n\\n PROOFSTEP ',\n", + " 'completion': ' rw [← 
bit1_of_bit1, _root_.bit1, bit0_of_bit0, cast_add, cast_bit0]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : semiring α,\\tn : num\\t⊢ ↑(n.bit1) = bit1 ↑n\\n PROOFSTEP ',\n", + " 'completion': ' rw [← bit1_of_bit1, _root_.bit1, bit0_of_bit0, cast_add, cast_bit0]; refl\\n'},\n", + " {'prompt': 'GOAL to_of_int : ∀ (n : ℤ), ↑↑n = n,\\tn : ℕ\\t⊢ ↑↑↑n = ↑n\\n PROOFSTEP ',\n", + " 'completion': ' rw [int.cast_coe_nat, ← num.of_nat_to_znum, num.cast_to_znum, ← num.cast_to_nat, int.nat_cast_eq_coe_nat, num.to_of_nat]\\n'},\n", + " {'prompt': 'GOAL k m n : ℕ+\\t⊢ k.coprime m → k.coprime n → k.coprime (m * n)\\n PROOFSTEP ',\n", + " 'completion': ' rw ← coprime_coe\\n'},\n", + " {'prompt': 'GOAL k m n : ℕ+\\t⊢ ↑k.coprime ↑m → k.coprime n → k.coprime (m * n)\\n PROOFSTEP ',\n", + " 'completion': ' rw ← coprime_coe\\n'},\n", + " {'prompt': 'GOAL k m n : ℕ+\\t⊢ ↑k.coprime ↑m → ↑k.coprime ↑n → k.coprime (m * n)\\n PROOFSTEP ',\n", + " 'completion': ' rw ← coprime_coe\\n'},\n", + " {'prompt': 'GOAL k m n : ℕ+\\t⊢ ↑k.coprime ↑m → ↑k.coprime ↑n → ↑k.coprime ↑(m * n)\\n PROOFSTEP ',\n", + " 'completion': ' rw ← coprime_coe\\n'},\n", + " {'prompt': 'GOAL k m n : ℕ+\\t⊢ ↑k.coprime ↑m → ↑k.coprime ↑n → ↑k.coprime ↑(m * n)\\n PROOFSTEP ',\n", + " 'completion': ' rw mul_coe\\n'},\n", + " {'prompt': 'GOAL k m n : ℕ+\\t⊢ ↑k.coprime ↑m → ↑k.coprime ↑n → ↑k.coprime (↑m * ↑n)\\n PROOFSTEP ',\n", + " 'completion': ' apply nat.coprime.mul_right\\n'},\n", + " {'prompt': 'GOAL k m n : ℕ+\\t⊢ k.coprime m → k.coprime n → k.coprime (m * n)\\n PROOFSTEP ',\n", + " 'completion': ' repeat {rw ← coprime_coe}\\n'},\n", + " {'prompt': 'GOAL b c : ℕ\\t⊢ ↑(b * c / b) = ↑(b * c) / ↑b\\n PROOFSTEP ',\n", + " 'completion': ' simp only [mul_comm b, nat.mul_div_assoc c (dvd_refl b), nat.cast_mul, mul_div_assoc, coe_nat_div_self]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts t : wseq α\\t⊢ (s.append t).destruct = s.destruct.bind (wseq.destruct_append.aux t)\\n PROOFSTEP ',\n", + " 'completion': ' apply eq_of_bisim (λ c1 c2, ∃ s t, c1 = destruct (append s t) ∧ c2 = (destruct s).bind (destruct_append.aux t)) _ ⟨s, t, rfl, rfl⟩\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\ts : set α\\t⊢ (⋃ (h : s.nonempty), s) = s\\n PROOFSTEP ',\n", + " 'completion': ' rw [Union_nonempty_index, bUnion_self]\\n'},\n", + " {'prompt': 'GOAL G : Type u_1,\\t_inst_1 : group G,\\tH : subgroup G\\t⊢ ⁅⊥,H⁆ ≤ ⊥\\n PROOFSTEP ',\n", + " 'completion': ' exact general_commutator_le_left ⊥ H\\n'},\n", + " {'prompt': 'GOAL G : Type u_1,\\t_inst_1 : group G,\\tH : subgroup G\\t⊢ ⁅⊥,H⁆ = ⊥\\n PROOFSTEP ',\n", + " 'completion': ' rw eq_bot_iff\\n'},\n", + " {'prompt': 'GOAL H : subgroup ↥(alternating_group (fin 5))\\t⊢ H.normal → H = ⊥ ∨ H = ⊤\\n PROOFSTEP ',\n", + " 'completion': ' introI Hn\\n'},\n", + " {'prompt': 'GOAL p : ℕ,\\th : ¬(1 ≠ 0 ∧ p ≠ 1)\\t⊢ 0 = 0\\n PROOFSTEP ',\n", + " 'completion': ' simp *\\n'},\n", + " {'prompt': 'GOAL ⊢ ∀ (a b : ℤ), ↑(a.gcd b) * ↑(a.lcm b) = ⇑normalize (a * b)\\n PROOFSTEP ',\n", + " 'completion': ' intros\\n'},\n", + " {'prompt': 'GOAL ⊢ ∀ (a b : ℤ), ↑(a.gcd b) * ↑(a.lcm b) = ⇑normalize (a * b)\\n PROOFSTEP ',\n", + " 'completion': ' intros; rw [← int.coe_nat_mul, gcd_mul_lcm, coe_nat_abs_eq_normalize]\\n'},\n", + " {'prompt': 'GOAL D : Type u,\\t_inst_4 : integral_domain D,\\t_inst_5 : unique_factorization_monoid D\\t⊢ unique_factorization_monoid (polynomial D)\\n PROOFSTEP ',\n", + " 'completion': ' haveI := arbitrary (normalization_monoid D)\\n'},\n", + " {'prompt': \"GOAL R : Type u_1,\\t_inst_2 : integral_domain R\\t⊢ 
polynomial.cyclotomic' 1 R = polynomial.X - 1\\n PROOFSTEP \",\n", + " 'completion': \" simp only [cyclotomic', finset.prod_singleton, ring_hom.map_one, is_primitive_root.primitive_roots_one]\\n\"},\n", + " {'prompt': 'GOAL a b : ordinal\\t⊢ a * (b + 1) = a * b + a\\n PROOFSTEP ',\n", + " 'completion': ' simp only [mul_add, mul_one]\\n'},\n", + " {'prompt': 'GOAL o : onote,\\tb : ordinal,\\th : o.NF_below b\\t⊢ o.repr < ω ^ b\\n PROOFSTEP ',\n", + " 'completion': ' induction h with _ e n a eb b h₁ h₂ h₃ _ IH\\n'},\n", + " {'prompt': 'GOAL v : ℕ → ℤ,\\tis js : list ℤ\\t⊢ js.length ≤ linear_order.max is.length js.length\\n PROOFSTEP ',\n", + " 'completion': ' apply le_max_right\\n'},\n", + " {'prompt': 'GOAL i : ℤ,\\th1 : 0 < i\\t⊢ omega.symmod i (i + 1) = -1\\n PROOFSTEP ',\n", + " 'completion': ' unfold symmod\\n'},\n", + " {'prompt': 'GOAL i : ℤ,\\th1 : 0 < i\\t⊢ ¬2 * i < i + 1\\n PROOFSTEP ',\n", + " 'completion': ' have h2 : 2 * i = (1 + 1) * i := rfl\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : emetric_space α,\\t_inst_2 : complete_space α\\t⊢ complete_space (topological_space.closeds α)\\n PROOFSTEP ',\n", + " 'completion': ' let B : ℕ → ℝ≥0∞ := λ n, (2⁻¹)^n\\n'},\n", + " {'prompt': 'GOAL i n : ℕ,\\th0 : n ≠ 0,\\thi : i.coprime n\\t⊢ ↑n ≠ 0\\n PROOFSTEP ',\n", + " 'completion': ' exact_mod_cast h0\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\tc : composition n,\\tj : fin n\\t⊢ c.size_up_to ↑(c.index j) ≤ ↑j\\n PROOFSTEP ',\n", + " 'completion': ' exact size_up_to_index_le _ _\\n'},\n", + " {'prompt': 'GOAL x : ℂ\\t⊢ complex.cos (2 * x) = 2 * complex.cos x ^ 2 - 1\\n PROOFSTEP ',\n", + " 'completion': \" rw [cos_two_mul', eq_sub_iff_add_eq.2 (sin_sq_add_cos_sq x), ← sub_add, sub_add_eq_add_sub, two_mul]\\n\"},\n", + " {'prompt': \"GOAL z z' : ℂ\\t⊢ ![(z + z').re, (z + z').im] = ![z.re, z.im] + ![z'.re, z'.im]\\n PROOFSTEP \",\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL l u n : ℕ\\t⊢ n ∈ finset.Ico l u ↔ n ∈ set.Ico l u\\n PROOFSTEP ',\n", + " 'completion': ' simp only [mem_Ico, finset.Ico.mem]\\n'},\n", + " {'prompt': 'GOAL a b : ℤ,\\tH : b ≠ 0\\t⊢ a * b / b = a\\n PROOFSTEP ',\n", + " 'completion': ' have := int.add_mul_div_right 0 a H; rwa [zero_add, int.zero_div, zero_add] at this\\n'},\n", + " {'prompt': 'GOAL a b : ℤ,\\tH : b ≠ 0\\t⊢ a * b / b = a\\n PROOFSTEP ',\n", + " 'completion': ' have := int.add_mul_div_right 0 a H\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\txs : lazy_list α\\t⊢ xs.append (λ («_» : unit), lazy_list.nil) = xs\\n PROOFSTEP ',\n", + " 'completion': ' induction xs\\n'},\n", + " {'prompt': \"GOAL α : Type u,\\tl l' : list α\\t⊢ l ∈ l'.cyclic_permutations ↔ l ~r l'\\n PROOFSTEP \",\n", + " 'completion': ' split\\n'},\n", + " {'prompt': \"GOAL α : Type u,\\tl l' : list α\\t⊢ l ∈ l'.cyclic_permutations → l ~r l'\\n PROOFSTEP \",\n", + " 'completion': ' intro h\\n'},\n", + " {'prompt': \"GOAL α : Type u,\\tl l' : list α\\t⊢ l ~r l' → l ∈ l'.cyclic_permutations\\n PROOFSTEP \",\n", + " 'completion': ' intro h\\n'},\n", + " {'prompt': 'GOAL a b : ℕ,\\tᾰ : a / b.succ.succ = a\\t⊢ 2 ≤ b + 2\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL n k m : ℕ\\t⊢ (n * k).dist (m * k) = n.dist m * k\\n PROOFSTEP ',\n", + " 'completion': ' rw [dist.def, dist.def, right_distrib, nat.mul_sub_right_distrib, nat.mul_sub_right_distrib]\\n'},\n", + " {'prompt': 'GOAL n m : ℕ,\\thnm : n ≤ m\\t⊢ m = n + (m - n)\\n PROOFSTEP ',\n", + " 'completion': ' exact (nat.add_sub_of_le hnm).symm\\n'},\n", + " {'prompt': 'GOAL m n : ℕ,\\th : n ≤ m\\t⊢ 
(even m ↔ even n) = ?m_1\\n PROOFSTEP ',\n", + " 'completion': ' rw [←nat.sub_add_cancel h, even_add]\\n'},\n", + " {'prompt': 'GOAL m n : ℕ,\\ts : m.psub n = option.none,\\th : m ≥ n\\t⊢ false\\n PROOFSTEP ',\n", + " 'completion': ' cases le.dest h with k e\\n'},\n", + " {'prompt': 'GOAL ⊢ ∀ (n : ℕ) (x : pos_num), npow_rec n.succ x = x * npow_rec n x\\n PROOFSTEP ',\n", + " 'completion': ' try { intros, refl }\\n'},\n", + " {'prompt': 'GOAL decidable_prime : decidable_pred pos_num.prime,\\tn : pos_num\\t⊢ n.bit1.min_fac = n.bit1 ↔ n.bit1.min_fac_aux ↑n 1 = n.bit1\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL decidable_prime : decidable_pred pos_num.prime,\\tn : pos_num\\t⊢ ↑(n.bit1).min_fac = ↑(n.bit1) ↔ n.bit1.min_fac_aux ↑n 1 = n.bit1\\n PROOFSTEP ',\n", + " 'completion': ' rw [← min_fac_to_nat, to_nat_inj]\\n'},\n", + " {'prompt': 'GOAL n m : ℕ+,\\tu : prime_multiset := n.factor_multiset\\t⊢ (n * m).factor_multiset = n.factor_multiset + m.factor_multiset\\n PROOFSTEP ',\n", + " 'completion': ' let v := factor_multiset m\\n'},\n", + " {'prompt': 'GOAL a b : ℕ,\\th : b ∣ a\\t⊢ ↑(a / b) = ↑a / ↑b\\n PROOFSTEP ',\n", + " 'completion': ' rcases h with ⟨c, rfl⟩\\n'},\n", + " {'prompt': 'GOAL a : ℝ≥0∞,\\th : a * ⊤ = 1,\\tthis : false\\t⊢ a = ⊤⁻¹\\n PROOFSTEP ',\n", + " 'completion': ' exact this.elim\\n'},\n", + " {'prompt': 'GOAL a b : ℝ≥0∞,\\tha : a = ⊤\\t⊢ (a + b).to_real ≤ a.to_real + b.to_real\\n PROOFSTEP ',\n", + " 'completion': ' simp only [ha, top_add, top_to_real, zero_add, to_real_nonneg]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\ts t : set α\\t⊢ s = t → s ⊆ t\\n PROOFSTEP ',\n", + " 'completion': ' rintro rfl x hx\\n'},\n", + " {'prompt': \"GOAL α : Type u_1,\\ts t : set α\\t⊢ (coe ⁻¹' t).nonempty ↔ (s ∩ t).nonempty\\n PROOFSTEP \",\n", + " 'completion': ' rw [inter_comm, ← image_preimage_coe, nonempty_image_iff]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tβ : Type u_2,\\tt : set β\\t⊢ ∅.prod t = ∅\\n PROOFSTEP ',\n", + " 'completion': ' ext\\n'},\n", + " {'prompt': 'GOAL N : Type u_2,\\t_inst_2 : add_monoid N,\\tP : add_submonoid N\\t⊢ P.fg ↔ (⇑add_submonoid.to_submonoid P).fg\\n PROOFSTEP ',\n", + " 'completion': ' convert (submonoid.fg_iff_add_fg P.to_submonoid).symm\\n'},\n", + " {'prompt': 'GOAL H : subgroup ↥(alternating_group (fin 5)),\\tHn : H.normal\\t⊢ H = ⊥ ∨ H = ⊤\\n PROOFSTEP ',\n", + " 'completion': ' refine or_not.imp (id) (λ Hb, _)\\n'},\n", + " {'prompt': 'GOAL f : stieltjes_function,\\tx : ℝ,\\th : x ≤ x\\t⊢ f.left_lim x ≤ f.left_lim x\\n PROOFSTEP ',\n", + " 'completion': ' exact le_rfl\\n'},\n", + " {'prompt': 'GOAL R : Type u_1,\\t_inst_1 : semiring R,\\tf : nat.arithmetic_function R\\t⊢ f.ppow 0 = ↑ζ\\n PROOFSTEP ',\n", + " 'completion': ' rw [ppow, dif_pos rfl]\\n'},\n", + " {'prompt': \"GOAL n : ℕ,\\th_odd : odd n,\\thlt : 1 < n\\t⊢ bernoulli' n = 0\\n PROOFSTEP \",\n", + " 'completion': \" let B := mk (λ n, bernoulli' n / n!)\\n\"},\n", + " {'prompt': 'GOAL p n : ℕ,\\t_inst_1 : fact (nat.prime p)\\t⊢ padic_val_nat p (p ^ n) = n\\n PROOFSTEP ',\n", + " 'completion': ' rw [padic_val_nat.pow p _ _ (fact.out p.prime).ne_zero, padic_val_nat_self p, mul_one]\\n'},\n", + " {'prompt': 'GOAL b : cardinal,\\th : 0 * b = 0,\\tha : ω ≤ 0\\t⊢ false\\n PROOFSTEP ',\n", + " 'completion': ' exact not_lt_of_le ha omega_pos\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : topological_space α,\\ta : α\\t⊢ 𝓝[{a}] a = has_pure.pure a\\n PROOFSTEP ',\n", + " 'completion': ' rw [nhds_within, principal_singleton, inf_eq_right.2 
(pure_le_nhds a)]\\n'},\n", + " {'prompt': \"GOAL a b : ℚ,\\tthis : totally_bounded (coe ⁻¹' set.Icc ↑a ↑b)\\t⊢ totally_bounded (set.Icc a b)\\n PROOFSTEP \",\n", + " 'completion': ' rwa (set.ext (λ q, _) : Icc _ _ = _)\\n'},\n", + " {'prompt': 'GOAL i j : ℤ,\\th : ¬i = j\\t⊢ measure_theory.integral haar_circle ⇑(fourier (-i + j)) = 0\\n PROOFSTEP ',\n", + " 'completion': ' have hij : -i + j ≠ 0\\n'},\n", + " {'prompt': 'GOAL X Y : SemiNormedGroup₁,\\tf : X ⟶ Y,\\ts : category_theory.limits.cokernel_cofork f\\t⊢ (SemiNormedGroup₁.cokernel_cocone f).X ⟶ s.X\\n PROOFSTEP ',\n", + " 'completion': ' fsplit\\n'},\n", + " {'prompt': 'GOAL X Y : SemiNormedGroup₁,\\tf : X ⟶ Y,\\ts : category_theory.limits.cokernel_cofork f\\t⊢ normed_group_hom ↥((SemiNormedGroup₁.cokernel_cocone f).X) ↥(s.X)\\n PROOFSTEP ',\n", + " 'completion': ' apply normed_group_hom.lift _ s.π.1\\n'},\n", + " {'prompt': 'GOAL ⊢ 4 = 2 ^ 2\\t\\t⊢ real.sin (π / 2 ^ 2) = real.sqrt 2 / 2\\n PROOFSTEP ',\n", + " 'completion': ' norm_num\\n'},\n", + " {'prompt': 'GOAL ⊢ real.cos (π / 2 ^ 4) = real.sqrt (2 + real.sqrt (2 + real.sqrt 2)) / 2\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL C : Type u₁,\\t_inst_1 : category_theory.small_category C,\\tP : Cᵒᵖ ⥤ Type u₁\\t⊢ category_theory.limits.is_colimit (category_theory.cocone_of_representable P)\\n PROOFSTEP ',\n", + " 'completion': ' apply is_colimit.of_point_iso (colimit.is_colimit (functor_to_representables P))\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tb : buffer α,\\ta : α\\t⊢ (b.push_back a).read ⟨b.size, _⟩ = a\\n PROOFSTEP ',\n", + " 'completion': ' cases b\\n'},\n", + " {'prompt': 'GOAL x : ℝ\\t⊢ real.cosh (2 * x) = real.cosh x ^ 2 + real.sinh x ^ 2\\n PROOFSTEP ',\n", + " 'completion': ' rw ← of_real_inj; simp [cosh_two_mul]\\n'},\n", + " {'prompt': 'GOAL x : ℝ\\t⊢ ↑(real.cosh (2 * x)) = ↑(real.cosh x ^ 2 + real.sinh x ^ 2)\\n PROOFSTEP ',\n", + " 'completion': ' simp [cosh_two_mul]\\n'},\n", + " {'prompt': 'GOAL x : ℝ\\t⊢ real.cosh (2 * x) = real.cosh x ^ 2 + real.sinh x ^ 2\\n PROOFSTEP ',\n", + " 'completion': ' rw ← of_real_inj\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tm n : α →₀ ℕ\\t⊢ m ∈ n.Iic_finset ↔ m ≤ n\\n PROOFSTEP ',\n", + " 'completion': ' simp [Iic_finset, le_iff_exists_add, eq_comm]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : fintype α\\t⊢ ⇑multiset.card finset.univ.val + 1 = fintype.card α + 1\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL m n : ℕ,\\th : m < n.succ\\t⊢ int.sub_nat_nat m n.succ = -[1+ n - m]\\n PROOFSTEP ',\n", + " 'completion': \" have h' : n.succ - m = (n - m).succ\\n\"},\n", + " {'prompt': 'GOAL α : Type u,\\tl l₁ l₂ : list α\\t⊢ list.nil ++ l <+ list.nil ++ l\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tl : list α\\t⊢ l.nodup ↔ ∀ (x : α), ¬x ∈+ l\\n PROOFSTEP ',\n", + " 'completion': ' simp_rw [nodup_iff_sublist, duplicate_iff_sublist]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tR : α → α → Prop,\\ta : α\\t⊢ list.pairwise R [a]\\n PROOFSTEP ',\n", + " 'completion': ' simp only [pairwise_cons, mem_singleton, forall_prop_of_false (not_mem_nil _), forall_true_iff, pairwise.nil, and_true]\\n'},\n", + " {'prompt': \"GOAL α : Type u,\\tl l' : list α,\\th : l ∈ l'.cyclic_permutations\\t⊢ l ~r l'\\n PROOFSTEP \",\n", + " 'completion': ' obtain ⟨k, hk, rfl⟩ := nth_le_of_mem h\\n'},\n", + " {'prompt': \"GOAL α : Type u,\\tl l' : list α,\\th : l ~r l'\\t⊢ l ∈ l'.cyclic_permutations\\n PROOFSTEP \",\n", + " 'completion': ' 
obtain ⟨k, rfl⟩ := h.symm\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tx : multiset α,\\ta : list α\\t⊢ multiset.traverse id.mk ⟦a⟧ = ⟦a⟧\\n PROOFSTEP ',\n", + " 'completion': ' simp [traverse]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tx : multiset α,\\ta : list α\\t⊢ coe <$> a = ↑a\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL a b : ℕ\\t⊢ a / b = a ↔ a = 0 ∨ b = 1\\n PROOFSTEP ',\n", + " 'completion': ' split\\n'},\n", + " {'prompt': 'GOAL a b : ℕ\\t⊢ a / b = a → a = 0 ∨ b = 1\\n PROOFSTEP ',\n", + " 'completion': ' intro\\n'},\n", + " {'prompt': 'GOAL a : ℕ,\\tᾰ : a / 0 = a\\t⊢ a = 0 ∨ 0 = 1\\n PROOFSTEP ',\n", + " 'completion': ' simp * at *\\n'},\n", + " {'prompt': 'GOAL a : ℕ,\\tᾰ : a / 1 = a\\t⊢ a = 0 ∨ 1 = 1\\n PROOFSTEP ',\n", + " 'completion': ' right\\n'},\n", + " {'prompt': 'GOAL a b : ℕ\\t⊢ a = 0 ∨ b = 1 → a / b = a\\n PROOFSTEP ',\n", + " 'completion': ' rintros (rfl|rfl); simp\\n'},\n", + " {'prompt': 'GOAL a b : ℕ\\t⊢ a = 0 ∨ b = 1 → a / b = a\\n PROOFSTEP ',\n", + " 'completion': ' rintros (rfl|rfl)\\n'},\n", + " {'prompt': 'GOAL a b c : ℕ,\\th : a + b ≤ c\\t⊢ a ≤ a + b\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': \"GOAL n m m' : ℕ,\\th : n.lxor m = n.lxor m'\\t⊢ m = n.lxor (n.lxor m')\\n PROOFSTEP \",\n", + " 'completion': ' simp [←lxor_assoc, ←h]\\n'},\n", + " {'prompt': \"GOAL n m m' : ℕ,\\th : n.lxor m = n.lxor m'\\t⊢ n.lxor (n.lxor m') = m'\\n PROOFSTEP \",\n", + " 'completion': ' simp [←lxor_assoc]\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\tih : nat.of_digits 1 (1.digits n) = n\\t⊢ nat.of_digits 1 (1.digits n.succ) = n.succ\\n PROOFSTEP ',\n", + " 'completion': ' simp only [ih, add_comm 1, of_digits_one_cons, nat.cast_id, digits_one_succ]\\n'},\n", + " {'prompt': 'GOAL m n : ℕ,\\ts : m.psub n = option.none\\t⊢ option.none = option.none ↔ m < n\\n PROOFSTEP ',\n", + " 'completion': ' simp [eq_comm]\\n'},\n", + " {'prompt': 'GOAL m n val : ℕ,\\ts : m.psub n = option.some val\\t⊢ n ≤ val + n\\n PROOFSTEP ',\n", + " 'completion': ' apply nat.le_add_left\\n'},\n", + " {'prompt': 'GOAL ⊢ ∀ (a b c : pos_num), a * b * c = a * (b * c)\\n PROOFSTEP ',\n", + " 'completion': ' try { intros, refl }\\n'},\n", + " {'prompt': 'GOAL ⊢ ∀ (a b c : pos_num), a * b * c = a * (b * c)\\n PROOFSTEP ',\n", + " 'completion': ' transfer\\n'},\n", + " {'prompt': 'GOAL to_of_int : ∀ (n : ℤ), ↑↑n = n,\\tn : ℕ\\t⊢ ↑↑-[1+ n] = -[1+ n]\\n PROOFSTEP ',\n", + " 'completion': ' rw [int.cast_neg_succ_of_nat, cast_zneg, add_one, cast_succ, int.neg_succ_of_nat_eq, ← num.of_nat_to_znum, num.cast_to_znum, ← num.cast_to_nat, int.nat_cast_eq_coe_nat, num.to_of_nat]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tn : ℕ\\t⊢ (polynomial.X ^ n).coeff n = 1\\n PROOFSTEP ',\n", + " 'completion': ' simp [coeff_X_pow]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\th : 1 < 0\\t⊢ polynomial.X.iterated_deriv 0 = 0\\n PROOFSTEP ',\n", + " 'completion': ' exfalso\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tc₁ : computation α,\\ta : α\\t⊢ (c₁.think.orelse (computation.return a)).destruct = sum.inl a\\n PROOFSTEP ',\n", + " 'completion': ' simp [orelse]\\n'},\n", + " {'prompt': 'GOAL x : ℝ,\\tn : ℤ\\t⊢ ⇑circle_deg1_lift.translate (⇑multiplicative.of_add x) ^ n = ⇑circle_deg1_lift.translate (⇑multiplicative.of_add (↑n * x))\\n PROOFSTEP ',\n", + " 'completion': ' simp only [← gsmul_eq_mul, of_add_gsmul, monoid_hom.map_gpow]\\n'},\n", + " {'prompt': 'GOAL N : Type u_2,\\t_inst_2 : add_monoid N,\\tP : add_submonoid N\\t⊢ P = 
⇑submonoid.to_add_submonoid (⇑add_submonoid.to_submonoid P)\\n PROOFSTEP ',\n", + " 'completion': \" exact set_like.ext' rfl\\n\"},\n", + " {'prompt': 'GOAL H : subgroup ↥(alternating_group (fin 5)),\\tHn : H.normal,\\tHb : ¬H = ⊥\\t⊢ H = ⊤\\n PROOFSTEP ',\n", + " 'completion': ' rw [eq_bot_iff_forall] at Hb\\n'},\n", + " {'prompt': 'GOAL f : stieltjes_function,\\tx y : ℝ,\\th : x ≤ y\\t⊢ f.left_lim x ≤ f.left_lim y\\n PROOFSTEP ',\n", + " 'completion': ' rcases eq_or_lt_of_le h with rfl|hxy\\n'},\n", + " {'prompt': \"GOAL pqr : multiset ℕ+,\\tn : ℕ+,\\tH : ADE_inequality.D' n = pqr\\t⊢ 1 < ADE_inequality.sum_inv pqr\\n PROOFSTEP \",\n", + " 'completion': \" rw [← H, D', sum_inv_pqr]\\n\"},\n", + " {'prompt': \"GOAL pqr : multiset ℕ+,\\tH : ADE_inequality.E' 3 = pqr\\t⊢ 1 < (↑2)⁻¹ + (↑3)⁻¹ + (↑3)⁻¹\\n PROOFSTEP \",\n", + " 'completion': ' norm_num\\n'},\n", + " {'prompt': \"GOAL pqr : multiset ℕ+,\\tH : ADE_inequality.E' 4 = pqr\\t⊢ 1 < (↑2)⁻¹ + (↑3)⁻¹ + (↑4)⁻¹\\n PROOFSTEP \",\n", + " 'completion': ' norm_num\\n'},\n", + " {'prompt': \"GOAL pqr : multiset ℕ+,\\tH : ADE_inequality.E' 5 = pqr\\t⊢ 1 < (↑2)⁻¹ + (↑3)⁻¹ + (↑5)⁻¹\\n PROOFSTEP \",\n", + " 'completion': ' norm_num\\n'},\n", + " {'prompt': 'GOAL R : Type u_1,\\t_inst_1 : comm_ring R,\\tM : submonoid R\\t⊢ ⇑((localization.monoid_of M).to_map) 0 = 0\\n PROOFSTEP ',\n", + " 'completion': ' rw [← mk_zero (1 : M), mk_one_eq_monoid_of_mk]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\tx : R,\\t_inst_1 : ring R,\\th : is_nilpotent x\\t⊢ is_nilpotent (-x)\\n PROOFSTEP ',\n", + " 'completion': ' obtain ⟨n, hn⟩ := h\\n'},\n", + " {'prompt': 'GOAL α : Type u\\t⊢ 2 ≤ # α ↔ ∃ (x y : α), x ≠ y\\n PROOFSTEP ',\n", + " 'completion': ' split\\n'},\n", + " {'prompt': 'GOAL α : Type u\\t⊢ 2 ≤ # α → (∃ (x y : α), x ≠ y)\\n PROOFSTEP ',\n", + " 'completion': ' rintro ⟨f⟩\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tx y : α,\\th : x ≠ y\\t⊢ 2 ≤ # α\\n PROOFSTEP ',\n", + " 'completion': \" by_contra h'\\n\"},\n", + " {'prompt': 'GOAL α : Type u\\t⊢ (∃ (x y : α), x ≠ y) → 2 ≤ # α\\n PROOFSTEP ',\n", + " 'completion': ' rintro ⟨x, y, h⟩\\n'},\n", + " {'prompt': 'GOAL ι : Type u_1,\\tf : ι → cardinal,\\ti : ι\\t⊢ f i ≤ cardinal.sup f\\n PROOFSTEP ',\n", + " 'completion': ' dsimp [sup]; cases min_eq _ _ with c hc; rw hc; exact c.2 i\\n'},\n", + " {'prompt': 'GOAL ι : Type u_1,\\tf : ι → cardinal,\\ti : ι\\t⊢ f i ≤ cardinal.sup f\\n PROOFSTEP ',\n", + " 'completion': ' dsimp [sup]; cases min_eq _ _ with c hc; rw hc\\n'},\n", + " {'prompt': 'GOAL ι : Type u_1,\\tf : ι → cardinal,\\ti : ι\\t⊢ f i ≤ cardinal.sup f\\n PROOFSTEP ',\n", + " 'completion': ' dsimp [sup]; cases min_eq _ _ with c hc\\n'},\n", + " {'prompt': 'GOAL ι : Type u_1,\\tf : ι → cardinal,\\ti : ι\\t⊢ f i ≤ cardinal.sup f\\n PROOFSTEP ',\n", + " 'completion': ' dsimp [sup]\\n'},\n", + " {'prompt': 'GOAL a : cardinal\\t⊢ a * 1 = a\\t\\tb : cardinal\\t⊢ 0 * b = 0\\n PROOFSTEP ',\n", + " 'completion': ' all_goals {simp}\\n'},\n", + " {'prompt': 'GOAL ⊢ 0 ≈ -0 ∧ (∀ (i : 0.left_moves), (0.move_left i).impartial) ∧ ∀ (j : 0.right_moves), (0.move_right j).impartial\\n PROOFSTEP ',\n", + " 'completion': ' dsimp\\n'},\n", + " {'prompt': 'GOAL ⊢ 0 ≈ -0 ∧ (∀ (i : pempty), (0.move_left i).impartial) ∧ ∀ (j : pempty), (0.move_right j).impartial\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL v : ℕ → ℤ,\\tis js : list ℤ\\t⊢ is.length ≤ 0 + (list.func.add is js).length\\n PROOFSTEP ',\n", + " 'completion': ' rw [zero_add, length_add]\\n'},\n", + " {'prompt': 'GOAL v : ℕ → ℤ,\\tis js : 
list ℤ\\t⊢ js.length ≤ 0 + (list.func.add is js).length\\n PROOFSTEP ',\n", + " 'completion': ' rw [zero_add, length_add]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : pseudo_metric_space α,\\tx : α,\\tε : ℝ≥0\\t⊢ ε = ↑ε.to_nnreal\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL X : Type u_1,\\t_inst_1 : topological_space X,\\t_inst_2 : loc_path_connected_space X\\t⊢ path_connected_space X ↔ connected_space X\\n PROOFSTEP ',\n", + " 'completion': ' split\\n'},\n", + " {'prompt': 'GOAL X : Type u_1,\\t_inst_1 : topological_space X,\\t_inst_2 : loc_path_connected_space X\\t⊢ path_connected_space X → connected_space X\\n PROOFSTEP ',\n", + " 'completion': ' introI h\\n'},\n", + " {'prompt': 'GOAL X : Type u_1,\\t_inst_1 : topological_space X,\\t_inst_2 : loc_path_connected_space X\\t⊢ connected_space X → path_connected_space X\\n PROOFSTEP ',\n", + " 'completion': ' introI hX\\n'},\n", + " {'prompt': 'GOAL i j : ℤ\\t⊢ has_inner.inner (fourier_Lp 2 i) (fourier_Lp 2 j) = ite (i = j) 1 0\\n PROOFSTEP ',\n", + " 'completion': ' rw continuous_map.inner_to_Lp haar_circle (fourier i) (fourier j)\\n'},\n", + " {'prompt': 'GOAL X Y : SemiNormedGroup₁,\\tf : X ⟶ Y,\\ts : category_theory.limits.cokernel_cofork f\\t⊢ (normed_group_hom.lift f.val.range (category_theory.limits.cofork.π s).val _).norm_noninc\\n PROOFSTEP ',\n", + " 'completion': ' exact normed_group_hom.lift_norm_noninc _ _ _ s.π.2\\n'},\n", + " {'prompt': 'GOAL x y z : ℍ\\t⊢ has_inner.inner (x + y) z = has_inner.inner x z + has_inner.inner y z\\n PROOFSTEP ',\n", + " 'completion': ' simp only [inner_def, add_mul, add_re]\\n'},\n", + " {'prompt': 'GOAL h : -1 ≠ 1\\t⊢ has_deriv_within_at real.arcsin (1 / real.sqrt (1 - (-1) ^ 2)) (set.Iic (-1)) (-1)\\n PROOFSTEP ',\n", + " 'completion': ' convert (has_deriv_within_at_const _ _ (-(π / 2))).congr _ _\\n'},\n", + " {'prompt': 'GOAL h : -1 ≠ 1\\t⊢ has_deriv_within_at real.arcsin (1 / real.sqrt (1 - (-1) ^ 2)) (set.Iic (-1)) (-1)\\n PROOFSTEP ',\n", + " 'completion': ' convert (has_deriv_within_at_const _ _ (-(π / 2))).congr _ _; simp [arcsin_of_le_neg_one] { contextual := tt }\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\tc : composition n,\\tj : fin n\\t⊢ ↑j < c.size_up_to (↑(c.index j) + 1)\\n PROOFSTEP ',\n", + " 'completion': ' exact lt_size_up_to_index_succ _ _\\n'},\n", + " {'prompt': 'GOAL x : ℕ,\\tb : bool\\t⊢ bitvec.add_lsb x b = 2 * x + cond b 1 0\\n PROOFSTEP ',\n", + " 'completion': ' simp [add_lsb,two_mul]\\n'},\n", + " {'prompt': 'GOAL K : Type u_1,\\t_inst_1 : is_R_or_C K,\\tz : K\\t⊢ ⇑is_R_or_C.re (bit0 z) = bit0 (⇑is_R_or_C.re z)\\n PROOFSTEP ',\n", + " 'completion': ' simp [bit0]\\n'},\n", + " {'prompt': 'GOAL K : Type u_1,\\t_inst_1 : is_R_or_C K,\\tz : K\\t⊢ ⇑is_R_or_C.conj (bit0 z) = bit0 (⇑is_R_or_C.conj z)\\n PROOFSTEP ',\n", + " 'completion': ' simp [bit0, ext_iff]\\n'},\n", + " {'prompt': 'GOAL K : Type u_1,\\t_inst_1 : is_R_or_C K,\\tz : K\\t⊢ z / is_R_or_C.I = -(z * is_R_or_C.I)\\n PROOFSTEP ',\n", + " 'completion': ' by_cases h : (I : K) = 0\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ ⇑(fin_congr _) ⟨n - n, _⟩ = 0\\t\\tn : ℕ\\t⊢ n ≤ n\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\ts : finset α,\\t_inst_1 : decidable_eq α\\t⊢ s.diag.card + s.off_diag.card = (s.product s).card\\n PROOFSTEP ',\n", + " 'completion': ' apply filter_card_add_filter_neg_card_eq_card\\n'},\n", + " {'prompt': 'GOAL l n m : ℕ\\t⊢ finset.Ico n m \\\\ finset.Ico n l = finset.Ico (linear_order.max n l) m\\n PROOFSTEP ',\n", + 
" 'completion': ' ext k\\n'},\n", + " {'prompt': 'GOAL l n m : ℕ\\t⊢ finset.Ico n m \\\\ finset.Ico n l = finset.Ico (linear_order.max n l) m\\n PROOFSTEP ',\n", + " 'completion': ' ext k; by_cases n ≤ k; simp [h, and_comm]\\n'},\n", + " {'prompt': 'GOAL l n m : ℕ\\t⊢ finset.Ico n m \\\\ finset.Ico n l = finset.Ico (linear_order.max n l) m\\n PROOFSTEP ',\n", + " 'completion': ' ext k; by_cases n ≤ k\\n'},\n", + " {'prompt': 'GOAL l u n : ℕ\\t⊢ n ∈ finset.Ico l (u + 1) ↔ n ∈ set.Icc l u\\n PROOFSTEP ',\n", + " 'completion': ' simp only [mem_Icc, finset.Ico.mem, nat.lt_add_one_iff, iff_self, implies_true_iff]\\n'},\n", + " {'prompt': 'GOAL a b : ℤ,\\tH : a % b = 0\\t⊢ a / b * b = a\\n PROOFSTEP ',\n", + " 'completion': ' rw [mul_comm, mul_div_cancel_of_mod_eq_zero H]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ti : ℕ\\t⊢ list.drop (list.take i (list.map list.length list.nil)).sum list.nil.join = (list.drop i list.nil).join\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tx : multiset α\\t⊢ ∀ (a : list α), multiset.traverse id.mk ⟦a⟧ = ⟦a⟧\\n PROOFSTEP ',\n", + " 'completion': ' intro\\n'},\n", + " {'prompt': 'GOAL R : Type u_1,\\t_inst_3 : comm_semiring R,\\tσ : Type u_2,\\t_inst_4 : is_empty σ\\t⊢ function.surjective ⇑mv_polynomial.C\\n PROOFSTEP ',\n", + " 'completion': ' refine λ p, ⟨p.to_fun 0, finsupp.ext (λ a, _)⟩\\n'},\n", + " {'prompt': 'GOAL m n k : ℕ\\t⊢ (m + n % k) % k = (m + n) % k\\n PROOFSTEP ',\n", + " 'completion': ' rw [add_comm, mod_add_mod, add_comm]\\n'},\n", + " {'prompt': 'GOAL a b : ℕ,\\tᾰ : a / b = a\\t⊢ a = 0 ∨ b = 1\\n PROOFSTEP ',\n", + " 'completion': ' cases b\\n'},\n", + " {'prompt': 'GOAL a b : ℕ,\\tᾰ : a / b.succ = a\\t⊢ a = 0 ∨ b.succ = 1\\n PROOFSTEP ',\n", + " 'completion': ' cases b\\n'},\n", + " {'prompt': 'GOAL a b : ℕ,\\tᾰ : a / b.succ.succ = a\\t⊢ a = 0 ∨ b.succ.succ = 1\\n PROOFSTEP ',\n", + " 'completion': ' left\\n'},\n", + " {'prompt': 'GOAL b n : ℕ,\\th : b.digits n = list.nil\\t⊢ nat.of_digits b (b.digits n) = nat.of_digits b list.nil\\n PROOFSTEP ',\n", + " 'completion': ' rw h\\n'},\n", + " {'prompt': 'GOAL n m : ℕ,\\thnm : n ≤ m\\t⊢ n! * n ^ (m - n) ≤ m!\\n PROOFSTEP ',\n", + " 'completion': ' suffices : n! 
* (n + 1) ^ (m - n) ≤ m!\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : semiring α,\\tn : num\\t⊢ bit0 ↑n + ↑1 = bit1 ↑n\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : ring R\\t⊢ ∀ (a : polynomial R), -a + a = 0\\n PROOFSTEP ',\n", + " 'completion': ' rintros ⟨⟩\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : ring R\\t⊢ ∀ (a : polynomial R), 0 • a = 0\\n PROOFSTEP ',\n", + " 'completion': ' rintro ⟨⟩\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp : polynomial R\\t⊢ (p * polynomial.X).coeff 0 = 0\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL F : Type u → Type u,\\t_inst_1 : functor F,\\tq : qpf F\\t⊢ q.supp_preservation ↔ q.is_uniform\\n PROOFSTEP ',\n", + " 'completion': ' split\\n'},\n", + " {'prompt': 'GOAL F : Type u → Type u,\\t_inst_1 : functor F,\\tq : qpf F\\t⊢ q.supp_preservation → q.is_uniform\\n PROOFSTEP ',\n", + " 'completion': \" intros h α a a' f f' h'\\n\"},\n", + " {'prompt': 'GOAL F : Type u → Type u,\\t_inst_1 : functor F,\\tq : qpf F\\t⊢ q.is_uniform → q.supp_preservation\\n PROOFSTEP ',\n", + " 'completion': ' rintros h α ⟨a,f⟩\\n'},\n", + " {'prompt': 'GOAL b : ℝ≥0∞,\\thb : b ≠ ⊤,\\ta : ℝ≥0\\t⊢ ↑a.to_real = b.to_real ↔ ↑a = b\\n PROOFSTEP ',\n", + " 'completion': ' lift b to ℝ≥0 using hb\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tc₁ : computation α,\\ta : α\\t⊢ (c₁.think <|> computation.return a).destruct = sum.inl a\\n PROOFSTEP ',\n", + " 'completion': ' unfold has_orelse.orelse\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tc₁ : computation α,\\ta : α\\t⊢ (c₁.think <|> computation.return a).destruct = sum.inl a\\n PROOFSTEP ',\n", + " 'completion': ' unfold has_orelse.orelse; simp [orelse]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts : wseq α,\\tn : ℕ\\t⊢ s.tail.drop n = s.drop (n + 1)\\n PROOFSTEP ',\n", + " 'completion': ' rw add_comm; symmetry; apply dropn_add\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts : wseq α,\\tn : ℕ\\t⊢ s.tail.drop n = s.drop (n + 1)\\n PROOFSTEP ',\n", + " 'completion': ' rw add_comm; symmetry\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts : wseq α,\\tn : ℕ\\t⊢ s.tail.drop n = s.drop (1 + n)\\n PROOFSTEP ',\n", + " 'completion': ' symmetry\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts : wseq α,\\tn : ℕ\\t⊢ s.drop (1 + n) = s.tail.drop n\\n PROOFSTEP ',\n", + " 'completion': ' apply dropn_add\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts : wseq α,\\tn : ℕ\\t⊢ s.tail.drop n = s.drop (n + 1)\\n PROOFSTEP ',\n", + " 'completion': ' rw add_comm\\n'},\n", + " {'prompt': 'GOAL K : Type u,\\t_inst_1 : field K,\\ts : subfield K,\\tn : ℤ\\t⊢ ↑n ∈ s\\n PROOFSTEP ',\n", + " 'completion': ' simp only [← gsmul_one, gsmul_mem, one_mem]\\n'},\n", + " {'prompt': 'GOAL m : ℕ,\\th : 2 ≤ 2 + m\\t⊢ (fin_rotate (2 + m)).cycle_type = {2 + m}\\n PROOFSTEP ',\n", + " 'completion': ' rw [add_comm, cycle_type_fin_rotate]\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\ti : fin (n + 1),\\tj : fin n\\t⊢ ⇑(i.cycle_range) (⇑(i.succ_above) j) = j.succ\\n PROOFSTEP ',\n", + " 'completion': ' cases lt_or_ge j.cast_succ i with h h\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tr : α → α → Prop,\\th : symmetric r\\t⊢ symmetric (relation.refl_trans_gen r)\\n PROOFSTEP ',\n", + " 'completion': ' intros x y h\\n'},\n", + " {'prompt': \"GOAL pqr : multiset ℕ+,\\tp' q' : ℕ+,\\tH : ADE_inequality.A' p' q' = pqr\\t⊢ 0 < (↑↑p')⁻¹\\n PROOFSTEP \",\n", + " 'completion': ' simp only [pnat.pos, nat.cast_pos, inv_pos]\\n'},\n", + " {'prompt': \"GOAL pqr : multiset 
ℕ+,\\tp' q' : ℕ+,\\tH : ADE_inequality.A' p' q' = pqr\\t⊢ 0 < (↑↑q')⁻¹\\n PROOFSTEP \",\n", + " 'completion': ' simp only [pnat.pos, nat.cast_pos, inv_pos]\\n'},\n", + " {'prompt': 'GOAL a : ℕ,\\ta1 : 1 < a,\\tm n : ℕ,\\th : n ≤ m\\t⊢ ℕ\\n PROOFSTEP ',\n", + " 'completion': ' repeat {rw ← neg_mul_eq_mul_neg at h}\\n'},\n", + " {'prompt': 'GOAL x y z : ℤ,\\th : pythagorean_triple x y z,\\th0 : x.gcd y = 0\\t⊢ h.is_classified\\n PROOFSTEP ',\n", + " 'completion': ' have hx : x = 0\\n'},\n", + " {'prompt': 'GOAL x y z : ℤ,\\th : pythagorean_triple x y z,\\th0 : ¬x.gcd y = 0\\t⊢ h.is_classified\\n PROOFSTEP ',\n", + " 'completion': ' apply h.is_classified_of_normalize_is_primitive_classified\\n'},\n", + " {'prompt': 'GOAL x y z : ℤ,\\th : pythagorean_triple x y z,\\th0 : ¬x.gcd y = 0\\t⊢ _.is_primitive_classified\\n PROOFSTEP ',\n", + " 'completion': ' apply h.normalize.is_primitive_classified_of_coprime\\n'},\n", + " {'prompt': 'GOAL α : Type ?,\\tA B : semilattice_sup α,\\tx y : α,\\t_inst : semilattice_sup α\\t⊢ Prop\\n PROOFSTEP ',\n", + " 'completion': ' exact x ≤ y\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : field R,\\ta : R,\\th : a = 0\\t⊢ is_unit 1\\n PROOFSTEP ',\n", + " 'completion': ' exact is_unit_one\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tf : punit ⊕ punit ↪ α\\t⊢ ⇑f (sum.inl punit.star) ≠ ⇑f (sum.inr punit.star)\\n PROOFSTEP ',\n", + " 'completion': ' intro h\\n'},\n", + " {'prompt': \"GOAL α : Type u,\\tx y : α,\\th : x ≠ y,\\th' : subsingleton α\\t⊢ false\\n PROOFSTEP \",\n", + " 'completion': ' apply h\\n'},\n", + " {'prompt': 'GOAL ι : Type u_1,\\tf : ι → cardinal,\\ti : ι\\t⊢ f i ≤ cardinal.min _ subtype.val\\n PROOFSTEP ',\n", + " 'completion': ' cases min_eq _ _ with c hc\\n'},\n", + " {'prompt': 'GOAL G : Type w,\\t_inst_1 : add_group_with_zero_nhd G,\\ta : G\\t⊢ continuous_at (λ (a : G), -a) a\\n PROOFSTEP ',\n", + " 'completion': \" rw [continuous_at, nhds_eq, nhds_eq, tendsto_map'_iff]\\n\"},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : topological_space α,\\ts : set α,\\th : dense s\\t⊢ dense sᶜᶜ\\n PROOFSTEP ',\n", + " 'completion': ' rwa compl_compl\\n'},\n", + " {'prompt': 'GOAL X : Type u_1,\\t_inst_1 : topological_space X,\\tS : discrete_quotient X,\\t_inst_2 : compact_space X\\t⊢ fintype ↥S\\n PROOFSTEP ',\n", + " 'completion': ' have cond : is_compact (⊤ : set X) := compact_univ\\n'},\n", + " {'prompt': 'GOAL X : Type u_1,\\t_inst_1 : topological_space X,\\t_inst_2 : loc_path_connected_space X,\\th : path_connected_space X\\t⊢ connected_space X\\n PROOFSTEP ',\n", + " 'completion': ' apply_instance\\n'},\n", + " {'prompt': 'GOAL X : Type u_1,\\t_inst_1 : topological_space X,\\t_inst_2 : loc_path_connected_space X,\\thX : connected_space X\\t⊢ path_connected_space X\\n PROOFSTEP ',\n", + " 'completion': ' inhabit X\\n'},\n", + " {'prompt': 'GOAL ⊢ ∀ (i j : ℤ), has_inner.inner (fourier_Lp 2 i) (fourier_Lp 2 j) = ite (i = j) 1 0\\n PROOFSTEP ',\n", + " 'completion': ' intros i j\\n'},\n", + " {'prompt': 'GOAL V : Type u_5,\\t_inst_1 : semi_normed_group V,\\tx y : V\\t⊢ has_dist.dist ↑x ↑y = ∥↑x - ↑y∥\\n PROOFSTEP ',\n", + " 'completion': ' rw [← completion.coe_sub, norm_coe, metric.completion.dist_eq, dist_eq_norm]\\n'},\n", + " {'prompt': 'GOAL x y : ℍ,\\tr : ℝ\\t⊢ has_inner.inner (r • x) y = ⇑is_R_or_C.conj r * has_inner.inner x y\\n PROOFSTEP ',\n", + " 'completion': ' simp [inner_def]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_2 : complete_lattice α,\\tJ : Type u,\\t𝒥 : category_theory.small_category J\\t⊢ 
category_theory.limits.has_limits_of_shape J α\\n PROOFSTEP ',\n", + " 'completion': ' exactI { has_limit := λ F, has_limit.mk (limit_cone F) }\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : primcodable α,\\tβ : Sort ?,\\te : β ≃ α\\t⊢ Sort ?\\n PROOFSTEP ',\n", + " 'completion': ' haveI := primcodable.of_equiv α e; exact primrec e\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : primcodable α,\\tβ : Sort ?,\\te : β ≃ α\\t⊢ Sort ?\\n PROOFSTEP ',\n", + " 'completion': ' haveI := primcodable.of_equiv α e\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : primcodable α,\\tβ : Type u_2,\\te : β ≃ α\\t⊢ primrec ⇑e\\n PROOFSTEP ',\n", + " 'completion': ' letI : primcodable β := primcodable.of_equiv α e\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : primcodable α,\\tβ : Type u_2,\\te : β ≃ α\\t⊢ primrec ⇑e\\n PROOFSTEP ',\n", + " 'completion': ' letI : primcodable β := primcodable.of_equiv α e; exact encode_iff.1 primrec.encode\\n'},\n", + " {'prompt': \"GOAL k' k : turing.to_partrec.cont,\\tv : list ℕ\\t⊢ turing.to_partrec.step_normal turing.to_partrec.code.zero' (k.then k') v = (turing.to_partrec.step_normal turing.to_partrec.code.zero' k v).then k'\\n PROOFSTEP \",\n", + " 'completion': ' simp only [cont.then, step_normal, cfg.then, *] {constructor_eq := ff}\\n'},\n", + " {'prompt': \"GOAL k' k : turing.to_partrec.cont,\\tv : list ℕ\\t⊢ turing.to_partrec.step_normal turing.to_partrec.code.succ (k.then k') v = (turing.to_partrec.step_normal turing.to_partrec.code.succ k v).then k'\\n PROOFSTEP \",\n", + " 'completion': ' simp only [cont.then, step_normal, cfg.then, *] {constructor_eq := ff}\\n'},\n", + " {'prompt': \"GOAL k' k : turing.to_partrec.cont,\\tv : list ℕ\\t⊢ turing.to_partrec.step_normal turing.to_partrec.code.tail (k.then k') v = (turing.to_partrec.step_normal turing.to_partrec.code.tail k v).then k'\\n PROOFSTEP \",\n", + " 'completion': ' simp only [cont.then, step_normal, cfg.then, *] {constructor_eq := ff}\\n'},\n", + " {'prompt': 'GOAL Γ : Type u_1,\\t_inst_1 : inhabited Γ,\\tT : turing.tape Γ\\t⊢ turing.tape.move turing.dir.left (turing.tape.move turing.dir.right T) = T\\n PROOFSTEP ',\n", + " 'completion': ' cases T; simp [tape.move]\\n'},\n", + " {'prompt': 'GOAL Γ : Type u_1,\\t_inst_1 : inhabited Γ,\\tT : turing.tape Γ\\t⊢ turing.tape.move turing.dir.left (turing.tape.move turing.dir.right T) = T\\n PROOFSTEP ',\n", + " 'completion': ' cases T\\n'},\n", + " {'prompt': 'GOAL α : Type ?,\\tβ : Type ?,\\tx : α ⊕ β\\t⊢ bifunctor.bimap id id x = x\\n PROOFSTEP ',\n", + " 'completion': ' cases x\\n'},\n", + " {'prompt': 'GOAL α : Type ?,\\tβ : Type ?,\\tx : α\\t⊢ bifunctor.bimap id id (sum.inl x) = sum.inl x\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL α : Type ?,\\tβ : Type ?,\\tx : β\\t⊢ bifunctor.bimap id id (sum.inr x) = sum.inr x\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL α β : Type,\\ta : α,\\tf : α → parser β,\\t_inst_1 : (f a).step\\t⊢ (f a).step\\n PROOFSTEP ',\n", + " 'completion': ' apply_instance\\n'},\n", + " {'prompt': 'GOAL x : ℂ\\t⊢ complex.cos (3 * x) = 4 * complex.cos x ^ 3 - 3 * complex.cos x\\n PROOFSTEP ',\n", + " 'completion': ' have h1 : x + 2 * x = 3 * x\\n'},\n", + " {'prompt': 'GOAL K : Type u_1,\\t_inst_1 : is_R_or_C K,\\tn : ℕ,\\th : ↑n = 0\\t⊢ n = 0\\n PROOFSTEP ',\n", + " 'completion': ' rwa [← of_real_nat_cast, of_real_eq_zero, nat.cast_eq_zero] at h\\n'},\n", + " {'prompt': 'GOAL c : ℝ,\\tz : ℂ\\t⊢ ![(c • z).re, (c • z).im] = ⇑(ring_hom.id ℝ) c • 
![z.re, z.im]\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : decidable_eq α,\\tU V : finset α\\t⊢ decidable (U ⊓ V = ⊥)\\n PROOFSTEP ',\n", + " 'completion': ' apply_instance\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\ts : finset α,\\t_inst_1 : decidable_eq α\\t⊢ s.off_diag.card = s.card * s.card - s.card\\n PROOFSTEP ',\n", + " 'completion': ' suffices : (diag s).card + (off_diag s).card = s.card * s.card\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\ts : finset α,\\t_inst_1 : decidable_eq α\\t⊢ s.diag.card + s.off_diag.card = s.card * s.card\\n PROOFSTEP ',\n", + " 'completion': ' rw ← card_product\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tn : ℕ,\\ts : finset α,\\th : n < s.card\\t⊢ (finset.powerset_len n s).nonempty\\n PROOFSTEP ',\n", + " 'completion': ' classical\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : fintype α\\t⊢ ⇑multiset.card (multiset.map option.some finset.univ.val) + 1 = fintype.card α + 1\\n PROOFSTEP ',\n", + " 'completion': ' rw multiset.card_map\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : fintype α\\t⊢ ⇑multiset.card (multiset.map option.some finset.univ.val) + 1 = fintype.card α + 1\\n PROOFSTEP ',\n", + " 'completion': ' rw multiset.card_map; refl\\n'},\n", + " {'prompt': 'GOAL m n : ℕ,\\th : m < n.succ\\t⊢ int.of_nat m + -[1+ n] = -[1+ n - m]\\n PROOFSTEP ',\n", + " 'completion': ' change sub_nat_nat _ _ = _\\n'},\n", + " {'prompt': 'GOAL a b : ℤ,\\tHa : 0 ≤ a,\\tHb : b ≤ 0\\t⊢ 0 ≤ -(a / b)\\n PROOFSTEP ',\n", + " 'completion': ' rw [← int.div_neg]\\n'},\n", + " {'prompt': 'GOAL a b : ℤ,\\tHa : 0 ≤ a,\\tHb : b ≤ 0\\t⊢ 0 ≤ -(a / b)\\n PROOFSTEP ',\n", + " 'completion': ' rw [← int.div_neg]; exact int.div_nonneg Ha (neg_nonneg_of_nonpos Hb)\\n'},\n", + " {'prompt': 'GOAL a b : ℤ,\\tHa : 0 ≤ a,\\tHb : b ≤ 0\\t⊢ 0 ≤ a / -b\\n PROOFSTEP ',\n", + " 'completion': ' exact int.div_nonneg Ha (neg_nonneg_of_nonpos Hb)\\n'},\n", + " {'prompt': 'GOAL a b : ℕ,\\tw : a ∣ b,\\th : b / a = 0\\t⊢ b = 0\\n PROOFSTEP ',\n", + " 'completion': ' rw [←nat.div_mul_cancel w, h, zero_mul]\\n'},\n", + " {'prompt': 'GOAL ⊢ nat.of_digits 0 (0.digits 0) = 0\\t\\tcase nat.zero, nat.succ\\tn : ℕ\\t⊢ nat.of_digits 0 (0.digits n.succ) = n.succ\\n PROOFSTEP ',\n", + " 'completion': ' { refl, }\\n'},\n", + " {'prompt': 'GOAL m n k n : ℕ,\\t_x : k ∣ 0,\\tkn : k ∣ n\\t⊢ k ∣ n\\n PROOFSTEP ',\n", + " 'completion': ' exact kn\\n'},\n", + " {'prompt': 'GOAL m n : ℕ,\\th : n ≤ m\\t⊢ even (m - n) ↔ (even m ↔ even n)\\n PROOFSTEP ',\n", + " 'completion': ' conv { to_rhs, rw [←nat.sub_add_cancel h, even_add] }\\n'},\n", + " {'prompt': 'GOAL m n : ℕ,\\th : n ≤ m\\t⊢ odd (m - n) ↔ (odd n ↔ even m)\\n PROOFSTEP ',\n", + " 'completion': ' rw [odd_iff_not_even, even_sub h, not_iff, not_iff_comm, odd_iff_not_even]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp : polynomial R\\t⊢ p.comp 0 = ⇑polynomial.C (polynomial.eval 0 p)\\n PROOFSTEP ',\n", + " 'completion': ' rw [← C_0, comp_C]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp : polynomial R\\t⊢ p.comp 1 = ⇑polynomial.C (polynomial.eval 1 p)\\n PROOFSTEP ',\n", + " 'completion': ' rw [← C_1, comp_C]\\n'},\n", + " {'prompt': 'GOAL a b : ℝ≥0∞,\\th : a * b = 1,\\thb : b ≠ ⊤\\t⊢ a = b⁻¹\\n PROOFSTEP ',\n", + " 'completion': ' rw [← mul_one a, ← mul_inv_cancel (right_ne_zero_of_mul_eq_one h) hb, ← mul_assoc, h, one_mul]\\n'},\n", + " {'prompt': 'GOAL a : ℝ≥0∞,\\tb : ℝ≥0,\\th : a ≤ ↑b,\\tthis : ↑(a.to_nnreal) = a\\t⊢ ↑(a.to_nnreal) ≤ 
↑b\\n PROOFSTEP ',\n", + " 'completion': ' rw ← this at h\\n'},\n", + " {'prompt': 'GOAL a : ℝ≥0∞,\\tb : ℝ≥0,\\tthis : ↑(a.to_nnreal) = a,\\th : ↑(a.to_nnreal) ≤ ↑b\\t⊢ ↑(a.to_nnreal) ≤ ↑b\\n PROOFSTEP ',\n", + " 'completion': ' exact_mod_cast h\\n'},\n", + " {'prompt': 'GOAL a b : ℝ≥0∞,\\tha : a ≠ ⊤,\\thb : b ≠ 0\\t⊢ a < a + b\\n PROOFSTEP ',\n", + " 'completion': ' rwa [← pos_iff_ne_zero, ← add_lt_add_iff_left ha, add_zero] at hb\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ta : α,\\ts : wseq α\\t⊢ (wseq.cons a s).destruct.destruct = sum.inl (option.some (a, s))\\n PROOFSTEP ',\n", + " 'completion': ' simp [destruct, cons, computation.rmap]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts : wseq α,\\tS : wseq (wseq α)\\t⊢ (wseq.cons s S).join = (s.append S.join).think\\n PROOFSTEP ',\n", + " 'completion': ' simp [think, join]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\ts : set α,\\tx : α,\\thx : x ∈ s\\t⊢ x ∈ s\\n PROOFSTEP ',\n", + " 'completion': ' exact hx\\n'},\n", + " {'prompt': 'GOAL M : Type u_1,\\t_inst_2 : comm_monoid M,\\tS : submonoid M,\\tm : multiset ↥S\\t⊢ ↑(m.prod) ∈ S\\n PROOFSTEP ',\n", + " 'completion': ' exact m.prod.coe_prop\\n'},\n", + " {'prompt': 'GOAL a : ℝ,\\tp q : ℚ\\t⊢ ⇑measure_theory.measure_space.volume (set.Ioo ↑p ↑q) = ⇑(⇑(measure_theory.measure.map (has_add.add a)) measure_theory.measure_space.volume) (set.Ioo ↑p ↑q)\\n PROOFSTEP ',\n", + " 'completion': ' simp [measure.map_apply (measurable_const_add a) measurable_set_Ioo, sub_sub_sub_cancel_right]\\n'},\n", + " {'prompt': 'GOAL ι : Type u_1,\\t_inst_1 : fintype ι,\\ta : ι → ℝ\\t⊢ ⇑(measure_theory.measure.map (has_add.add a)) measure_theory.measure_space.volume = measure_theory.measure_space.volume\\n PROOFSTEP ',\n", + " 'completion': ' refine (measure.pi_eq (λ s hs, _)).symm\\n'},\n", + " {'prompt': \"GOAL pqr : multiset ℕ+,\\tp' q' : ℕ+,\\tH : ADE_inequality.A' p' q' = pqr\\t⊢ 1 < ADE_inequality.sum_inv pqr\\n PROOFSTEP \",\n", + " 'completion': \" rw [← H, A', sum_inv_pqr, add_assoc]\\n\"},\n", + " {'prompt': 'GOAL p : ℕ,\\thp_prime : fact (nat.prime p),\\tε : ℚ,\\thε : 0 < ε\\t⊢ 0 < ↑ε\\n PROOFSTEP ',\n", + " 'completion': ' exact_mod_cast hε\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : order_bot α,\\tb : α\\t⊢ option.none.get_or_else ⊥ ≤ b ↔ option.none ≤ ↑b\\n PROOFSTEP ',\n", + " 'completion': ' simp [none_eq_bot, some_eq_coe]\\n'},\n", + " {'prompt': 'GOAL D : Type u,\\t_inst_4 : integral_domain D,\\t_inst_5 : unique_factorization_monoid D,\\t_inst : normalization_monoid D\\t⊢ unique_factorization_monoid (polynomial D)\\n PROOFSTEP ',\n", + " 'completion': ' haveI := to_gcd_monoid D\\n'},\n", + " {'prompt': 'GOAL R : Type u_5,\\t_inst_5 : integral_domain R,\\tx : R,\\thx : x = 1\\t⊢ x = 1\\n PROOFSTEP ',\n", + " 'completion': ' exact hx\\n'},\n", + " {'prompt': 'GOAL p : ℕ,\\tR : Type u_1,\\thp : fact (nat.prime p),\\t_inst_1 : comm_ring R\\t⊢ 0.frobenius_fun = 0\\n PROOFSTEP ',\n", + " 'completion': ' refine is_poly.ext ((frobenius_fun_is_poly p).comp (witt_vector.zero_is_poly)) ((witt_vector.zero_is_poly).comp (frobenius_fun_is_poly p)) _ _ 0\\n'},\n", + " {'prompt': 'GOAL p : ℕ,\\tR : Type u_1,\\thp : fact (nat.prime p),\\t_inst_1 : comm_ring R\\t⊢ 1.frobenius_fun = 1\\n PROOFSTEP ',\n", + " 'completion': ' refine is_poly.ext ((frobenius_fun_is_poly p).comp (witt_vector.one_is_poly)) ((witt_vector.one_is_poly).comp (frobenius_fun_is_poly p)) _ _ 0\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tf : punit ⊕ punit ↪ α\\t⊢ ∃ (x y : α), x ≠ y\\n PROOFSTEP ',\n", + " 'completion': ' refine 
⟨f $ sum.inl ⟨⟩, f $ sum.inr ⟨⟩, _⟩\\n'},\n", + " {'prompt': 'GOAL a b : cardinal,\\th : a * b = a,\\tha : ω ≤ a\\t⊢ a ≠ 0\\n PROOFSTEP ',\n", + " 'completion': ' rintro rfl\\n'},\n", + " {'prompt': 'GOAL m n : ℤ,\\tz : ↥circle\\t⊢ ⇑(fourier (m + n)) z = ⇑(fourier m) z * ⇑(fourier n) z\\n PROOFSTEP ',\n", + " 'completion': ' simp [fpow_add (nonzero_of_mem_circle z)]\\n'},\n", + " {'prompt': 'GOAL V : Type u_5,\\t_inst_1 : semi_normed_group V,\\tx y : uniform_space.completion V\\t⊢ has_dist.dist x y = ∥x - y∥\\n PROOFSTEP ',\n", + " 'completion': ' apply completion.induction_on₂ x y; clear x y\\n'},\n", + " {'prompt': 'GOAL V : Type u_5,\\t_inst_1 : semi_normed_group V,\\tx y : uniform_space.completion V\\t⊢ has_dist.dist x y = ∥x - y∥\\n PROOFSTEP ',\n", + " 'completion': ' apply completion.induction_on₂ x y\\n'},\n", + " {'prompt': 'GOAL V : Type u_5,\\t_inst_1 : semi_normed_group V\\t⊢ continuous (λ (x : uniform_space.completion V × uniform_space.completion V), ∥x.fst - x.snd∥)\\n PROOFSTEP ',\n", + " 'completion': ' exact continuous.comp completion.continuous_extension continuous_sub\\n'},\n", + " {'prompt': 'GOAL V : Type u_5,\\t_inst_1 : semi_normed_group V\\t⊢ ∀ (a b : V), has_dist.dist ↑a ↑b = ∥↑a - ↑b∥\\n PROOFSTEP ',\n", + " 'completion': ' intros x y\\n'},\n", + " {'prompt': 'GOAL h : -1 ≠ 1\\t⊢ ∀ (x : ℝ), x ∈ set.Iic (-1) → real.arcsin x = -(π / 2)\\n PROOFSTEP ',\n", + " 'completion': ' simp [arcsin_of_le_neg_one] { contextual := tt }\\n'},\n", + " {'prompt': 'GOAL X : Type u,\\t_inst_1 : preorder X,\\tx y : X,\\th : x ⟶ y\\t⊢ _.hom = h\\n PROOFSTEP ',\n", + " 'completion': ' cases h\\n'},\n", + " {'prompt': 'GOAL C : Type u₁,\\t_inst_1 : category_theory.small_category C,\\tP : Cᵒᵖ ⥤ Type u₁\\t⊢ category_theory.is_iso ((category_theory.limits.colimit.is_colimit (category_theory.functor_to_representables P)).desc (category_theory.cocone_of_representable P))\\n PROOFSTEP ',\n", + " 'completion': ' change is_iso (colimit.desc _ (cocone.extend _ _))\\n'},\n", + " {'prompt': 'GOAL J : Type v,\\t_inst_1 : category_theory.small_category J,\\tF : J ⥤ category_theory.discrete punit,\\tc : category_theory.limits.cone F\\t⊢ category_theory.limits.is_limit c\\n PROOFSTEP ',\n", + " 'completion': ' tidy\\n'},\n", + " {'prompt': 'GOAL C : Type u,\\t_inst_1 : category_theory.category C,\\tX : C,\\tP : Cᵒᵖ ⥤ Type v\\t⊢ category_theory.presieve.is_sheaf_for P ⇑⊤\\n PROOFSTEP ',\n", + " 'completion': ' rw ← generate_of_singleton_split_epi (𝟙 X)\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\tc : composition n,\\tj : fin n\\t⊢ ↑j - c.size_up_to ↑(c.index j) < c.blocks_fun (c.index j)\\n PROOFSTEP ',\n", + " 'completion': \" rw [nat.sub_lt_right_iff_lt_add, add_comm, ← size_up_to_succ']\\n\"},\n", + " {'prompt': 'GOAL Γ : Type ?,\\t_inst_1 : inhabited Γ,\\tl : turing.list_blank Γ\\t⊢ list.nil.head = (list.nil ++ list.repeat (inhabited.default Γ) 0).head\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL ⊢ ∀ {α : Type ?} {β : Type ?} (x : α ⊕ β), bifunctor.bimap id id x = x\\n PROOFSTEP ',\n", + " 'completion': ' intros\\n'},\n", + " {'prompt': \"GOAL α β : Type,\\ta : α,\\tf : α → parser β,\\t_inst_1 : (f a).step\\t⊢ (list.mmap' f [a]).step\\n PROOFSTEP \",\n", + " 'completion': ' convert step.and_then\\n'},\n", + " {'prompt': \"GOAL α β : Type,\\ta : α,\\tf : α → parser β,\\t_inst_1 : (f a).step\\t⊢ (list.mmap' f list.nil).static\\n PROOFSTEP \",\n", + " 'completion': ' exact static.pure\\n'},\n", + " {'prompt': 'GOAL x y : ℝ\\t⊢ real.sinh (x + y) = real.sinh x * real.cosh y + 
real.cosh x * real.sinh y\\n PROOFSTEP ',\n", + " 'completion': ' rw ← of_real_inj; simp [sinh_add]\\n'},\n", + " {'prompt': 'GOAL x y : ℝ\\t⊢ ↑(real.sinh (x + y)) = ↑(real.sinh x * real.cosh y + real.cosh x * real.sinh y)\\n PROOFSTEP ',\n", + " 'completion': ' simp [sinh_add]\\n'},\n", + " {'prompt': 'GOAL x y : ℝ\\t⊢ real.sinh (x + y) = real.sinh x * real.cosh y + real.cosh x * real.sinh y\\n PROOFSTEP ',\n", + " 'completion': ' rw ← of_real_inj\\n'},\n", + " {'prompt': 'GOAL x y : ℂ\\t⊢ complex.sin (x - y) = complex.sin x * complex.cos y - complex.cos x * complex.sin y\\n PROOFSTEP ',\n", + " 'completion': ' simp [sub_eq_add_neg, sin_add, sin_neg, cos_neg]\\n'},\n", + " {'prompt': 'GOAL x y : ℝ\\t⊢ real.sin (x + y) = real.sin x * real.cos y + real.cos x * real.sin y\\n PROOFSTEP ',\n", + " 'completion': ' rw [← of_real_inj]; simp [sin, sin_add]\\n'},\n", + " {'prompt': 'GOAL x y : ℝ\\t⊢ ↑(real.sin (x + y)) = ↑(real.sin x * real.cos y + real.cos x * real.sin y)\\n PROOFSTEP ',\n", + " 'completion': ' simp [sin, sin_add]\\n'},\n", + " {'prompt': 'GOAL x y : ℝ\\t⊢ real.sin (x + y) = real.sin x * real.cos y + real.cos x * real.sin y\\n PROOFSTEP ',\n", + " 'completion': ' rw [← of_real_inj]\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\ti : fin (n + 2),\\th : i < fin.last (n + 1)\\t⊢ ↑(i.cast_pred) ≤ ↑i\\n PROOFSTEP ',\n", + " 'completion': ' rw [cast_pred, pred_above, dif_neg]\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\tj : fin (n + 1),\\th : j ≠ 0\\t⊢ ↑(j.pred h) = ↑j - 1\\n PROOFSTEP ',\n", + " 'completion': ' cases j\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : decidable_eq α,\\ts : finset α\\t⊢ ∃ (a : list α), a.to_finset = s\\n PROOFSTEP ',\n", + " 'completion': ' rcases to_finset_surj_on (set.mem_univ s) with ⟨l, -, hls⟩\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\ts : multiset α,\\ta : α\\t⊢ a ∈ s.to_finset ↔ multiset.count a s ≠ 0\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tm n : α →₀ ℕ,\\th : m < n\\t⊢ ⇑finsupp.to_multiset m < ⇑finsupp.to_multiset n\\n PROOFSTEP ',\n", + " 'completion': ' exact to_multiset_strict_mono h\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tβ : Type v,\\tb : β\\t⊢ list.map (function.const α b) list.nil = list.repeat b list.nil.length\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': \"GOAL α : Type u_1,\\tx : α,\\tl l' : list α,\\thx : x ∈+ list.nil\\t⊢ x ∈+ list.nil\\n PROOFSTEP \",\n", + " 'completion': ' exact hx\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tm n : ℕ,\\t_inst_1 : has_repr α\\t⊢ has_repr (fin m → fin n → α)\\n PROOFSTEP ',\n", + " 'completion': ' apply_instance\\n'},\n", + " {'prompt': 'GOAL b m : ℕ\\t⊢ ∀ {d : ℕ}, d ∈ (b + 2).digits m → d < b + 2\\n PROOFSTEP ',\n", + " 'completion': ' apply nat.strong_induction_on m\\n'},\n", + " {'prompt': 'GOAL n m : ℕ,\\thnm : n ≤ m\\t⊢ n! 
* (n + 1) ^ (m - n) ≤ m!\\n PROOFSTEP ',\n", + " 'completion': ' convert nat.factorial_mul_pow_le_factorial\\n'},\n", + " {'prompt': 'GOAL m n k n : ℕ,\\t_x : k ∣ 0,\\tkn : k ∣ n\\t⊢ k ∣ 0.gcd n\\n PROOFSTEP ',\n", + " 'completion': ' rw gcd_zero_left\\n'},\n", + " {'prompt': 'GOAL m n k n : ℕ,\\t_x : k ∣ 0,\\tkn : k ∣ n\\t⊢ k ∣ 0.gcd n\\n PROOFSTEP ',\n", + " 'completion': ' rw gcd_zero_left; exact kn\\n'},\n", + " {'prompt': 'GOAL m n val : ℕ,\\ts : m.psub n = option.some val\\t⊢ option.some val = option.none ↔ m < n\\n PROOFSTEP ',\n", + " 'completion': ' simp [eq_comm]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : ring R,\\ta : add_monoid_algebra R ℕ\\t⊢ 0 • {to_finsupp := a} = 0\\n PROOFSTEP ',\n", + " 'completion': ' simp [smul_to_finsupp, ← zero_to_finsupp]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp q : polynomial R\\t⊢ (p + q).nat_degree ≤ linear_order.max p.nat_degree q.nat_degree\\n PROOFSTEP ',\n", + " 'completion': ' cases le_max_iff.1 (degree_add_le p q)\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp q : polynomial R\\t⊢ (p + q).nat_degree ≤ linear_order.max p.nat_degree q.nat_degree\\n PROOFSTEP ',\n", + " 'completion': ' cases le_max_iff.1 (degree_add_le p q); simp [nat_degree_le_nat_degree h]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp : polynomial R,\\thp : 0 < p.nat_trailing_degree\\t⊢ ¬p.nat_trailing_degree = 0\\n PROOFSTEP ',\n", + " 'completion': ' contrapose! hp\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp : polynomial R,\\thp : p.nat_trailing_degree = 0\\t⊢ p.nat_trailing_degree ≤ 0\\n PROOFSTEP ',\n", + " 'completion': ' simpa\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tn : ℕ,\\th : 1 < n\\t⊢ polynomial.X.iterated_deriv n = 0\\n PROOFSTEP ',\n", + " 'completion': ' induction n with n ih\\n'},\n", + " {'prompt': \"GOAL α : Type u,\\ts : seq α\\t⊢ option.none = prod.fst <$> (λ (a' : α), (a', s.tail)) <$> option.none\\n PROOFSTEP \",\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : linear_order α,\\ta b c : α\\t⊢ linear_order.min a b ≤ linear_order.max a c\\n PROOFSTEP ',\n", + " 'completion': ' exact (min_le_left _ _).trans (le_max_left _ _)\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : linear_order α,\\ta b c : α\\t⊢ linear_order.min a c ≤ linear_order.max a b\\n PROOFSTEP ',\n", + " 'completion': ' exact (min_le_left _ _).trans (le_max_left _ _)\\n'},\n", + " {'prompt': 'GOAL p : ℕ,\\t_inst_4 : fact (nat.prime p),\\ta : units (zmod p)\\t⊢ a ^ (p - 1) = 1\\n PROOFSTEP ',\n", + " 'completion': ' rw [← card_units p, pow_card_eq_one]\\n'},\n", + " {'prompt': 'GOAL R : Type u_1,\\t_inst_1 : comm_ring R,\\tI : ideal R,\\t_inst_6 : is_adic_complete I R\\t⊢ I ≤ ⊥.jacobson\\n PROOFSTEP ',\n", + " 'completion': ' intros x hx\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : topological_space α,\\t_inst_2 : t1_space α,\\t_inst_3 : encodable α\\t⊢ borel α = ⊤\\n PROOFSTEP ',\n", + " 'completion': ' refine (top_le_iff.1 $ λ s hs, bUnion_of_singleton s ▸ _)\\n'},\n", + " {'prompt': \"GOAL pqr : multiset ℕ+,\\tp' q' : ℕ+,\\tH : ADE_inequality.A' p' q' = pqr\\t⊢ 0 < (↑↑p')⁻¹ + (↑↑q')⁻¹\\n PROOFSTEP \",\n", + " 'completion': ' apply add_pos; simp only [pnat.pos, nat.cast_pos, inv_pos]\\n'},\n", + " {'prompt': \"GOAL pqr : multiset ℕ+,\\tp' q' : ℕ+,\\tH : ADE_inequality.A' p' q' = pqr\\t⊢ 0 < (↑↑p')⁻¹ + (↑↑q')⁻¹\\n PROOFSTEP \",\n", + " 'completion': ' apply add_pos\\n'},\n", + " {'prompt': \"GOAL 
pqr : multiset ℕ+,\\tn : ℕ+,\\tH : ADE_inequality.D' n = pqr\\t⊢ 1 < (↑2)⁻¹ + (↑2)⁻¹ + (↑n)⁻¹\\n PROOFSTEP \",\n", + " 'completion': ' simp only [lt_add_iff_pos_right, pnat.one_coe, inv_one, nat.cast_one, coe_coe, pnat.coe_bit0, nat.cast_bit0]\\n'},\n", + " {'prompt': \"GOAL pqr : multiset ℕ+,\\tn : ℕ+,\\tH : ADE_inequality.D' n = pqr\\t⊢ 1 < 2⁻¹ + 2⁻¹ + (↑↑n)⁻¹\\n PROOFSTEP \",\n", + " 'completion': ' norm_num\\n'},\n", + " {'prompt': 'GOAL A : Type u_1,\\t_inst_1 : comm_ring A,\\t_inst_2 : algebra ℚ A,\\tn m : ℕ\\t⊢ ↑m! ≠ 0\\n PROOFSTEP ',\n", + " 'completion': ' exact_mod_cast factorial_ne_zero m\\n'},\n", + " {'prompt': 'GOAL a : ℕ,\\ta1 : 1 < a,\\tm n : ℕ,\\th : n ≤ m\\t⊢ 1 < ?m_1\\n PROOFSTEP ',\n", + " 'completion': ' repeat {rw ← neg_mul_eq_mul_neg at h}\\n'},\n", + " {'prompt': 'GOAL a : ℕ,\\ta1 : 1 < a,\\tm n : ℕ,\\th : n ≤ m\\t⊢ 1 < ?m_1\\n PROOFSTEP ',\n", + " 'completion': ' rw ← neg_mul_eq_mul_neg at h\\n'},\n", + " {'prompt': 'GOAL x y z : ℤ,\\th : pythagorean_triple x y z,\\th0 : x.gcd y = 0\\t⊢ x = 0\\n PROOFSTEP ',\n", + " 'completion': ' apply int.nat_abs_eq_zero.mp\\n'},\n", + " {'prompt': 'GOAL x y z : ℤ,\\th : pythagorean_triple x y z,\\th0 : x.gcd y = 0\\t⊢ x.nat_abs = 0\\n PROOFSTEP ',\n", + " 'completion': ' apply nat.eq_zero_of_gcd_eq_zero_left h0\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : partial_order α,\\tc : closure_operator α\\t⊢ c = closure_operator.mk₃ ⇑c c.closed _ _ _\\n PROOFSTEP ',\n", + " 'completion': ' ext\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : comm_ring R,\\tf : polynomial R\\t⊢ polynomial.eval₂ (adjoin_root.of f) (adjoin_root.root f) f = 0\\n PROOFSTEP ',\n", + " 'completion': ' rw [← algebra_map_eq, ← aeval_def, aeval_eq, mk_self]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : field R,\\ta : R,\\th : a = 0\\t⊢ is_unit (1 - a)\\n PROOFSTEP ',\n", + " 'completion': ' rw [h, sub_zero]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : field R,\\ta : R,\\th : a = 0\\t⊢ is_unit (1 - a)\\n PROOFSTEP ',\n", + " 'completion': ' rw [h, sub_zero]; exact is_unit_one\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : comm_ring R,\\ts : R\\t⊢ ideal.span {s} ^ 0 = ideal.span {s ^ 0}\\n PROOFSTEP ',\n", + " 'completion': ' simp [set.singleton_one]\\n'},\n", + " {'prompt': 'GOAL R : Type u_1,\\t_inst_1 : integral_domain R,\\t_inst_2 : gcd_monoid R,\\tr : R\\t⊢ (⇑polynomial.C r).content = ⇑normalize r\\n PROOFSTEP ',\n", + " 'completion': ' rw content\\n'},\n", + " {'prompt': \"GOAL α : Type u,\\tx y : α,\\th : x ≠ y,\\th' : ¬2 ≤ # α\\t⊢ false\\n PROOFSTEP \",\n", + " 'completion': \" rw [not_le, ←nat.cast_two, nat_succ, lt_succ, nat.cast_one, le_one_iff_subsingleton] at h'\\n\"},\n", + " {'prompt': \"GOAL α : Type u,\\tx y : α,\\th : x ≠ y,\\th' : subsingleton α\\t⊢ x = y\\n PROOFSTEP \",\n", + " 'completion': ' exactI subsingleton.elim _ _\\n'},\n", + " {'prompt': 'GOAL m n : ℕ,\\tIH : ↑(m * n) = ↑m * ↑n\\t⊢ ↑(m * n.succ) = ↑m * ↑(n.succ)\\n PROOFSTEP ',\n", + " 'completion': ' rw [nat.mul_succ, nat.cast_add, IH, nat.cast_succ, mul_add_one]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : topological_space α,\\ts : set α,\\th : closure s ⊆ s\\t⊢ is_closed s\\n PROOFSTEP ',\n", + " 'completion': ' rw subset.antisymm subset_closure h; exact is_closed_closure\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : topological_space α,\\ts : set α,\\th : closure s ⊆ s\\t⊢ is_closed s\\n PROOFSTEP ',\n", + " 'completion': ' rw subset.antisymm subset_closure h\\n'},\n", + " {'prompt': 'GOAL r : ℝ\\t⊢ ∀ᶠ (a : ereal × 
ereal) in 𝓝 ⊤ ×ᶠ 𝓝 ⊤, ↑r < a.fst + a.snd\\n PROOFSTEP ',\n", + " 'completion': ' rw eventually_prod_iff\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : pseudo_metric_space α,\\ts : set α,\\th : ¬metric.bounded s\\t⊢ metric.diam s = 0\\n PROOFSTEP ',\n", + " 'completion': ' rw [diam, ediam_of_unbounded h, ennreal.top_to_real]\\n'},\n", + " {'prompt': \"GOAL α : Type u_1,\\t_inst_1 : uniform_space α,\\ts : set α,\\ths : is_complete s\\t⊢ is_complete (coe '' set.univ)\\n PROOFSTEP \",\n", + " 'completion': ' simp [hs]\\n'},\n", + " {'prompt': 'GOAL V : Type u_5,\\t_inst_1 : semi_normed_group V\\t⊢ ∀ (x y : uniform_space.completion V), has_dist.dist x y = ∥x - y∥\\n PROOFSTEP ',\n", + " 'completion': ' intros x y\\n'},\n", + " {'prompt': 'GOAL x : ℝ,\\th : x ≠ 1\\t⊢ has_deriv_within_at real.arcsin (1 / real.sqrt (1 - x ^ 2)) (set.Iic x) x\\n PROOFSTEP ',\n", + " 'completion': \" rcases em (x = -1) with (rfl|h')\\n\"},\n", + " {'prompt': 'GOAL α : Type u,\\tσ : Type v,\\tM : NFA α σ,\\tS : set σ\\t⊢ M.to_ε_NFA.ε_closure S = S\\n PROOFSTEP ',\n", + " 'completion': ' ext a\\n'},\n", + " {'prompt': 'GOAL a : primrec (λ (a : list ?m_1), denumerable.of_nat (ℕ × nat.partrec.code) a.length)\\t⊢ primrec G\\t\\t⊢ Type ?\\t\\t⊢ primcodable ?m_1\\n PROOFSTEP ',\n", + " 'completion': ' have k := fst.comp a\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\ti : fin (n + 2),\\th : i < fin.last (n + 1)\\t⊢ ↑(i.cast_lt _) ≤ ↑i\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tf : α →₀ ℕ,\\ta : α\\t⊢ ⇑f a = 0 → 0 = ⇑f a\\n PROOFSTEP ',\n", + " 'completion': ' exact eq.symm\\n'},\n", + " {'prompt': 'GOAL n a b : ℤ,\\th : a ≡ b [ZMOD n]\\t⊢ a ^ 0 ≡ b ^ 0 [ZMOD n]\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': \"GOAL α : Type u,\\tlast'_is_some : ∀ {l : list α}, ↥(l.last'.is_some) ↔ l ≠ list.nil\\t⊢ ↥(list.nil.last'.is_some) ↔ list.nil ≠ list.nil\\n PROOFSTEP \",\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': \"GOAL α : Type u,\\tl' : list α,\\tk : ℕ,\\th : l'.rotate k ~r l'\\t⊢ l'.rotate k ∈ l'.cyclic_permutations\\n PROOFSTEP \",\n", + " 'completion': ' rw mem_iff_nth_le\\n'},\n", + " {'prompt': 'GOAL a b c : ℕ,\\tha0 : a = 0\\t⊢ a / c * b / a ≤ b / c\\n PROOFSTEP ',\n", + " 'completion': ' simp [ha0]\\n'},\n", + " {'prompt': 'GOAL a b c : ℕ,\\tha0 : ¬a = 0\\t⊢ a / c * b ≤ b * a / c\\n PROOFSTEP ',\n", + " 'completion': ' rw [mul_comm]\\n'},\n", + " {'prompt': 'GOAL a b c : ℕ,\\tha0 : ¬a = 0\\t⊢ a / c * b ≤ b * a / c\\n PROOFSTEP ',\n", + " 'completion': ' rw [mul_comm]; exact mul_div_le_mul_div_assoc _ _ _\\n'},\n", + " {'prompt': 'GOAL a b c : ℕ,\\tha0 : ¬a = 0\\t⊢ b * (a / c) ≤ b * a / c\\n PROOFSTEP ',\n", + " 'completion': ' exact mul_div_le_mul_div_assoc _ _ _\\n'},\n", + " {'prompt': 'GOAL a b c : ℕ,\\tha0 : ¬a = 0\\t⊢ b * a / c / a = b / c\\n PROOFSTEP ',\n", + " 'completion': ' rw [nat.div_div_eq_div_mul, mul_comm b, mul_comm c, nat.mul_div_mul _ _ (nat.pos_of_ne_zero ha0)]\\n'},\n", + " {'prompt': 'GOAL m n : ℕ,\\th : n ≤ m\\t⊢ (even (m - n) ↔ (even m ↔ even n)) = ?m_1\\n PROOFSTEP ',\n", + " 'completion': ' to_rhs\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\th : n % 2 = 0\\t⊢ ¬n % 2 = 1 ↔ n % 2 = 0\\n PROOFSTEP ',\n", + " 'completion': ' simp [h]\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\th : n % 2 = 1\\t⊢ ¬n % 2 = 1 ↔ n % 2 = 0\\n PROOFSTEP ',\n", + " 'completion': ' simp [h]\\n'},\n", + " {'prompt': 'GOAL n m : ℕ+,\\tu : prime_multiset := n.factor_multiset,\\tv : prime_multiset := m.factor_multiset\\t⊢ (n * m).factor_multiset = 
n.factor_multiset + m.factor_multiset\\n PROOFSTEP ',\n", + " 'completion': ' have : n = u.prod := (prod_factor_multiset n).symm\\n'},\n", + " {'prompt': 'GOAL R : Type u_1,\\t_inst_2 : semiring R,\\tn k : ℕ\\t⊢ ((1 + polynomial.X) ^ n).coeff k = ↑(n.choose k)\\n PROOFSTEP ',\n", + " 'completion': ' rw [add_comm _ X, coeff_X_add_one_pow]\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\tF : typevec n → Type u_1,\\t_inst_1 : mvfunctor F,\\tq : mvqpf F\\t⊢ q.liftp_preservation ↔ q.is_uniform\\n PROOFSTEP ',\n", + " 'completion': ' rw [← supp_preservation_iff_liftp_preservation, supp_preservation_iff_uniform]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : linear_ordered_field α,\\tx y : α\\t⊢ const x ≤ const y ↔ x ≤ y\\n PROOFSTEP ',\n", + " 'completion': ' rw le_iff_lt_or_eq; exact or_congr const_lt const_equiv\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : linear_ordered_field α,\\tx y : α\\t⊢ const x ≤ const y ↔ x ≤ y\\n PROOFSTEP ',\n", + " 'completion': ' rw le_iff_lt_or_eq\\n'},\n", + " {'prompt': 'GOAL a b : ℝ≥0∞,\\tha : ¬a = ⊤,\\thb : b = ⊤\\t⊢ (a + b).to_real ≤ a.to_real + b.to_real\\n PROOFSTEP ',\n", + " 'completion': ' simp only [hb, add_top, top_to_real, add_zero, to_real_nonneg]\\n'},\n", + " {'prompt': 'GOAL a b : ℝ≥0∞,\\tha : a ≠ ⊤,\\thb : b ≠ ⊤\\t⊢ a.to_real = b.to_real ↔ a = b\\n PROOFSTEP ',\n", + " 'completion': ' lift a to ℝ≥0 using ha\\n'},\n", + " {'prompt': 'GOAL x y : ℝ,\\th : 0 ≤ y\\t⊢ abs x ≤ real.sqrt y → abs x ^ 2 ≤ y\\n PROOFSTEP ',\n", + " 'completion': ' exact (le_sqrt (abs_nonneg x) h).mp\\n'},\n", + " {'prompt': 'GOAL α : Type ?,\\tq : semiquot α\\t⊢ ∀ (a : α), a ∈ id <$> q ↔ a ∈ q\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts₁ s₂ : seq α,\\ta : α,\\th : a ∈ s₁\\t⊢ a ∈ s₁.append s₂\\n PROOFSTEP ',\n", + " 'completion': ' apply mem_rec_on h; intros; simp [*]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts₁ s₂ : seq α,\\ta : α,\\th : a ∈ s₁\\t⊢ a ∈ s₁.append s₂\\n PROOFSTEP ',\n", + " 'completion': ' apply mem_rec_on h; intros\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts₁ s₂ : seq α,\\ta : α,\\th : a ∈ s₁\\t⊢ a ∈ s₁.append s₂\\n PROOFSTEP ',\n", + " 'completion': ' apply mem_rec_on h\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : linear_order α,\\ta b : α\\t⊢ set.Ioi a \\\\ set.Ici b = set.Ioo a b\\n PROOFSTEP ',\n", + " 'completion': ' rw [diff_eq, compl_Ici, Ioi_inter_Iio]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : linear_order α,\\ta b : α\\t⊢ a ≤ b → [a, b] = [b, a]\\n PROOFSTEP ',\n", + " 'completion': ' simp {contextual := tt}\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : linear_order α,\\ta b : α\\t⊢ b ≤ a → [a, b] = [b, a]\\n PROOFSTEP ',\n", + " 'completion': ' simp {contextual := tt}\\n'},\n", + " {'prompt': 'GOAL M : Type u_1,\\t_inst_2 : comm_monoid M,\\tS : submonoid M,\\tm : multiset ↥S\\t⊢ (multiset.map coe m).prod ∈ S\\n PROOFSTEP ',\n", + " 'completion': ' rw ← coe_multiset_prod\\n'},\n", + " {'prompt': 'GOAL p : ℕ,\\t_inst_1 : fact (nat.prime p),\\tf g : padic_seq p,\\th : f ≈ g\\t⊢ f.valuation = g.valuation\\n PROOFSTEP ',\n", + " 'completion': ' by_cases hf : f ≈ 0\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tB : filter_basis α,\\tU : set α,\\tU_in : U ∈ B.filter\\t⊢ U ∈ filter.generate B.sets\\n PROOFSTEP ',\n", + " 'completion': ' rcases B.mem_filter_iff.mp U_in with ⟨V, V_in, h⟩\\n'},\n", + " {'prompt': 'GOAL R : Type u_1,\\t_inst_1 : comm_ring R,\\tM : submonoid R,\\ty : localization M\\t⊢ -y + y = 0\\n PROOFSTEP ',\n", + " 'completion': ' exact 
localization.induction_on y (by tac)\\n'},\n", + " {'prompt': 'GOAL R : Type u_5,\\t_inst_5 : integral_domain R\\t⊢ ∀ (x : R), x ∈ primitive_roots 1 R → x = 1\\n PROOFSTEP ',\n", + " 'completion': ' intros x hx\\n'},\n", + " {'prompt': 'GOAL R : Type u_5,\\t_inst_5 : integral_domain R,\\tx : R,\\thx : x ∈ primitive_roots 1 R\\t⊢ x = 1\\n PROOFSTEP ',\n", + " 'completion': ' rw [mem_primitive_roots zero_lt_one, is_primitive_root.one_right_iff] at hx\\n'},\n", + " {'prompt': 'GOAL p : ℕ,\\tR : Type u_1,\\thp : fact (nat.prime p),\\t_inst_1 : comm_ring R\\t⊢ ⇑(witt_vector.teichmuller p) 0 = 0\\n PROOFSTEP ',\n", + " 'completion': ' ext ⟨⟩; { rw zero_coeff, refl }\\n'},\n", + " {'prompt': 'GOAL p : ℕ,\\tR : Type u_1,\\thp : fact (nat.prime p),\\t_inst_1 : comm_ring R\\t⊢ ⇑(witt_vector.teichmuller p) 0 = 0\\n PROOFSTEP ',\n", + " 'completion': ' ext ⟨⟩\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tf : punit ⊕ punit ↪ α,\\th : ⇑f (sum.inl punit.star) = ⇑f (sum.inr punit.star)\\t⊢ false\\n PROOFSTEP ',\n", + " 'completion': ' cases f.2 h\\n'},\n", + " {'prompt': 'GOAL ⊢ dite (0 < ω) (λ (h : 0 < ω), classical.some _) (λ (h : ¬0 < ω), 0) = 0\\n PROOFSTEP ',\n", + " 'completion': ' have h : 0 < ω := nat_lt_omega 0\\n'},\n", + " {'prompt': 'GOAL a : cardinal,\\tha : ω ≤ a,\\tthis : a ≠ 0,\\th : a * 0 = a\\t⊢ false\\n PROOFSTEP ',\n", + " 'completion': ' apply this\\n'},\n", + " {'prompt': 'GOAL a : cardinal,\\tha : ω ≤ a,\\tthis : a ≠ 0,\\th : 0 = a\\t⊢ a = 0\\n PROOFSTEP ',\n", + " 'completion': ' subst h\\n'},\n", + " {'prompt': 'GOAL a : cardinal,\\tha : a < ω,\\th2a : ¬a = 0,\\th : a * 0 = a\\t⊢ false\\n PROOFSTEP ',\n", + " 'completion': ' apply h2a\\n'},\n", + " {'prompt': 'GOAL a : cardinal,\\tha : a < ω,\\th2a : ¬a = 0,\\th : 0 = a\\t⊢ a = 0\\n PROOFSTEP ',\n", + " 'completion': ' subst h\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : topological_space α,\\ts : set α,\\th : closure s ⊆ s\\t⊢ is_closed (closure s)\\n PROOFSTEP ',\n", + " 'completion': ' exact is_closed_closure\\n'},\n", + " {'prompt': 'GOAL ⊢ ∀ (x : ℝ), ∀ᶠ (a : ereal × ereal) in 𝓝 ⊤ ×ᶠ 𝓝 ⊤, ↑x < a.fst + a.snd\\n PROOFSTEP ',\n", + " 'completion': ' assume r\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : pseudo_metric_space α,\\tx : α,\\tε : ℝ≥0\\t⊢ emetric.ball x ↑ε = metric.ball x ↑ε\\n PROOFSTEP ',\n", + " 'completion': ' convert metric.emetric_ball\\n'},\n", + " {'prompt': 'GOAL E : Type u_1,\\t_inst_1 : inner_product_space ℝ E,\\tc : E,\\tf : times_cont_diff_bump_of_inner c\\t⊢ function.support ⇑f = metric.ball c f.R\\n PROOFSTEP ',\n", + " 'completion': ' ext x\\n'},\n", + " {'prompt': 'GOAL X Y : SemiNormedGroup₁,\\tf : X ⟶ Y,\\ts : category_theory.limits.cokernel_cofork f,\\tb : ↥X\\t⊢ ⇑((category_theory.limits.cofork.π s).val) (⇑(f.val.to_add_monoid_hom) b) = 0\\n PROOFSTEP ',\n", + " 'completion': ' change (f ≫ s.π) b = 0\\n'},\n", + " {'prompt': 'GOAL C : Type u₁,\\t_inst_1 : category_theory.small_category C,\\tP : Cᵒᵖ ⥤ Type u₁\\t⊢ category_theory.is_iso (𝟙 (category_theory.limits.colimit (category_theory.functor_to_representables P)) ≫ category_theory.extend_along_yoneda_yoneda.hom.app P)\\n PROOFSTEP ',\n", + " 'completion': ' apply_instance\\n'},\n", + " {'prompt': 'GOAL C : Type u,\\t_inst_1 : category_theory.category C,\\tF : category_theory.limits.walking_parallel_pair ⥤ C\\t⊢ (category_theory.limits.parallel_pair (F.map category_theory.limits.walking_parallel_pair_hom.left) (F.map category_theory.limits.walking_parallel_pair_hom.right)).obj category_theory.limits.walking_parallel_pair.zero = 
F.obj category_theory.limits.walking_parallel_pair.zero\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL C : Type u,\\t_inst_1 : category_theory.category C,\\tF : category_theory.limits.walking_parallel_pair ⥤ C\\t⊢ (category_theory.limits.parallel_pair (F.map category_theory.limits.walking_parallel_pair_hom.left) (F.map category_theory.limits.walking_parallel_pair_hom.right)).obj category_theory.limits.walking_parallel_pair.one = F.obj category_theory.limits.walking_parallel_pair.one\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL C : Type u,\\t_inst_1 : category_theory.category C,\\tX : C,\\tP : Cᵒᵖ ⥤ Type v\\t⊢ category_theory.presieve.is_sheaf_for P (category_theory.presieve.singleton (𝟙 X))\\n PROOFSTEP ',\n", + " 'completion': ' apply is_sheaf_for_singleton_iso\\n'},\n", + " {'prompt': \"GOAL c : turing.to_partrec.code,\\tk k' : turing.to_partrec.cont,\\tv : list ℕ\\t⊢ turing.to_partrec.step_normal c (k.then k') v = (turing.to_partrec.step_normal c k v).then k'\\n PROOFSTEP \",\n", + " 'completion': ' induction c with generalizing k v; simp only [cont.then, step_normal, cfg.then, *] {constructor_eq := ff}\\n'},\n", + " {'prompt': \"GOAL c : turing.to_partrec.code,\\tk k' : turing.to_partrec.cont,\\tv : list ℕ\\t⊢ turing.to_partrec.step_normal c (k.then k') v = (turing.to_partrec.step_normal c k v).then k'\\n PROOFSTEP \",\n", + " 'completion': ' induction c with generalizing k v\\n'},\n", + " {'prompt': 'GOAL _x : char_buffer,\\t_x _x : ℕ,\\t_x : dlist string\\t⊢ parser.any_char _x _x = parse_result.fail _x _x → _x = _x\\n PROOFSTEP ',\n", + " 'completion': ' rw [any_char_eq_fail, and.comm]\\n'},\n", + " {'prompt': 'GOAL x : ℂ\\t⊢ complex.sinh x * complex.cosh x + complex.cosh x * complex.sinh x = 2 * complex.sinh x * complex.cosh x\\n PROOFSTEP ',\n", + " 'completion': ' ring\\n'},\n", + " {'prompt': 'GOAL α : Sort u,\\t_inst_1 : decidable_eq α,\\ti j a : α\\t⊢ ⇑(equiv.swap i j) (⇑(equiv.swap i j) a) = a\\n PROOFSTEP ',\n", + " 'completion': ' rw [← equiv.trans_apply, equiv.swap_swap, equiv.refl_apply]\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\ti : fin (n + 2),\\th : i < fin.last (n + 1)\\t⊢ ¬⇑fin.cast_succ (fin.last n) < i\\n PROOFSTEP ',\n", + " 'completion': ' simpa [lt_iff_coe_lt_coe, le_iff_coe_le_coe, lt_succ_iff] using h\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : decidable_eq α,\\ts : finset α,\\ta : α\\t⊢ a ∈ s.val.to_finset ↔ a ∈ s\\n PROOFSTEP ',\n", + " 'completion': ' rw [multiset.mem_to_finset, ←mem_def]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tM : Type u_5,\\t_inst_1 : has_zero M,\\tf : α →₀ M\\t⊢ f.support.nonempty ↔ f ≠ 0\\n PROOFSTEP ',\n", + " 'completion': ' simp only [finsupp.support_eq_empty, finset.nonempty_iff_ne_empty, ne.def]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tm n : α →₀ ℕ,\\th : m < n\\t⊢ ⇑multiset.card (⇑finsupp.to_multiset m) < ⇑multiset.card (⇑finsupp.to_multiset n)\\n PROOFSTEP ',\n", + " 'completion': ' apply multiset.card_lt_of_lt\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tβ : Type v,\\tf : α → β\\t⊢ list.map f list.nil = list.nil → list.nil = list.nil\\n PROOFSTEP ',\n", + " 'completion': ' simp only [forall_prop_of_true, map, forall_prop_of_false, not_false_iff]\\n'},\n", + " {'prompt': 'GOAL σ : Type u_1,\\tR : Type u,\\t_inst_1 : comm_semiring R,\\ts : set σ\\t⊢ mv_polynomial.supported R s = (mv_polynomial.rename coe).range\\n PROOFSTEP ',\n", + " 'completion': ' rw [supported, set.image_eq_range, adjoin_range_eq_range_aeval, rename]\\n'},\n", + " {'prompt': 
'GOAL n : ℕ\\t⊢ nat.of_digits 0 (0.digits n) = n\\t\\tcase nat.succ\\tn b : ℕ\\t⊢ nat.of_digits b.succ (b.succ.digits n) = n\\n PROOFSTEP ',\n", + " 'completion': ' { cases n with n, { refl, }, { change of_digits 0 [n+1] = n+1, dsimp [of_digits], simp, } }\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tl : ordnode α,\\tx : α,\\tr : ordnode α\\t⊢ (l.balance_l x r).dual = r.dual.balance_r x l.dual\\n PROOFSTEP ',\n", + " 'completion': ' unfold balance_l balance_r\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : ring R,\\ta : add_monoid_algebra R ℕ\\t⊢ -{to_finsupp := a} + {to_finsupp := a} = 0\\n PROOFSTEP ',\n", + " 'completion': ' simp [neg_to_finsupp, add_to_finsupp, ← zero_to_finsupp]\\n'},\n", + " {'prompt': 'GOAL r : ℝ≥0\\t⊢ ∀ (b : ℝ≥0∞), b ∈ {d : ℝ≥0∞ | ⊤ ≤ d + ↑r} → ⊤ ≤ b\\n PROOFSTEP ',\n", + " 'completion': ' simp [add_eq_top]\\n'},\n", + " {'prompt': \"GOAL α : Type u,\\ts : seq α\\t⊢ s.nth 0 = prod.fst <$> (λ (a' : α), (a', s.tail)) <$> s.nth 0\\n PROOFSTEP \",\n", + " 'completion': ' cases nth s 0\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : partial_order α,\\ta b : α\\t⊢ set.Icc a b \\\\ {a, b} = set.Ioo a b\\n PROOFSTEP ',\n", + " 'completion': ' rw [insert_eq, ← diff_diff, Icc_diff_left, Ioc_diff_right]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : semiring α,\\tx y : α\\t⊢ id (x + y) = id x + id y\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : semiring α,\\tx y : α\\t⊢ id (x * y) = id x * id y\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL f : circle_deg1_lift,\\tx : ℝ\\t⊢ filter.tendsto (λ (n : ℕ), (⇑(f ^ n) x - x) / ↑n) filter.at_top (𝓝 f.translation_number)\\n PROOFSTEP ',\n", + " 'completion': \" rw [← translation_number_conj_eq' (translate $ multiplicative.of_add x)]\\n\"},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : comm_ring R,\\tq p : polynomial R,\\thp : (q * q * p).separable\\t⊢ is_unit q\\n PROOFSTEP ',\n", + " 'completion': ' apply is_coprime_self.mp\\n'},\n", + " {'prompt': 'GOAL V : Type u_1,\\t_inst_1 : inner_product_space ℝ V,\\tx y : V\\t⊢ has_inner.inner x y / (∥x∥ * ∥y∥) ≤ 1\\n PROOFSTEP ',\n", + " 'completion': ' exact (abs_le.mp (abs_real_inner_div_norm_mul_norm_le_one x y)).2\\n'},\n", + " {'prompt': 'GOAL a b c : Prop,\\t_inst_1 : decidable a\\t⊢ a → b ∨ c ↔ (a → b) ∨ (a → c)\\n PROOFSTEP ',\n", + " 'completion': ' simp [decidable.imp_iff_not_or, or.comm, or.left_comm]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : nontrivial α,\\t_inst_2 : decidable_eq α,\\tx : α\\t⊢ ∃ (y : α), y ≠ x\\n PROOFSTEP ',\n", + " 'completion': \" rcases exists_pair_ne α with ⟨y, y', h⟩\\n\"},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : measurable_space α,\\tj : measure_theory.jordan_decomposition α,\\tr : ℝ≥0\\t⊢ (r • j).to_signed_measure = r • j.to_signed_measure\\n PROOFSTEP ',\n", + " 'completion': ' ext1 i hi\\n'},\n", + " {'prompt': 'GOAL f : stieltjes_function,\\tx y : ℝ,\\th : x ≤ y,\\thxy : x < y\\t⊢ f.left_lim x ≤ f.left_lim y\\n PROOFSTEP ',\n", + " 'completion': ' exact (f.left_lim_le le_rfl).trans (f.le_left_lim hxy)\\n'},\n", + " {'prompt': \"GOAL pqr : multiset ℕ+,\\tp' q' : ℕ+,\\tH : ADE_inequality.A' p' q' = pqr\\t⊢ 1 < (↑1)⁻¹ + ((↑p')⁻¹ + (↑q')⁻¹)\\n PROOFSTEP \",\n", + " 'completion': ' simp only [lt_add_iff_pos_right, pnat.one_coe, inv_one, nat.cast_one, coe_coe]\\n'},\n", + " {'prompt': 'GOAL A : Type u_1,\\t_inst_1 : comm_ring A,\\t_inst_2 : algebra ℚ A\\t⊢ bernoulli_power_series A * (power_series.exp A - 1) = 
power_series.X\\n PROOFSTEP ',\n", + " 'completion': ' ext n\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : bounded_lattice α,\\tx y : α,\\th : is_compl x y\\t⊢ x ⊓ y ≤ ⊥\\n PROOFSTEP ',\n", + " 'completion': ' exact h.1\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : bounded_lattice α,\\tx y : α,\\th : is_compl x y\\t⊢ ⊤ ≤ y ⊔ x\\n PROOFSTEP ',\n", + " 'completion': ' rw sup_comm\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : bounded_lattice α,\\tx y : α,\\th : is_compl x y\\t⊢ ⊤ ≤ x ⊔ y\\n PROOFSTEP ',\n", + " 'completion': ' exact h.2\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : bounded_lattice α,\\tx y : α,\\th : is_compl x y\\t⊢ y ⊓ x ≤ ⊥\\n PROOFSTEP ',\n", + " 'completion': ' rw inf_comm\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : order_bot α,\\tb a : α\\t⊢ (option.some a).get_or_else ⊥ ≤ b ↔ option.some a ≤ ↑b\\n PROOFSTEP ',\n", + " 'completion': ' simp [none_eq_bot, some_eq_coe]\\n'},\n", + " {'prompt': 'GOAL R : Type u_1,\\t_inst_1 : integral_domain R,\\t_inst_2 : gcd_monoid R,\\tp : polynomial R,\\thp : p.is_primitive\\t⊢ p.prim_part = p\\n PROOFSTEP ',\n", + " 'completion': ' rw [← one_mul p.prim_part, ← C_1, ← hp.content_eq_one, ← p.eq_C_content_mul_prim_part]\\n'},\n", + " {'prompt': 'GOAL R : Type u_1,\\t_inst_1 : integral_domain R,\\t_inst_2 : gcd_monoid R,\\tr : R\\t⊢ (⇑polynomial.C r).support.gcd (⇑polynomial.C r).coeff = ⇑normalize r\\n PROOFSTEP ',\n", + " 'completion': ' by_cases h0 : r = 0\\n'},\n", + " {'prompt': 'GOAL σ : Type u_1,\\tR : Type u_2,\\t_inst_1 : comm_semiring R,\\t_inst_3 : fintype σ\\t⊢ mv_polynomial.esymm σ R 0 = 1\\n PROOFSTEP ',\n", + " 'completion': ' simp only [esymm, powerset_len_zero, sum_singleton, prod_empty]\\n'},\n", + " {'prompt': 'GOAL ι : Type u_1,\\tf : ι → cardinal\\t⊢ ¬cardinal.prod f = 0 ↔ ¬∃ (i : ι), f i = 0\\n PROOFSTEP ',\n", + " 'completion': ' simpa using prod_ne_zero f\\n'},\n", + " {'prompt': 'GOAL G : pgame\\t⊢ G.impartial_aux ↔ G ≈ -G ∧ (∀ (i : G.left_moves), (G.move_left i).impartial_aux) ∧ ∀ (j : G.right_moves), (G.move_right j).impartial_aux\\n PROOFSTEP ',\n", + " 'completion': ' split\\n'},\n", + " {'prompt': 'GOAL G : pgame\\t⊢ G.impartial_aux → (G ≈ -G ∧ (∀ (i : G.left_moves), (G.move_left i).impartial_aux) ∧ ∀ (j : G.right_moves), (G.move_right j).impartial_aux)\\n PROOFSTEP ',\n", + " 'completion': ' intro hi\\n'},\n", + " {'prompt': 'GOAL G : pgame\\t⊢ (G ≈ -G ∧ (∀ (i : G.left_moves), (G.move_left i).impartial_aux) ∧ ∀ (j : G.right_moves), (G.move_right j).impartial_aux) → G.impartial_aux\\n PROOFSTEP ',\n", + " 'completion': ' intro hi\\n'},\n", + " {'prompt': 'GOAL X : Type u_1,\\t_inst_1 : topological_space X,\\tS : discrete_quotient X,\\t_inst_2 : compact_space X,\\tcond : is_compact ⊤\\t⊢ fintype ↥S\\n PROOFSTEP ',\n", + " 'completion': ' rw is_compact_iff_finite_subcover at cond\\n'},\n", + " {'prompt': 'GOAL X : Type u_1,\\t_inst_1 : topological_space X,\\tα : Type u_2,\\tf : locally_constant X α\\t⊢ ⇑(f.locally_constant_lift) ∘ f.discrete_quotient.proj = ⇑f\\n PROOFSTEP ',\n", + " 'completion': ' ext\\n'},\n", + " {'prompt': 'GOAL X : Type u_1,\\t_inst_1 : topological_space X,\\t_inst_2 : loc_path_connected_space X,\\thX : connected_space X,\\tinst : inhabited X\\t⊢ path_connected_space X\\n PROOFSTEP ',\n", + " 'completion': ' let x₀ := default X\\n'},\n", + " {'prompt': 'GOAL X Y : SemiNormedGroup₁,\\tf : X ⟶ Y,\\ts : category_theory.limits.cokernel_cofork f,\\tb : ↥X\\t⊢ ⇑(f ≫ category_theory.limits.cofork.π s) b = 0\\n PROOFSTEP ',\n", + " 'completion': ' 
simp\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : semi_normed_group α,\\tw : α,\\tr : ℝ\\t⊢ w ∈ metric.sphere 0 r ↔ ∥w∥ = r\\n PROOFSTEP ',\n", + " 'completion': ' simp [dist_eq_norm]\\n'},\n", + " {'prompt': 'GOAL ⊢ real.sin (π / 4) = real.sin (π / 2 ^ 2)\\t\\t⊢ real.sin (π / 2 ^ 2) = real.sqrt 2 / 2\\n PROOFSTEP ',\n", + " 'completion': ' congr\\n'},\n", + " {'prompt': 'GOAL ⊢ 16 = 2 ^ 4\\t\\t⊢ real.cos (π / 2 ^ 4) = real.sqrt (2 + real.sqrt (2 + real.sqrt 2)) / 2\\n PROOFSTEP ',\n", + " 'completion': ' norm_num\\n'},\n", + " {'prompt': 'GOAL X : Type u,\\t_inst_1 : preorder X,\\tx y : X,\\th : plift (x ≤ y)\\t⊢ _.hom = {down := h}\\n PROOFSTEP ',\n", + " 'completion': ' cases h\\n'},\n", + " {'prompt': 'GOAL C : Type u₁,\\t_inst_1 : category_theory.small_category C,\\tP : Cᵒᵖ ⥤ Type u₁\\t⊢ category_theory.is_iso (category_theory.limits.colimit.desc (category_theory.functor_to_representables P) ((category_theory.limits.colimit.cocone (category_theory.functor_to_representables P)).extend (category_theory.extend_along_yoneda_yoneda.hom.app P)))\\n PROOFSTEP ',\n", + " 'completion': ' rw [colimit.desc_extend, colimit.desc_cocone]\\n'},\n", + " {'prompt': 'GOAL C : Type u,\\t_inst_1 : category_theory.category C,\\tX : C,\\tP : Cᵒᵖ ⥤ Type v\\t⊢ category_theory.presieve.is_sheaf_for P ⇑(category_theory.sieve.generate (category_theory.presieve.singleton (𝟙 X)))\\n PROOFSTEP ',\n", + " 'completion': ' rw ← is_sheaf_for_iff_generate\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : primcodable α,\\tβ : Type ?,\\te : β ≃ α,\\t_inst : primcodable β\\t⊢ Sort ?\\n PROOFSTEP ',\n", + " 'completion': ' exact primrec e\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : primcodable α,\\tp : α → Prop\\t⊢ ∀ (a : α), p a ↔ p (id a)\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL Γ : Type ?,\\t_inst_1 : inhabited Γ,\\tl : turing.list_blank Γ,\\ti : ℕ\\t⊢ list.nil.head = (list.nil ++ list.repeat (inhabited.default Γ) i).head\\n PROOFSTEP ',\n", + " 'completion': ' cases i\\n'},\n", + " {'prompt': 'GOAL Γ : Type ?,\\t_inst_1 : inhabited Γ,\\tl : turing.list_blank Γ,\\ti : ℕ\\t⊢ list.nil.head = (list.nil ++ list.repeat (inhabited.default Γ) i).head\\n PROOFSTEP ',\n", + " 'completion': ' cases i; refl\\n'},\n", + " {'prompt': 'GOAL Γ : Type ?,\\t_inst_1 : inhabited Γ,\\tl : turing.list_blank Γ,\\ti : ℕ\\t⊢ list.nil.head = (list.nil ++ list.repeat (inhabited.default Γ) i.succ).head\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL F : Type u → Type v,\\tα : Type u,\\t_inst_1 : functor F,\\t_inst_2 : is_lawful_functor F\\t⊢ functor.map id = id\\n PROOFSTEP ',\n", + " 'completion': ' apply funext; apply id_map\\n'},\n", + " {'prompt': 'GOAL F : Type u → Type v,\\tα : Type u,\\t_inst_1 : functor F,\\t_inst_2 : is_lawful_functor F\\t⊢ functor.map id = id\\n PROOFSTEP ',\n", + " 'completion': ' apply funext\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tb : buffer α,\\ta : α,\\ti : ℕ,\\th : i < b.size\\t⊢ i < (b.push_back a).size\\n PROOFSTEP ',\n", + " 'completion': ' convert nat.lt_succ_of_lt h\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tb : buffer α,\\ta : α,\\ti : ℕ,\\th : i < b.size\\t⊢ (b.push_back a).size = b.size.succ\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL K : Type u_1,\\t_inst_1 : is_R_or_C K,\\tr : ℝ,\\tz : K\\t⊢ ⇑is_R_or_C.conj (r • z) = r • ⇑is_R_or_C.conj z\\n PROOFSTEP ',\n", + " 'completion': ' simp_rw conj_eq_re_sub_im\\n'},\n", + " {'prompt': 'GOAL K : Type u_1,\\t_inst_1 : is_R_or_C 
K,\\tz : K,\\th : is_R_or_C.I = 0\\t⊢ z / is_R_or_C.I = -(z * is_R_or_C.I)\\n PROOFSTEP ',\n", + " 'completion': ' simp [h]\\n'},\n", + " {'prompt': 'GOAL K : Type u_1,\\t_inst_1 : is_R_or_C K,\\tz : K,\\th : ¬is_R_or_C.I = 0\\t⊢ z / is_R_or_C.I = -(z * is_R_or_C.I)\\n PROOFSTEP ',\n", + " 'completion': ' field_simp [mul_assoc, I_mul_I_of_nonzero h]\\n'},\n", + " {'prompt': 'GOAL l n m k : ℕ\\t⊢ k ∈ finset.Ico n m \\\\ finset.Ico n l ↔ k ∈ finset.Ico (linear_order.max n l) m\\n PROOFSTEP ',\n", + " 'completion': ' by_cases n ≤ k\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst : Π (a : Prop), decidable a,\\tn : ℕ,\\th : n < ∅.card\\t⊢ (finset.powerset_len n ∅).nonempty\\n PROOFSTEP ',\n", + " 'completion': ' simpa using h\\n'},\n", + " {'prompt': 'GOAL m : ℕ,\\tb : ℤ,\\tbpos : 0 < b\\t⊢ -[1+ m] % b = b - 1 - ↑m % b\\n PROOFSTEP ',\n", + " 'completion': ' rw [sub_sub, add_comm]; exact match b, eq_succ_of_zero_lt bpos with ._, ⟨n, rfl⟩ := rfl end\\n'},\n", + " {'prompt': 'GOAL m : ℕ,\\tb : ℤ,\\tbpos : 0 < b\\t⊢ -[1+ m] % b = b - (↑m % b + 1)\\n PROOFSTEP ',\n", + " 'completion': ' exact match b, eq_succ_of_zero_lt bpos with ._, ⟨n, rfl⟩ := rfl end\\n'},\n", + " {'prompt': 'GOAL m : ℕ,\\tb : ℤ,\\tbpos : 0 < b\\t⊢ -[1+ m] % b = b - 1 - ↑m % b\\n PROOFSTEP ',\n", + " 'completion': ' rw [sub_sub, add_comm]\\n'},\n", + " {'prompt': 'GOAL M₀ : Type u_1,\\t_inst_2 : monoid_with_zero M₀,\\t_inst_3 : nontrivial M₀,\\t_inst_4 : no_zero_divisors M₀\\t⊢ list.nil.prod = 0 ↔ 0 ∈ list.nil\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tL : list (list α),\\ti : ℕ\\t⊢ list.drop (list.take i (list.map list.length L)).sum L.join = (list.drop i L).join\\n PROOFSTEP ',\n", + " 'completion': ' induction L generalizing i\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ nat.of_digits 1 (1.digits n) = n\\t\\tcase nat.succ, nat.succ\\tn b : ℕ\\t⊢ nat.of_digits b.succ.succ (b.succ.succ.digits n) = n\\n PROOFSTEP ',\n", + " 'completion': ' { induction n with n ih, { refl, }, { simp only [ih, add_comm 1, of_digits_one_cons, nat.cast_id, digits_one_succ], } }\\n'},\n", + " {'prompt': 'GOAL b n : ℕ,\\th : b.digits n = list.nil,\\tthis : nat.of_digits b (b.digits n) = nat.of_digits b list.nil\\t⊢ n = 0\\n PROOFSTEP ',\n", + " 'completion': ' convert this\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\ta : R,\\tf : polynomial R\\t⊢ ⇑polynomial.C a * f = a • f\\n PROOFSTEP ',\n", + " 'completion': ' ext\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\th : 0 ≠ 1,\\tp : polynomial R,\\thp : p.monic\\t⊢ p ≠ 0\\n PROOFSTEP ',\n", + " 'completion': ' nontriviality R\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp : polynomial R,\\thp : 0 < p.nat_trailing_degree\\t⊢ p.next_coeff_up = p.coeff (p.nat_trailing_degree + 1)\\n PROOFSTEP ',\n", + " 'completion': ' rw [next_coeff_up, if_neg]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : comm_ring R,\\tp q : polynomial R,\\thp0 : p = 0\\t⊢ (p /ₘ q).degree ≤ p.degree\\n PROOFSTEP ',\n", + " 'completion': ' simp only [hp0, zero_div_by_monic, le_refl]\\n'},\n", + " {'prompt': \"GOAL α : Type u_1,\\tβ : Type u_2,\\ts : set α,\\tt : set β\\t⊢ prod.swap ⁻¹' t.prod s = s.prod t\\n PROOFSTEP \",\n", + " 'completion': ' ext ⟨x, y⟩\\n'},\n", + " {'prompt': \"GOAL α : Type u,\\tn : ℕ,\\ta : α,\\ts s' : sym α n\\t⊢ a::s = a::s' ↔ s = s'\\n PROOFSTEP \",\n", + " 'completion': ' cases s\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : semiring α\\t⊢ ∀ {x y : α}, id (x + y) = id x + id 
y\\n PROOFSTEP ',\n", + " 'completion': ' intros\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : semiring α\\t⊢ ∀ {x y : α}, id (x * y) = id x * id y\\n PROOFSTEP ',\n", + " 'completion': ' intros\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tf : α → α,\\tx : α,\\thx : x ∈ function.periodic_pts f\\t⊢ 0 < function.minimal_period f x\\n PROOFSTEP ',\n", + " 'completion': ' simp only [minimal_period, dif_pos hx, gt_iff_lt.1 (nat.find_spec hx).fst]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : comm_ring R,\\tq p : polynomial R,\\thp : (q * q * p).separable\\t⊢ is_coprime q q\\n PROOFSTEP ',\n", + " 'completion': ' have : is_coprime (q * (q * p)) (q * (q.derivative * p + q.derivative * p + q * p.derivative))\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\tih : ⇑equiv.perm.sign (fin_rotate (n + 1)) = (-1) ^ n\\t⊢ ⇑equiv.perm.sign (⇑(equiv.perm.decompose_fin.symm) (1, fin_rotate n.succ)) = (-1) ^ n.succ\\n PROOFSTEP ',\n", + " 'completion': ' simp [ih, pow_succ]\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\tih : ⇑equiv.perm.sign (fin_rotate (n + 1)) = (-1) ^ n\\t⊢ ⇑equiv.perm.sign (fin_rotate (n.succ + 1)) = (-1) ^ n.succ\\n PROOFSTEP ',\n", + " 'completion': ' rw fin_rotate_succ\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : decidable_eq α,\\t_inst_2 : fintype α,\\tf g : equiv.perm α\\t⊢ f.disjoint g ↔ disjoint f.support g.support\\n PROOFSTEP ',\n", + " 'completion': ' simp [disjoint_iff_eq_or_eq, disjoint_iff, finset.ext_iff, not_and_distrib]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tr : α → α → Prop,\\th : symmetric r,\\tx y : α\\t⊢ relation.refl_trans_gen r x x\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tm0 : measurable_space α,\\tμ : measure_theory.measure α,\\tf : filter α\\t⊢ μ.finite_at_filter (f ⊓ μ.ae) ↔ μ.finite_at_filter f\\n PROOFSTEP ',\n", + " 'completion': ' refine ⟨_, λ h, h.filter_mono inf_le_left⟩\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tm0 : measurable_space α,\\tμ : measure_theory.measure α,\\tf : filter α\\t⊢ μ.finite_at_filter (f ⊓ μ.ae) → μ.finite_at_filter f\\n PROOFSTEP ',\n", + " 'completion': ' rintros ⟨s, ⟨t, ht, u, hu, rfl⟩, hμ⟩\\n'},\n", + " {'prompt': 'GOAL X : Type u_1,\\t_inst_3 : pseudo_emetric_space X,\\t_inst_4 : measurable_space X,\\t_inst_5 : opens_measurable_space X,\\tμ : measure_theory.measure X\\t⊢ μ.inner_regular is_closed is_open\\n PROOFSTEP ',\n", + " 'completion': ' intros U hU r hr\\n'},\n", + " {'prompt': 'GOAL p : ℕ,\\th : 1 ≠ 0 ∧ p ≠ 1\\t⊢ ↑((multiplicity ↑p 1.num).get _) - ↑((multiplicity ↑p ↑(1.denom)).get _) = 0\\n PROOFSTEP ',\n", + " 'completion': ' simp *\\n'},\n", + " {'prompt': 'GOAL x y z : ℤ,\\th : pythagorean_triple x y z,\\th0 : x.gcd y = 0,\\thx : x = 0\\t⊢ h.is_classified\\n PROOFSTEP ',\n", + " 'completion': ' have hy : y = 0\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : lattice α,\\ta b : α\\t⊢ a ⊔ b = b ↔ a ⊓ b = a\\n PROOFSTEP ',\n", + " 'completion': ' rw [sup_eq_right, ←inf_eq_left]\\n'},\n", + " {'prompt': \"GOAL α : Type u_1,\\t_inst_1 : semilattice_sup α,\\tf : ℕ → α\\t⊢ ⇑(partial_sups f) 0 = (finset.range (0 + 1)).sup' _ f\\n PROOFSTEP \",\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : boolean_algebra α,\\ta b : α\\t⊢ a Δ b = a ⊓ bᶜ ⊔ b ⊓ aᶜ\\n PROOFSTEP ',\n", + " 'completion': ' simp only [(Δ), sdiff_eq]\\n'},\n", + " {'prompt': 'GOAL Γ : Type u_1,\\tR : Type u_2,\\t_inst_1 : ordered_cancel_add_comm_monoid Γ,\\t_inst_2 : non_assoc_semiring R,\\tr : R\\t⊢ (⇑hahn_series.C r).order = 0\\n PROOFSTEP 
',\n", + " 'completion': ' by_cases h : r = 0\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : comm_ring R,\\ts : R,\\tn : ℕ\\t⊢ ideal.span {s} ^ n = ideal.span {s ^ n}\\n PROOFSTEP ',\n", + " 'completion': ' induction n with n ih\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\tx : R,\\t_inst_1 : ring R,\\tn : ℕ,\\thn : x ^ n = 0\\t⊢ is_nilpotent (-x)\\n PROOFSTEP ',\n", + " 'completion': ' use n\\n'},\n", + " {'prompt': 'GOAL D : Type u,\\t_inst_4 : integral_domain D,\\t_inst_5 : unique_factorization_monoid D,\\t_inst : normalization_monoid D,\\t_inst_1 : gcd_monoid D\\t⊢ unique_factorization_monoid (polynomial D)\\n PROOFSTEP ',\n", + " 'completion': ' exact ufm_of_gcd_of_wf_dvd_monoid\\n'},\n", + " {'prompt': 'GOAL a : cardinal,\\tha : ω ≤ a,\\tthis : a ≠ 0,\\th : a * 0 = a\\t⊢ a = 0\\n PROOFSTEP ',\n", + " 'completion': ' rw [_root_.mul_zero] at h\\n'},\n", + " {'prompt': 'GOAL a b : cardinal,\\th : a * b = a,\\tha : a < ω\\t⊢ b = 1 ∨ a = 0\\n PROOFSTEP ',\n", + " 'completion': ' by_cases h2a : a = 0\\n'},\n", + " {'prompt': 'GOAL a : cardinal,\\tha : a < ω,\\th2a : ¬a = 0,\\th : a * 0 = a\\t⊢ a = 0\\n PROOFSTEP ',\n", + " 'completion': ' rw [mul_zero] at h\\n'},\n", + " {'prompt': 'GOAL G : pgame,\\thi : G.impartial_aux\\t⊢ G ≈ -G ∧ (∀ (i : G.left_moves), (G.move_left i).impartial_aux) ∧ ∀ (j : G.right_moves), (G.move_right j).impartial_aux\\n PROOFSTEP ',\n", + " 'completion': ' unfold1 impartial_aux at hi\\n'},\n", + " {'prompt': 'GOAL G : pgame,\\thi : G ≈ -G ∧ (∀ (i : G.left_moves), (G.move_left i).impartial_aux) ∧ ∀ (j : G.right_moves), (G.move_right j).impartial_aux\\t⊢ G.impartial_aux\\n PROOFSTEP ',\n", + " 'completion': ' unfold1 impartial_aux\\n'},\n", + " {'prompt': 'GOAL v : ℕ → ℤ,\\tis js : list ℤ\\t⊢ omega.coeffs.val v (list.func.add is js) = omega.coeffs.val v is + omega.coeffs.val v js\\n PROOFSTEP ',\n", + " 'completion': ' unfold val\\n'},\n", + " {'prompt': 'GOAL v : ℕ → ℤ,\\tis js : list ℤ\\t⊢ omega.coeffs.val_between v is 0 (list.func.add is js).length = omega.coeffs.val_between v is 0 is.length\\n PROOFSTEP ',\n", + " 'completion': ' apply val_between_eq_of_le\\n'},\n", + " {'prompt': 'GOAL v : ℕ → ℤ,\\tis js : list ℤ\\t⊢ omega.coeffs.val_between v js 0 (list.func.add is js).length = omega.coeffs.val_between v js 0 js.length\\n PROOFSTEP ',\n", + " 'completion': ' apply val_between_eq_of_le\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : topological_space α,\\ts : set α,\\ths : is_open s\\t⊢ frontier s = closure s \\\\ s\\n PROOFSTEP ',\n", + " 'completion': ' rw [frontier, hs.interior_eq]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : pseudo_metric_space α,\\tx : α,\\tε : ℝ\\t⊢ metric.ball x ε = ∅ ↔ ε ≤ 0\\n PROOFSTEP ',\n", + " 'completion': ' rw [← not_nonempty_iff_eq_empty, nonempty_ball, not_lt]\\n'},\n", + " {'prompt': 'GOAL i j : ℤ\\t⊢ ∫ (x : ↥circle), ⇑is_R_or_C.conj (⇑(fourier i) x) * ⇑(fourier j) x ∂haar_circle = ite (i = j) 1 0\\n PROOFSTEP ',\n", + " 'completion': ' split_ifs\\n'},\n", + " {'prompt': 'GOAL i j : ℤ,\\th : i = j\\t⊢ ∫ (x : ↥circle), ⇑is_R_or_C.conj (⇑(fourier i) x) * ⇑(fourier j) x ∂haar_circle = 1\\n PROOFSTEP ',\n", + " 'completion': ' simp [h, is_probability_measure.measure_univ, ← fourier_neg, ← fourier_add, -fourier_to_fun]\\n'},\n", + " {'prompt': 'GOAL i j : ℤ,\\th : ¬i = j\\t⊢ ∫ (x : ↥circle), ⇑is_R_or_C.conj (⇑(fourier i) x) * ⇑(fourier j) x ∂haar_circle = 0\\n PROOFSTEP ',\n", + " 'completion': ' simp only [← fourier_add, ← fourier_neg, is_R_or_C.conj_to_complex]\\n'},\n", + " {'prompt': 'GOAL i j : ℤ,\\th : 
¬i = j,\\thij : -i + j ≠ 0\\t⊢ measure_theory.integral haar_circle ⇑(fourier (-i + j)) = 0\\n PROOFSTEP ',\n", + " 'completion': ' exact integral_zero_of_mul_left_eq_neg (is_mul_left_invariant_haar_measure _) (fourier_add_half_inv_index hij)\\n'},\n", + " {'prompt': 'GOAL V : Type u_5,\\t_inst_1 : semi_normed_group V\\t⊢ is_closed {x : uniform_space.completion V × uniform_space.completion V | has_dist.dist x.fst x.snd = ∥x.fst - x.snd∥}\\n PROOFSTEP ',\n", + " 'completion': ' refine is_closed_eq (completion.uniform_continuous_extension₂ _).continuous _\\n'},\n", + " {'prompt': 'GOAL X : Type u,\\t_inst_1 : preorder X,\\tx y : X,\\th : x ≤ y\\t⊢ _.hom = {down := {down := h}}\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL C : Type u,\\t_inst_1 : category_theory.category C,\\tJ : Type v,\\t_inst_2 : fintype J,\\t_inst_3 : category_theory.limits.has_finite_wide_pushouts C\\t⊢ category_theory.limits.has_colimits_of_shape (category_theory.limits.wide_pushout_shape J) C\\n PROOFSTEP ',\n", + " 'completion': ' haveI := @has_finite_wide_pushouts.out C _ _ J (classical.dec_eq _)\\n'},\n", + " {'prompt': 'GOAL C : Type u₁,\\t_inst_1 : category_theory.category C,\\t_inst_2 : category_theory.monoidal_category C,\\tX : C,\\t_inst_3 : category_theory.has_left_dual X\\t⊢ ᘁ(𝟙 X) = 𝟙 ᘁX\\n PROOFSTEP ',\n", + " 'completion': ' simp only [left_adjoint_mate, monoidal_category.tensor_id, category.id_comp, evaluation_coevaluation_assoc, category.comp_id, iso.inv_hom_id]\\n'},\n", + " {'prompt': 'GOAL C : Type u,\\t_inst_1 : category_theory.category C,\\tX Y : C,\\t_inst_3 : category_theory.limits.has_zero_morphisms C,\\t_inst_4 : category_theory.limits.has_zero_object C\\t⊢ (category_theory.limits.image_subobject 0).arrow = 0\\n PROOFSTEP ',\n", + " 'completion': ' rw ←image_subobject_arrow\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : linear_order α,\\tA B : finset α\\t⊢ (A \\\\ B).to_colex ≤ (B \\\\ A).to_colex ↔ A.to_colex ≤ B.to_colex\\n PROOFSTEP ',\n", + " 'completion': ' rw [le_iff_le_iff_lt_iff_lt, sdiff_lt_sdiff_iff_lt]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : primcodable α,\\t_inst : Π (a : ℕ), decidable (a ∈ set.range encodable.encode) := encodable.decidable_range_encode α\\t⊢ primrec ulower.down\\n PROOFSTEP ',\n", + " 'completion': ' exact subtype_mk primrec.encode\\n'},\n", + " {'prompt': 'GOAL α : Type ?,\\tβ : Type ?,\\te : α ≃ β,\\tl : list α\\t⊢ list.map ⇑(e.symm) (list.map ⇑e l) = l\\n PROOFSTEP ',\n", + " 'completion': ' rw [list.map_map, e.symm_comp_self, list.map_id]\\n'},\n", + " {'prompt': 'GOAL α : Type ?,\\tβ : Type ?,\\te : α ≃ β,\\tl : list β\\t⊢ list.map ⇑e (list.map ⇑(e.symm) l) = l\\n PROOFSTEP ',\n", + " 'completion': ' rw [list.map_map, e.self_comp_symm, list.map_id]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : linear_order α,\\ts : finset α,\\th : s.nonempty\\t⊢ (finset.sort has_le.le s).length - 1 < (finset.sort has_le.le s).length\\n PROOFSTEP ',\n", + " 'completion': ' simpa using nat.sub_lt (card_pos.mpr h) zero_lt_one\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : infinite α,\\tm n : ℕ,\\th : nat_embedding_aux α m = nat_embedding_aux α n\\t⊢ m = n\\n PROOFSTEP ',\n", + " 'completion': ' letI := classical.dec_eq α\\n'},\n", + " {'prompt': 'GOAL n a b : ℤ,\\tm : ℕ,\\th : a ≡ b [ZMOD n]\\t⊢ a ^ m ≡ b ^ m [ZMOD n]\\n PROOFSTEP ',\n", + " 'completion': ' induction m with d hd\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : decidable_eq α,\\ta : α,\\tl : list α,\\th : a ∈ l\\t⊢ (l.erase a).length 
= l.length.pred\\n PROOFSTEP ',\n", + " 'completion': ' rw erase_eq_erasep; exact length_erasep_of_mem h rfl\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : decidable_eq α,\\ta : α,\\tl : list α,\\th : a ∈ l\\t⊢ (l.erase a).length = l.length.pred\\n PROOFSTEP ',\n", + " 'completion': ' rw erase_eq_erasep\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tβ : Type v,\\tl : list α,\\tb : β\\t⊢ list.map (function.const α b) l = list.repeat b l.length\\n PROOFSTEP ',\n", + " 'completion': ' induction l; [refl, simp only [*, map]]; split; refl\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tβ : Type v,\\tl : list α,\\tb : β\\t⊢ list.map (function.const α b) l = list.repeat b l.length\\n PROOFSTEP ',\n", + " 'completion': ' induction l; [refl, simp only [*, map]]; split\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tβ : Type v,\\tl : list α,\\tb : β\\t⊢ list.map (function.const α b) l = list.repeat b l.length\\n PROOFSTEP ',\n", + " 'completion': ' induction l; [refl, simp only [*, map]]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tβ : Type v,\\tl : list α,\\tb : β\\t⊢ list.map (function.const α b) l = list.repeat b l.length\\n PROOFSTEP ',\n", + " 'completion': ' induction l\\n'},\n", + " {'prompt': \"GOAL α : Type u,\\tlast'_is_some : ∀ {l : list α}, ↥(l.last'.is_some) ↔ l ≠ list.nil,\\ta : α\\t⊢ ↥([a].last'.is_some) ↔ [a] ≠ list.nil\\n PROOFSTEP \",\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tl : list α,\\tx : α,\\t_inst_1 : decidable_eq α\\t⊢ x ∈+ l ↔ 2 ≤ list.count x l\\n PROOFSTEP ',\n", + " 'completion': ' simp [duplicate_iff_sublist, le_count_iff_repeat_sublist]\\n'},\n", + " {'prompt': 'GOAL b : ℕ,\\tl2 : list ℕ\\t⊢ nat.of_digits b (list.nil ++ l2) = nat.of_digits b list.nil + b ^ list.nil.length * nat.of_digits b l2\\n PROOFSTEP ',\n", + " 'completion': ' simp [of_digits]\\n'},\n", + " {'prompt': 'GOAL b : ℕ,\\th : ∀ (m : ℕ), m < 0 → nat.of_digits b.succ.succ (b.succ.succ.digits m) = m\\t⊢ nat.of_digits b.succ.succ list.nil = 0\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL m n : ℕ,\\th : n ≤ m\\t⊢ even (m - n) ↔ (even (m - n) ↔ even n ↔ even n)\\n PROOFSTEP ',\n", + " 'completion': ' by_cases h : even n; simp [h]\\n'},\n", + " {'prompt': 'GOAL m n : ℕ,\\th : n ≤ m\\t⊢ even (m - n) ↔ (even (m - n) ↔ even n ↔ even n)\\n PROOFSTEP ',\n", + " 'completion': ' by_cases h : even n\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : ring R\\t⊢ ∀ (n : ℕ) (a : polynomial R), -[1+ n] • a = -(↑(n.succ) • a)\\n PROOFSTEP ',\n", + " 'completion': ' rintros n ⟨⟩\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : ring R,\\tn : ℕ,\\ta : add_monoid_algebra R ℕ\\t⊢ -[1+ n] • a = -(↑(n.succ) • a)\\n PROOFSTEP ',\n", + " 'completion': ' simp [add_smul, add_mul]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tc : R,\\tn : ℕ,\\th : c ≠ 0\\t⊢ (⇑(polynomial.monomial n) c).support = {n}\\n PROOFSTEP ',\n", + " 'completion': ' exact support_monomial _ _ h\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp q : polynomial R,\\th0 : p.comp q = 0\\t⊢ 0 ≤ p.nat_degree * q.nat_degree\\n PROOFSTEP ',\n", + " 'completion': ' exact nat.zero_le _\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\ta : R,\\t_inst_1 : comm_semiring R,\\tp q : polynomial R,\\tH : p.is_root a\\t⊢ (p * q).is_root a\\n PROOFSTEP ',\n", + " 'completion': ' rw [is_root, eval_mul, is_root.def.1 H, zero_mul]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp : polynomial R,\\tn : ℕ\\t⊢ p.coeff n = 0 → 0 = p.coeff n\\n PROOFSTEP 
',\n", + " 'completion': ' exact λ h, h.symm\\n'},\n", + " {'prompt': 'GOAL x y : ℝ,\\thx : 0 ≤ x,\\thy : 0 ≤ y\\t⊢ x ≤ real.sqrt y ↔ x ^ 2 ≤ y\\n PROOFSTEP ',\n", + " 'completion': ' rw [mul_self_le_mul_self_iff hx (sqrt_nonneg _), sq, mul_self_sqrt hy]\\n'},\n", + " {'prompt': 'GOAL x y : ℝ,\\th : 0 ≤ y\\t⊢ x ^ 2 ≤ y ↔ -real.sqrt y ≤ x ∧ x ≤ real.sqrt y\\n PROOFSTEP ',\n", + " 'completion': ' split\\n'},\n", + " {'prompt': 'GOAL x y : ℝ,\\th : 0 ≤ y\\t⊢ x ^ 2 ≤ y → -real.sqrt y ≤ x ∧ x ≤ real.sqrt y\\n PROOFSTEP ',\n", + " 'completion': ' simpa only [abs_le] using abs_le_sqrt\\n'},\n", + " {'prompt': 'GOAL x y : ℝ,\\th : 0 ≤ y\\t⊢ -real.sqrt y ≤ x ∧ x ≤ real.sqrt y → x ^ 2 ≤ y\\n PROOFSTEP ',\n", + " 'completion': ' rw [← abs_le, ← sq_abs]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tβ : Type v,\\ts : set α,\\tt : set β,\\ths : s.countable,\\tht : t.countable\\t⊢ (s.prod t).countable\\n PROOFSTEP ',\n", + " 'completion': ' haveI : encodable s := hs.to_encodable\\n'},\n", + " {'prompt': 'GOAL F : Type u_1,\\t_inst_1 : field F,\\tE : Type u_2,\\t_inst_2 : field E,\\t_inst_3 : algebra F E\\t⊢ ⊥.to_subalgebra = ⊥\\n PROOFSTEP ',\n", + " 'completion': ' ext\\n'},\n", + " {'prompt': 'GOAL K : Type v,\\t_inst_1 : field K,\\tn : ℕ,\\tf : polynomial K,\\thfn : f.nat_degree = n + 1\\t⊢ Sort ?\\n PROOFSTEP ',\n", + " 'completion': \" exact algebra_map K (splitting_field_aux _ _ hfn) = (algebra_map (adjoin_root f.factor) (splitting_field_aux n f.remove_factor (nat_degree_remove_factor' hfn))).comp (adjoin_root.of f.factor)\\n\"},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : fintype α,\\t_inst_2 : decidable_eq α,\\tσ τ : equiv.perm α\\t⊢ is_conj σ τ ↔ σ.partition = τ.partition\\n PROOFSTEP ',\n", + " 'completion': ' rw [is_conj_iff_cycle_type_eq]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : fintype α,\\t_inst_2 : decidable_eq α,\\tσ τ : equiv.perm α\\t⊢ σ.cycle_type = τ.cycle_type ↔ σ.partition = τ.partition\\n PROOFSTEP ',\n", + " 'completion': ' refine ⟨λ h, _, λ h, _⟩\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\ti : fin (n + 1),\\tj : fin n,\\th : ⇑fin.cast_succ j < i\\t⊢ ⇑(i.cycle_range) (⇑(i.succ_above) j) = j.succ\\n PROOFSTEP ',\n", + " 'completion': ' rw [fin.succ_above_below _ _ h, fin.cycle_range_of_lt h, fin.coe_succ_eq_succ]\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\ti : fin (n + 1),\\tj : fin n,\\th : ⇑fin.cast_succ j ≥ i\\t⊢ ⇑(i.cycle_range) (⇑(i.succ_above) j) = j.succ\\n PROOFSTEP ',\n", + " 'completion': ' rw [fin.succ_above_above _ _ h, fin.cycle_range_of_gt (fin.le_cast_succ_iff.mp h)]\\n'},\n", + " {'prompt': 'GOAL ι : Type u_6,\\t_inst_8 : fintype ι,\\t_inst_14 : decidable_eq ι,\\tw : ι → units ℂ\\t⊢ (quadratic_form.weighted_sum_squares ℂ w).isometry (quadratic_form.weighted_sum_squares ℂ 1)\\n PROOFSTEP ',\n", + " 'completion': \" have hw' : ∀ i : ι, (w i : ℂ) ^ - (1 / 2 : ℂ) ≠ 0\\n\"},\n", + " {'prompt': 'GOAL α : Type u_1,\\tm0 : measurable_space α,\\tμ : measure_theory.measure α,\\ts : set α,\\ths : measurable_set s\\t⊢ s ∈ (μ.restrict s).ae\\n PROOFSTEP ',\n", + " 'completion': ' simp only [ae_restrict_eq hs, exists_prop, mem_principal, mem_inf_iff]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tm0 : measurable_space α,\\tμ : measure_theory.measure α,\\ts : set α,\\ths : measurable_set s\\t⊢ s ∈ (μ.restrict s).ae\\n PROOFSTEP ',\n", + " 'completion': ' simp only [ae_restrict_eq hs, exists_prop, mem_principal, mem_inf_iff]; exact ⟨_, univ_mem, s, subset.rfl, (univ_inter s).symm⟩\\n'},\n", + " {'prompt': 'GOAL f : stieltjes_function,\\tc : ℝ,\\tt : set ℝ\\t⊢ f.length 
(t ∩ set.Ioi c) + f.length (t \\\\ set.Ioi c) ≤ f.length t\\n PROOFSTEP ',\n", + " 'completion': ' refine le_infi (λ a, le_infi (λ b, le_infi (λ h, _)))\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : order_bot α,\\ta : with_bot α,\\tb : α\\t⊢ option.get_or_else a ⊥ ≤ b ↔ a ≤ ↑b\\n PROOFSTEP ',\n", + " 'completion': ' cases a; simp [none_eq_bot, some_eq_coe]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : order_bot α,\\ta : with_bot α,\\tb : α\\t⊢ option.get_or_else a ⊥ ≤ b ↔ a ≤ ↑b\\n PROOFSTEP ',\n", + " 'completion': ' cases a\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : has_le α,\\t_inst_2 : has_le β,\\te : α ≃o β\\t⊢ e.symm.symm = e\\n PROOFSTEP ',\n", + " 'completion': ' ext\\n'},\n", + " {'prompt': 'GOAL a b : cardinal,\\th : a * b = a,\\tha : ω ≤ a,\\tthis : a ≠ 0\\t⊢ ¬a < b\\n PROOFSTEP ',\n", + " 'completion': ' intro hb\\n'},\n", + " {'prompt': 'GOAL a b : cardinal,\\th : a * b = a,\\tha : ω ≤ a,\\tthis : a ≠ 0\\t⊢ b ≤ a\\n PROOFSTEP ',\n", + " 'completion': ' rw [← not_lt]\\n'},\n", + " {'prompt': 'GOAL a b : cardinal,\\th : a * b = a,\\tha : ω ≤ a,\\tthis : a ≠ 0\\t⊢ b ≠ 0\\n PROOFSTEP ',\n", + " 'completion': ' rintro rfl\\n'},\n", + " {'prompt': 'GOAL a b : cardinal,\\th : a * b = a,\\tha : a < ω,\\th2a : a = 0\\t⊢ a = 0\\n PROOFSTEP ',\n", + " 'completion': ' exact h2a\\n'},\n", + " {'prompt': 'GOAL a b : cardinal,\\th : a * b = a,\\tha : a < ω,\\th2a : ¬a = 0\\t⊢ b ≠ 0\\n PROOFSTEP ',\n", + " 'completion': ' rintro rfl\\n'},\n", + " {'prompt': 'GOAL n m : ℕ,\\thb : ↑m ≠ 0,\\th : ↑n * ↑m = ↑n,\\th2a : ↑n ≠ 0\\t⊢ ↑m = 1\\n PROOFSTEP ',\n", + " 'completion': ' rw [← one_le_iff_ne_zero] at h2a hb\\n'},\n", + " {'prompt': 'GOAL n m : ℕ,\\th : ↑n * ↑m = ↑n,\\thb : 1 ≤ ↑m,\\th2a : 1 ≤ ↑n\\t⊢ ↑m = 1\\n PROOFSTEP ',\n", + " 'completion': ' norm_cast at h2a hb h ⊢\\n'},\n", + " {'prompt': 'GOAL n m : ℕ,\\th : n * m = n,\\thb : 1 ≤ m,\\th2a : 1 ≤ n\\t⊢ m ≤ 1\\n PROOFSTEP ',\n", + " 'completion': ' rw [← not_lt]\\n'},\n", + " {'prompt': 'GOAL n m : ℕ,\\th : n * m = n,\\thb : 1 ≤ m,\\th2a : 1 ≤ n\\t⊢ ¬1 < m\\n PROOFSTEP ',\n", + " 'completion': ' intro h2b\\n'},\n", + " {'prompt': 'GOAL n m : ℕ,\\th : n * m = n,\\thb : 1 ≤ m,\\th2a : 1 ≤ n\\t⊢ m = 1\\n PROOFSTEP ',\n", + " 'completion': ' apply le_antisymm _ hb\\n'},\n", + " {'prompt': 'GOAL a b : cardinal,\\thb : b ≠ 0,\\tha : ω ≤ a,\\thab : b ≤ a\\t⊢ a * b = a\\n PROOFSTEP ',\n", + " 'completion': ' rw [mul_eq_max_of_omega_le_left ha hb, max_eq_left hab]\\n'},\n", + " {'prompt': 'GOAL x _p : ℕ,\\t_do_match : ℕ → slim_check.gen {y // x < y},\\ty : ℕ\\t⊢ x < x + y + 1\\n PROOFSTEP ',\n", + " 'completion': ' linarith\\n'},\n", + " {'prompt': 'GOAL r : ℝ,\\tx : ereal,\\thx : ↑r < x,\\ty : ereal,\\thy : ↑0 < y\\t⊢ ↑r < x + y\\n PROOFSTEP ',\n", + " 'completion': ' convert add_lt_add hx hy\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : topological_space α,\\t_inst_2 : t1_space α,\\tx y : α,\\th : y ≠ x\\t⊢ y ∈ {x}ᶜ\\n PROOFSTEP ',\n", + " 'completion': ' rwa [mem_compl_eq, mem_singleton_iff]\\n'},\n", + " {'prompt': 'GOAL i n : ℕ,\\th0 : n ≠ 0,\\thi : i.coprime n\\t⊢ is_primitive_root (complex.exp (2 * ↑π * complex.I * (↑i / ↑n))) n\\n PROOFSTEP ',\n", + " 'completion': ' rw is_primitive_root.iff_def\\n'},\n", + " {'prompt': 'GOAL 𝕜 : Type u_1,\\tE : Type u_2,\\t_inst_1 : is_R_or_C 𝕜,\\t_inst_2 : inner_product_space 𝕜 E,\\tx : E\\t⊢ has_inner.inner 0 x = 0\\n PROOFSTEP ',\n", + " 'completion': ' rw [← zero_smul 𝕜 (0:E), inner_smul_left, ring_hom.map_zero, zero_mul]\\n'},\n", + " {'prompt': 
'GOAL p : ℝ,\\ta b : ℝ≥0∞,\\thp1 : 1 ≤ p\\t⊢ a ^ p + b ^ p ≤ (a + b) ^ p\\n PROOFSTEP ',\n", + " 'completion': ' have hp_pos : 0 < p := lt_of_lt_of_le zero_lt_one hp1\\n'},\n", + " {'prompt': 'GOAL V : Type u_5,\\t_inst_1 : semi_normed_group V,\\tx y : uniform_space.completion V\\t⊢ ∀ (a b : V), has_dist.dist ↑a ↑b = ∥↑a - ↑b∥\\n PROOFSTEP ',\n", + " 'completion': ' clear x y\\n'},\n", + " {'prompt': 'GOAL x : ℝ,\\tn : ℕ\\t⊢ ⇑is_R_or_C.re_clm ((λ (n : ℕ), (1 / ↑n!) • ↑x ^ n) n) = x ^ n / ↑n!\\n PROOFSTEP ',\n", + " 'completion': ' rw [re_clm.map_smul, ← complex.of_real_pow, re_clm_apply, re_to_complex, complex.of_real_re, smul_eq_mul, one_div, mul_comm, div_eq_mul_inv]\\n'},\n", + " {'prompt': 'GOAL C : Type u,\\t_inst_1 : category_theory.category C,\\tF : category_theory.limits.walking_parallel_pair ⥤ C,\\tj : category_theory.limits.walking_parallel_pair\\t⊢ (category_theory.limits.parallel_pair (F.map category_theory.limits.walking_parallel_pair_hom.left) (F.map category_theory.limits.walking_parallel_pair_hom.right)).obj j = F.obj j\\n PROOFSTEP ',\n", + " 'completion': ' cases j; refl\\n'},\n", + " {'prompt': 'GOAL C : Type u,\\t_inst_1 : category_theory.category C,\\tF : category_theory.limits.walking_parallel_pair ⥤ C,\\tj : category_theory.limits.walking_parallel_pair\\t⊢ (category_theory.limits.parallel_pair (F.map category_theory.limits.walking_parallel_pair_hom.left) (F.map category_theory.limits.walking_parallel_pair_hom.right)).obj j = F.obj j\\n PROOFSTEP ',\n", + " 'completion': ' cases j\\n'},\n", + " {'prompt': 'GOAL Γ : Type ?,\\t_inst_1 : inhabited Γ,\\tl : turing.list_blank Γ\\t⊢ ∀ (a b : list Γ), turing.blank_extends a b → a.head = b.head\\n PROOFSTEP ',\n", + " 'completion': ' rintro _ _ ⟨i, rfl⟩\\n'},\n", + " {'prompt': 'GOAL F : Type u₀ → Type u₁ → Type u₂,\\t_inst_1 : bifunctor F,\\t_inst_2 : is_lawful_bifunctor F,\\tα : Type u₀\\t⊢ is_lawful_functor (F α)\\n PROOFSTEP ',\n", + " 'completion': ' refine {..}; intros; simp [functor.map] with functor_norm\\n'},\n", + " {'prompt': 'GOAL F : Type u₀ → Type u₁ → Type u₂,\\t_inst_1 : bifunctor F,\\t_inst_2 : is_lawful_bifunctor F,\\tα : Type u₀\\t⊢ is_lawful_functor (F α)\\n PROOFSTEP ',\n", + " 'completion': ' refine {..}; intros\\n'},\n", + " {'prompt': 'GOAL F : Type u₀ → Type u₁ → Type u₂,\\t_inst_1 : bifunctor F,\\t_inst_2 : is_lawful_bifunctor F,\\tα : Type u₀\\t⊢ is_lawful_functor (F α)\\n PROOFSTEP ',\n", + " 'completion': ' refine {..}\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\ta : α,\\tb_fst : ℕ,\\tb_snd : array b_fst α\\t⊢ (buffer.push_back ⟨b_fst, b_snd⟩ a).read ⟨buffer.size ⟨b_fst, b_snd⟩, _⟩ = a\\n PROOFSTEP ',\n", + " 'completion': ' convert array.read_push_back_right\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\tp : fin (n + 1),\\ti : fin (n + 2),\\th : i ≤ ⇑fin.cast_succ p\\t⊢ p.pred_above i = i.cast_pred\\n PROOFSTEP ',\n", + " 'completion': ' have : i ≤ (last n).cast_succ := h.trans p.le_last\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tl₁ l₂ : list α,\\th : l₁ <+ l₂,\\tl : list α\\t⊢ l₁ ++ l <+ l₂ ++ l\\n PROOFSTEP ',\n", + " 'completion': ' induction h with _ _ a _ ih _ _ a _ ih\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tl : list α,\\tn : ℕ\\t⊢ l.rotate n = list.drop (n % l.length) l ++ list.take (n % l.length) l\\n PROOFSTEP ',\n", + " 'completion': ' cases l.length.zero_le.eq_or_lt with hl hl\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : decidable_eq α,\\ts t : multiset α,\\th : t ≤ s\\t⊢ s - t + t = s\\n PROOFSTEP ',\n", + " 'completion': ' rw [add_comm, add_sub_of_le h]\\n'},\n", + " 
{'prompt': 'GOAL α : Type u_1,\\tγ : Type u_3,\\t_inst_1 : comm_monoid γ,\\tm : multiset α\\t⊢ (multiset.map (λ (a : α), 1) m).prod = 1\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_3 : decidable_eq α,\\tm : multiset (multiset α)\\t⊢ m.sup.nodup ↔ ∀ (a : multiset α), a ∈ m → a.nodup\\n PROOFSTEP ',\n", + " 'completion': ' apply m.induction_on\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_3 : decidable_eq α,\\tm : multiset (multiset α)\\t⊢ 0.sup.nodup ↔ ∀ (a : multiset α), a ∈ 0 → a.nodup\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL a b : ℕ,\\tᾰ : a / b.succ.succ = a,\\tthis : a / (b + 2) ≤ a / 2\\t⊢ a = 0\\n PROOFSTEP ',\n", + " 'completion': ' refine eq_zero_of_le_half _\\n'},\n", + " {'prompt': 'GOAL b : ℕ,\\tl1 l2 : list ℕ\\t⊢ nat.of_digits b (l1 ++ l2) = nat.of_digits b l1 + b ^ l1.length * nat.of_digits b l2\\n PROOFSTEP ',\n", + " 'completion': ' induction l1 with hd tl IH\\n'},\n", + " {'prompt': 'GOAL b : ℕ,\\th : ∀ (m : ℕ), m < 0 → nat.of_digits b.succ.succ (b.succ.succ.digits m) = m\\t⊢ nat.of_digits b.succ.succ (b.succ.succ.digits 0) = 0\\n PROOFSTEP ',\n", + " 'completion': ' rw digits_zero\\n'},\n", + " {'prompt': 'GOAL b n : ℕ\\t⊢ b.digits n = list.nil → n = 0\\t\\tb n : ℕ\\t⊢ n = 0 → b.digits n = list.nil\\n PROOFSTEP ',\n", + " 'completion': ' { intro h, have : of_digits b (digits b n) = of_digits b [], by rw h, convert this, rw of_digits_digits }\\n'},\n", + " {'prompt': 'GOAL n m : ℕ,\\thnm : n ≤ m,\\tthis : n! * (n + 1) ^ (m - n) ≤ m!\\t⊢ 0 < n!\\n PROOFSTEP ',\n", + " 'completion': ' exact factorial_pos n\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\tpow_succ_le_asc_factorial : ∀ (k : ℕ), (n + 1) ^ k ≤ n.asc_factorial k\\t⊢ (n + 1) ^ 0 ≤ n.asc_factorial 0\\n PROOFSTEP ',\n", + " 'completion': ' rw [asc_factorial_zero, pow_zero]\\n'},\n", + " {'prompt': 'GOAL decidable_prime : decidable_pred pos_num.prime,\\tn : pos_num\\t⊢ 2 ≤ ↑(n.bit0)\\t\\tdecidable_prime : decidable_pred pos_num.prime,\\tn : pos_num\\t⊢ ↑(n.bit0) = ↑(n.bit0).min_fac ↔ n = 1\\n PROOFSTEP ',\n", + " 'completion': ' { exact bit0_le_bit0.2 (to_nat_pos _) }\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp q : polynomial R,\\th0 : p.comp q = 0\\t⊢ (p.comp q).nat_degree ≤ p.nat_degree * q.nat_degree\\n PROOFSTEP ',\n", + " 'completion': ' rw [h0, nat_degree_zero]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp q : polynomial R,\\th0 : p.comp q = 0\\t⊢ (p.comp q).nat_degree ≤ p.nat_degree * q.nat_degree\\n PROOFSTEP ',\n", + " 'completion': ' rw [h0, nat_degree_zero]; exact nat.zero_le _\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : linear_ordered_field α,\\tx y : α\\t⊢ const x ≤ const y ↔ x < y ∨ x = y\\n PROOFSTEP ',\n", + " 'completion': ' exact or_congr const_lt const_equiv\\n'},\n", + " {'prompt': \"GOAL α : Type u,\\ts : seq α,\\tval : α\\t⊢ option.some val = prod.fst <$> (λ (a' : α), (a', s.tail)) <$> option.some val\\n PROOFSTEP \",\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts : wseq α,\\tS : wseq (wseq α)\\t⊢ (wseq.join._match_1 <$> wseq.cons s S).join = seq.cons option.none (s.append (wseq.join._match_1 <$> S).join)\\n PROOFSTEP ',\n", + " 'completion': ' unfold functor.map\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\ts : wseq α,\\tS : wseq (wseq α)\\t⊢ (seq.map wseq.join._match_1 (wseq.cons s S)).join = seq.cons option.none (s.append (seq.map wseq.join._match_1 S).join)\\n PROOFSTEP ',\n", + " 'completion': ' simp [join, cons, 
append]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tβ : Type u_2,\\tt : set β,\\tx : α × β\\t⊢ x ∈ ∅.prod t ↔ x ∈ ∅\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : comm_ring R,\\tp q : polynomial R,\\thp : p.separable,\\thq : q * q ∣ p\\t⊢ is_unit q\\n PROOFSTEP ',\n", + " 'completion': ' obtain ⟨p, rfl⟩ := hq\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : fintype α,\\t_inst_2 : decidable_eq α,\\tσ τ : equiv.perm α,\\th : σ.cycle_type = τ.cycle_type\\t⊢ σ.partition = τ.partition\\n PROOFSTEP ',\n", + " 'completion': ' rw [partition.ext_iff, parts_partition, parts_partition, ← sum_cycle_type, ← sum_cycle_type, h]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : fintype α,\\t_inst_2 : decidable_eq α,\\tσ τ : equiv.perm α,\\th : σ.partition = τ.partition\\t⊢ σ.cycle_type = τ.cycle_type\\n PROOFSTEP ',\n", + " 'completion': ' rw [← filter_parts_partition_eq_cycle_type, ← filter_parts_partition_eq_cycle_type, h]\\n'},\n", + " {'prompt': 'GOAL R : Type u_1,\\t_inst_1 : comm_ring R,\\tc₁ c₂ : R,\\tq : ℍ[R,c₁,c₂]\\t⊢ ⇑clifford_algebra_quaternion.equiv (⇑clifford_algebra_quaternion.of_quaternion (⇑quaternion_algebra.conj q)) = ⇑clifford_algebra_quaternion.equiv (⇑clifford_algebra.involute (⇑clifford_algebra.reverse (⇑clifford_algebra_quaternion.of_quaternion q)))\\n PROOFSTEP ',\n", + " 'completion': ' rw [equiv_apply, equiv_apply, to_quaternion_involute_reverse, to_quaternion_of_quaternion, to_quaternion_of_quaternion]\\n'},\n", + " {'prompt': \"GOAL α : Sort u_1,\\tp : α → Prop,\\ta' : α\\t⊢ (∃ (a : α), a' = a ∧ p a) ↔ p a'\\n PROOFSTEP \",\n", + " 'completion': \" simp [@eq_comm _ a']\\n\"},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : measurable_space α,\\tf : α → ℝ≥0∞,\\ta : α\\t⊢ ↑(⇑(measure_theory.simple_func.eapprox_diff f 0) a) = ⇑(measure_theory.simple_func.eapprox f 0) a\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL f : nat.arithmetic_function ℂ,\\tz : ℝ,\\thz : 1 < z,\\th : ∀ (n : ℕ), complex.abs (⇑f n) ≤ 0\\t⊢ f.l_series_summable ↑z\\n PROOFSTEP ',\n", + " 'completion': ' have hf : f = 0 := arithmetic_function.ext (λ n, complex.abs_eq_zero.1 (le_antisymm (h n) (complex.abs_nonneg _)))\\n'},\n", + " {'prompt': 'GOAL p : ℕ,\\thp_prime : fact (nat.prime p),\\tε : ℚ,\\thε : 0 < ε\\t⊢ ∃ (k : ℕ), ↑p ^ -↑k < ε\\n PROOFSTEP ',\n", + " 'completion': ' obtain ⟨k, hk⟩ := @exists_pow_neg_lt p _ ε (by exact_mod_cast hε)\\n'},\n", + " {'prompt': 'GOAL p : ℕ,\\t_inst_1 : fact (nat.prime p),\\tf : padic_seq p,\\thf : ¬f ≈ 0,\\tv1 v3 : ℕ\\t⊢ padic_seq.stationary_point hf ≤ ?m_1\\n PROOFSTEP ',\n", + " 'completion': ' apply le_max_left _ v3\\n'},\n", + " {'prompt': 'GOAL x y z : ℤ,\\th : pythagorean_triple x y z,\\th0 : x.gcd y = 0,\\thx : x = 0\\t⊢ y = 0\\n PROOFSTEP ',\n", + " 'completion': ' apply int.nat_abs_eq_zero.mp\\n'},\n", + " {'prompt': 'GOAL x y z : ℤ,\\th : pythagorean_triple x y z,\\th0 : x.gcd y = 0,\\thx : x = 0\\t⊢ y.nat_abs = 0\\n PROOFSTEP ',\n", + " 'completion': ' apply nat.eq_zero_of_gcd_eq_zero_right h0\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : partial_order α,\\tc : closure_operator α,\\tx : α\\t⊢ ⇑c x = ⇑(closure_operator.mk₃ ⇑c c.closed _ _ _) x\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tB : filter_basis α\\t⊢ filter.generate B.sets ≤ B.filter\\t\\tα : Type u_1,\\tB : filter_basis α\\t⊢ B.filter ≤ filter.generate B.sets\\n PROOFSTEP ',\n", + " 'completion': ' { intros U U_in, rcases B.mem_filter_iff.mp U_in 
with ⟨V, V_in, h⟩, exact generate_sets.superset (generate_sets.basic V_in) h }\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : monoid α,\\tf g h : filter α\\t⊢ f * g * h = f * (g * h)\\n PROOFSTEP ',\n", + " 'completion': ' ext s\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : complete_lattice α,\\th : well_founded gt,\\ta : ℕ →ₘ α,\\tm : ℕ\\t⊢ ⇑a m ≤ monotonic_sequence_limit a\\n PROOFSTEP ',\n", + " 'completion': ' by_cases hm : m ≤ monotonic_sequence_limit_index a\\n'},\n", + " {'prompt': 'GOAL p : ℕ,\\tR : Type u_1,\\thp : fact (nat.prime p),\\t_inst_1 : comm_ring R,\\tn : ℕ\\t⊢ (⇑(witt_vector.teichmuller p) 0).coeff n.succ = 0\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL S : Type u,\\t_inst_1 : pgame.state S,\\tn : ℕ,\\ts : S,\\th : pgame.state.turn_bound s ≤ n\\t⊢ fintype (pgame.of_aux n s h).left_moves\\n PROOFSTEP ',\n", + " 'completion': ' apply fintype.of_equiv _ (left_moves_of_aux _ _).symm\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : topological_space α,\\ts t : set α,\\ths : is_Gδ s,\\tht : is_Gδ t\\t⊢ is_Gδ (s ∪ t)\\n PROOFSTEP ',\n", + " 'completion': ' rcases hs with ⟨S, Sopen, Scount, rfl⟩\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : uniform_space α,\\ts : set (α × α),\\ths : s ∈ 𝓤 α\\t⊢ closure s ∈ 𝓤 α\\n PROOFSTEP ',\n", + " 'completion': ' filter_upwards [hs] subset_closure\\n'},\n", + " {'prompt': \"GOAL 𝕜 : Type u_1,\\t_inst_4 : nondiscrete_normed_field 𝕜,\\t𝕜' : Type u_5,\\t_inst_8 : normed_ring 𝕜',\\t_inst_9 : normed_algebra 𝕜 𝕜'\\t⊢ ∥continuous_linear_map.lmul 𝕜 𝕜'∥ = 1\\n PROOFSTEP \",\n", + " 'completion': \" haveI := normed_algebra.nontrivial 𝕜 𝕜'; exact (lmulₗᵢ 𝕜 𝕜').norm_to_continuous_linear_map\\n\"},\n", + " {'prompt': \"GOAL 𝕜 : Type u_1,\\t_inst_4 : nondiscrete_normed_field 𝕜,\\t𝕜' : Type u_5,\\t_inst_8 : normed_ring 𝕜',\\t_inst_9 : normed_algebra 𝕜 𝕜'\\t⊢ ∥continuous_linear_map.lmul 𝕜 𝕜'∥ = 1\\n PROOFSTEP \",\n", + " 'completion': \" haveI := normed_algebra.nontrivial 𝕜 𝕜'\\n\"},\n", + " {'prompt': \"GOAL x : ℝ,\\th : x ≠ 1,\\th' : ¬x = -1\\t⊢ has_deriv_within_at real.arcsin (1 / real.sqrt (1 - x ^ 2)) (set.Iic x) x\\n PROOFSTEP \",\n", + " 'completion': \" exact (has_deriv_at_arcsin h' h).has_deriv_within_at\\n\"},\n", + " {'prompt': 'GOAL X Y Z : Type u,\\tf : X ⟶ Y,\\tg : Y ⟶ Z\\t⊢ category_theory.discrete.functor (f ≫ g) = category_theory.discrete.functor f ≫ category_theory.discrete.functor g\\n PROOFSTEP ',\n", + " 'completion': ' apply functor.ext\\n'},\n", + " {'prompt': 'GOAL α : Type,\\t_x : char_buffer,\\t_x _x : ℕ,\\t_x : α\\t⊢ failure _x _x = parse_result.done _x _x → _x = _x + 1\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL K : Type u_1,\\t_inst_1 : is_R_or_C K,\\tr : ℝ\\t⊢ ⇑is_R_or_C.re ↑(bit1 r) = ⇑is_R_or_C.re (bit1 ↑r) ∧ ⇑is_R_or_C.im ↑(bit1 r) = ⇑is_R_or_C.im (bit1 ↑r)\\n PROOFSTEP ',\n", + " 'completion': ' simp [bit1]\\n'},\n", + " {'prompt': 'GOAL z : ℂ\\t⊢ (λ (c : fin 2 → ℝ), ↑(c 0) + c 1 • complex.I) ((λ (z : ℂ), ![z.re, z.im]) z) = z\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL R : Type u_1,\\tS : Type u_2,\\t_inst_1 : non_assoc_semiring R,\\t_inst_2 : non_assoc_semiring S,\\te : R ≃+* S\\t⊢ e.symm.to_ring_hom.comp e.to_ring_hom = ring_hom.id R\\n PROOFSTEP ',\n", + " 'completion': ' ext\\n'},\n", + " {'prompt': 'GOAL n j_val : ℕ,\\tj_property : j_val < n + 1,\\th : ⟨j_val, j_property⟩ ≠ 0\\t⊢ ↑(fin.pred ⟨j_val, j_property⟩ h) = ↑⟨j_val, j_property⟩ - 1\\n PROOFSTEP ',\n", + " 'completion': ' 
refl\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : decidable_eq (finset α),\\ts : finset α\\t⊢ s.powerset = (finset.range (s.card + 1)).bUnion (λ (i : ℕ), finset.powerset_len i s)\\n PROOFSTEP ',\n", + " 'completion': ' refine ext (λ a, ⟨λ ha, _, λ ha, _ ⟩)\\n'},\n", + " {'prompt': \"GOAL m n : ℕ,\\th : m < n.succ,\\th' : n.succ - m = (n - m).succ\\t⊢ int.sub_nat_nat m n.succ = -[1+ n - m]\\n PROOFSTEP \",\n", + " 'completion': ' simp [*, sub_nat_nat]\\n'},\n", + " {'prompt': 'GOAL a b : ℤ,\\tthis : (a + b * 1) % b = a % b\\t⊢ (a + b) % b = a % b\\n PROOFSTEP ',\n", + " 'completion': ' rwa mul_one at this\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : _root_.decidable_eq α,\\tdecidable_eq : _root_.decidable_eq (lazy_list α),\\t_x : α,\\t_x : thunk (lazy_list α)\\t⊢ ¬lazy_list.nil = lazy_list.cons _x _x\\n PROOFSTEP ',\n", + " 'completion': ' cc\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : _root_.decidable_eq α,\\tdecidable_eq : _root_.decidable_eq (lazy_list α),\\t_x : α,\\t_x : thunk (lazy_list α)\\t⊢ ¬lazy_list.cons _x _x = lazy_list.nil\\n PROOFSTEP ',\n", + " 'completion': ' cc\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tβ : Type v,\\tf : α → β,\\tl : list α\\t⊢ list.map f l = list.nil → l = list.nil\\n PROOFSTEP ',\n", + " 'completion': ' cases l; simp only [forall_prop_of_true, map, forall_prop_of_false, not_false_iff]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tβ : Type v,\\tf : α → β,\\tl : list α\\t⊢ list.map f l = list.nil → l = list.nil\\n PROOFSTEP ',\n", + " 'completion': ' cases l\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : decidable_eq α,\\ta : α,\\tl : list α,\\th : a ∈ l\\t⊢ (list.erasep (eq a) l).length = l.length.pred\\n PROOFSTEP ',\n", + " 'completion': ' exact length_erasep_of_mem h rfl\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tk : ℕ,\\th : list.nil.rotate k ~r list.nil\\t⊢ ∃ (n : ℕ), n < list.nil.cyclic_permutations.length ∧ list.nil.rotate n = list.nil.rotate k\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tβ : Type u_2,\\ts : multiset α,\\tf : α → β\\t⊢ multiset.map f s = 0 ↔ s = 0\\n PROOFSTEP ',\n", + " 'completion': ' rw [← multiset.card_eq_zero, multiset.card_map, multiset.card_eq_zero]\\n'},\n", + " {'prompt': 'GOAL h : 0 ≠ 0\\t⊢ ∃ (i : ℕ), 0.test_bit i = bool.tt ∧ ∀ (j : ℕ), i < j → 0.test_bit j = bool.ff\\n PROOFSTEP ',\n", + " 'completion': ' exact false.elim (h rfl)\\n'},\n", + " {'prompt': 'GOAL b n : ℕ,\\th : ∀ (m : ℕ), m < n → nat.of_digits b.succ.succ (b.succ.succ.digits m) = m\\t⊢ nat.of_digits b.succ.succ (b.succ.succ.digits n) = n\\n PROOFSTEP ',\n", + " 'completion': ' cases n\\n'},\n", + " {'prompt': 'GOAL b n : ℕ,\\th : ∀ (m : ℕ), m < n.succ → nat.of_digits b.succ.succ (b.succ.succ.digits m) = m\\t⊢ nat.of_digits b.succ.succ (b.succ.succ.digits n.succ) = n.succ\\n PROOFSTEP ',\n", + " 'completion': ' simp only [nat.succ_eq_add_one, digits_add_two_add_one]\\n'},\n", + " {'prompt': 'GOAL b n : ℕ,\\th : b.digits n = list.nil,\\tthis : nat.of_digits b (b.digits n) = nat.of_digits b list.nil\\t⊢ n = nat.of_digits b (b.digits n)\\n PROOFSTEP ',\n", + " 'completion': ' rw of_digits_digits\\n'},\n", + " {'prompt': 'GOAL m n : ℕ,\\ts : m.psub n = option.none,\\th : m ≥ n,\\tk : ℕ,\\te : n + k = m\\t⊢ false\\n PROOFSTEP ',\n", + " 'completion': ' injection s.symm.trans (psub_eq_some.2 $ (add_comm _ _).trans e)\\n'},\n", + " {'prompt': 'GOAL n m : ℕ+,\\tu : prime_multiset := n.factor_multiset,\\tv : prime_multiset := m.factor_multiset,\\tthis : n = u.prod\\t⊢ (n * 
m).factor_multiset = n.factor_multiset + m.factor_multiset\\n PROOFSTEP ',\n", + " 'completion': ' rw[this]\\n'},\n", + " {'prompt': 'GOAL n m : ℕ+,\\tu : prime_multiset := n.factor_multiset,\\tv : prime_multiset := m.factor_multiset,\\tthis : n = u.prod\\t⊢ (u.prod * m).factor_multiset = u.prod.factor_multiset + m.factor_multiset\\n PROOFSTEP ',\n", + " 'completion': ' have : m = v.prod := (prod_factor_multiset m).symm\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp q : polynomial R,\\thq : q.monic,\\tH : p.leading_coeff = 0\\t⊢ (p * q).leading_coeff = p.leading_coeff\\n PROOFSTEP ',\n", + " 'completion': ' rw [H, leading_coeff_eq_zero.1 H, zero_mul, leading_coeff_zero]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp q : polynomial R,\\thq : q.monic,\\tH : p.leading_coeff ≠ 0\\t⊢ (p * q).leading_coeff = p.leading_coeff\\n PROOFSTEP ',\n", + " 'completion': \" rw [leading_coeff_mul', hq.leading_coeff, mul_one]; rwa [hq.leading_coeff, mul_one]\\n\"},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp q : polynomial R,\\thq : q.monic,\\tH : p.leading_coeff ≠ 0\\t⊢ (p * q).leading_coeff = p.leading_coeff\\n PROOFSTEP ',\n", + " 'completion': \" rw [leading_coeff_mul', hq.leading_coeff, mul_one]\\n\"},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp q : polynomial R,\\thq : q.monic,\\tH : p.leading_coeff ≠ 0\\t⊢ p.leading_coeff * q.leading_coeff ≠ 0\\n PROOFSTEP ',\n", + " 'completion': ' rwa [hq.leading_coeff, mul_one]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : linear_order α,\\ta b c : α\\t⊢ set.Ioc a b ∪ set.Ioc a c = set.Ioc a (linear_order.max b c)\\n PROOFSTEP ',\n", + " 'completion': ' rw [Ioc_union_Ioc, min_self]; exact (min_le_left _ _).trans (le_max_left _ _)\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : linear_order α,\\ta b c : α\\t⊢ set.Ioc a b ∪ set.Ioc a c = set.Ioc a (linear_order.max b c)\\n PROOFSTEP ',\n", + " 'completion': ' rw [Ioc_union_Ioc, min_self]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : partial_order α,\\ta b : α,\\th : a ≤ b\\t⊢ set.Icc a b \\\\ set.Ioc a b = {a}\\n PROOFSTEP ',\n", + " 'completion': ' rw [← Icc_diff_left, diff_diff_cancel_left (singleton_subset_iff.2 $ left_mem_Icc.2 h)]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : partial_order α,\\ta b : α,\\thab : a ≤ b\\t⊢ set.Ico a b ∪ {b} = set.Icc a b\\n PROOFSTEP ',\n", + " 'completion': ' simpa only [dual_Ioc, dual_Icc] using @Ioc_union_left (order_dual α) _ b a hab\\n'},\n", + " {'prompt': 'GOAL F : Type u,\\t_inst_1 : decidable_eq F,\\t_inst_2 : field F,\\ts : finset F,\\tx : F\\t⊢ polynomial.eval x (lagrange.basis s x) = 1\\n PROOFSTEP ',\n", + " 'completion': ' rw [basis, ← coe_eval_ring_hom, (eval_ring_hom x).map_prod, coe_eval_ring_hom, finset.prod_eq_one]\\n'},\n", + " {'prompt': 'GOAL f g : nat.arithmetic_function ℂ,\\tz : ℂ,\\thf : f.l_series_summable z,\\thg : g.l_series_summable z\\t⊢ (f + g).l_series z = f.l_series z + g.l_series z\\n PROOFSTEP ',\n", + " 'completion': ' simp only [l_series, add_apply]\\n'},\n", + " {'prompt': 'GOAL p : ℕ,\\t_inst_1 : fact (nat.prime p),\\tf : padic_seq p,\\thf : ¬f ≈ 0,\\tv1 v3 : ℕ\\t⊢ padic_seq.stationary_point hf ≤ padic_seq.stationary_point hf\\n PROOFSTEP ',\n", + " 'completion': ' apply le_refl\\n'},\n", + " {'prompt': 'GOAL p : ℕ,\\t_inst_1 : fact (nat.prime p),\\tf g : padic_seq p,\\th : f ≈ g,\\thf : f ≈ 0\\t⊢ f.valuation = g.valuation\\n PROOFSTEP ',\n", + " 'completion': ' have hg : g ≈ 0\\n'},\n", + " {'prompt': 'GOAL p : 
ℕ,\\t_inst_1 : fact (nat.prime p),\\tf g : padic_seq p,\\th : f ≈ g,\\thf : ¬f ≈ 0\\t⊢ f.valuation = g.valuation\\n PROOFSTEP ',\n", + " 'completion': ' have hg : ¬ g ≈ 0\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tι : Sort w,\\t_inst_1 : complete_boolean_algebra α,\\tf : ι → α\\t⊢ (supr f)ᶜᶜ = (⨅ (i : ι), (f i)ᶜ)ᶜ\\n PROOFSTEP ',\n", + " 'completion': ' simp [compl_infi]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tf : filter α,\\t_inst_1 : f.ne_bot,\\tp : Prop,\\th : p\\t⊢ (∃ᶠ (x : α) in f, p) ↔ p\\n PROOFSTEP ',\n", + " 'completion': ' simpa [h]\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tf : filter α,\\t_inst_1 : f.ne_bot,\\tp : Prop,\\th : ¬p\\t⊢ (∃ᶠ (x : α) in f, p) ↔ p\\n PROOFSTEP ',\n", + " 'completion': ' simp [h]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : complete_lattice α,\\th : well_founded gt,\\ta : ℕ →ₘ α\\t⊢ (⨆ (m : ℕ), ⇑a m) = monotonic_sequence_limit a\\n PROOFSTEP ',\n", + " 'completion': ' suffices : (⨆ (m : ℕ), a m) ≤ monotonic_sequence_limit a\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : complete_lattice α,\\th : well_founded gt,\\ta : ℕ →ₘ α\\t⊢ (⨆ (m : ℕ), ⇑a m) ≤ monotonic_sequence_limit a\\n PROOFSTEP ',\n", + " 'completion': ' apply supr_le\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : complete_lattice α,\\th : well_founded gt,\\ta : ℕ →ₘ α\\t⊢ ∀ (i : ℕ), ⇑a i ≤ monotonic_sequence_limit a\\n PROOFSTEP ',\n", + " 'completion': ' intros m\\n'},\n", + " {'prompt': \"GOAL α : Type u_1,\\t_inst_1 : semilattice_sup α,\\tf : ℕ → α,\\tn : ℕ\\t⊢ ⇑(partial_sups f) n = (finset.range (n + 1)).sup' _ f\\n PROOFSTEP \",\n", + " 'completion': ' induction n with n ih\\n'},\n", + " {'prompt': 'GOAL A : Type u_1,\\tB : Type u_2,\\t_inst_1 : comm_ring A,\\t_inst_2 : comm_ring B,\\tf : A →+* B,\\thf : function.surjective ⇑f\\t⊢ f.finite\\n PROOFSTEP ',\n", + " 'completion': ' letI := f.to_algebra\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : comm_monoid α,\\t_inst_2 : decidable_rel has_dvd.dvd,\\ta : α,\\tha : multiplicity.finite a 1\\t⊢ (multiplicity a 1).get ha = 0\\n PROOFSTEP ',\n", + " 'completion': ' rw [enat.get_eq_iff_eq_coe, eq_coe_iff, pow_zero]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\tx : R,\\t_inst_1 : ring R,\\tn : ℕ,\\thn : x ^ n = 0\\t⊢ (-x) ^ n = 0\\n PROOFSTEP ',\n", + " 'completion': ' rw [neg_pow, hn, mul_zero]\\n'},\n", + " {'prompt': 'GOAL M : Type u_1,\\t_inst_1 : comm_monoid M,\\tk : ℕ,\\tζ : M,\\th : is_primitive_root ζ k,\\th0 : 0 < k\\t⊢ is_unit ζ\\n PROOFSTEP ',\n", + " 'completion': ' apply is_unit_of_mul_eq_one ζ (ζ ^ (k - 1))\\n'},\n", + " {'prompt': 'GOAL p : ℕ,\\tR : Type u_1,\\thp : fact (nat.prime p),\\t_inst_1 : comm_ring R,\\tx : witt_vector p R\\t⊢ ghost_fun (-x) = -ghost_fun x\\n PROOFSTEP ',\n", + " 'completion': ' ghost_fun_tac (-X 0) ![x.coeff]\\n'},\n", + " {'prompt': 'GOAL p : ℕ,\\tR : Type u_1,\\thp : fact (nat.prime p),\\t_inst_1 : comm_ring R,\\tn : ℕ\\t⊢ (⇑(witt_vector.teichmuller p) 0).coeff n.succ = 0.coeff n.succ\\n PROOFSTEP ',\n", + " 'completion': ' rw zero_coeff\\n'},\n", + " {'prompt': 'GOAL c : cardinal,\\tα : Type u_1\\t⊢ quot.mk setoid.r α < cardinal.succ (quot.mk setoid.r α) → quot.mk setoid.r α + 1 ≤ cardinal.succ (quot.mk setoid.r α)\\n PROOFSTEP ',\n", + " 'completion': ' refine quot.induction_on (succ (quot.mk setoid.r α)) (λ β h, _)\\n'},\n", + " {'prompt': 'GOAL c : cardinal,\\tα β : Type u_1,\\th : quot.mk setoid.r α < quot.mk setoid.r β\\t⊢ quot.mk setoid.r α + 1 ≤ quot.mk setoid.r β\\n PROOFSTEP ',\n", + " 'completion': ' cases h.left with f\\n'},\n", + " 
{'prompt': 'GOAL h : 0 < ω\\t⊢ dite (0 < ω) (λ (h : 0 < ω), classical.some _) (λ (h : ¬0 < ω), 0) = 0\\n PROOFSTEP ',\n", + " 'completion': ' rw [dif_pos h, ← cardinal.nat_cast_inj, ← classical.some_spec (lt_omega.1 h), nat.cast_zero]\\n'},\n", + " {'prompt': 'GOAL n : ℕ,\\th2a : ¬↑n = 0,\\tm : ℕ,\\thb : ↑m ≠ 0,\\th : ↑n * ↑m = ↑n\\t⊢ ↑m = 1\\n PROOFSTEP ',\n", + " 'completion': ' rw [← ne] at h2a\\n'},\n", + " {'prompt': 'GOAL i : ℤ,\\th1 : 0 < i,\\th2 : 2 * i = (1 + 1) * i\\t⊢ ¬2 * i < i + 1\\n PROOFSTEP ',\n", + " 'completion': ' simpa only [h2, add_mul, one_mul, add_lt_add_iff_left, not_lt] using h1\\n'},\n", + " {'prompt': 'GOAL r : ℝ,\\tx : ereal,\\thx : ↑r < x,\\ty : ereal,\\thy : ↑0 < y\\t⊢ ↑r < (x, y).fst + (x, y).snd\\n PROOFSTEP ',\n", + " 'completion': ' dsimp\\n'},\n", + " {'prompt': \"GOAL f : ℝ → ℝ,\\ta b : ℝ,\\th : continuous_on f [a, b]\\t⊢ has_Inf.Inf (f '' [a, b]) ≤ has_Sup.Sup (f '' [a, b])\\n PROOFSTEP \",\n", + " 'completion': ' rw [real.image_interval_eq_Icc h]\\n'},\n", + " {'prompt': 'GOAL X : Type u_2,\\tY : Type u_3,\\t_inst_1 : emetric_space X,\\t_inst_2 : emetric_space Y,\\te : X ≃ᵢ Y\\t⊢ dimH set.univ = dimH set.univ\\n PROOFSTEP ',\n", + " 'completion': ' rw [← e.dimH_preimage univ, preimage_univ]\\n'},\n", + " {'prompt': 'GOAL f : ℂ →ₗᵢ[ℝ] ℂ,\\th₂ : ∀ (z : ℂ), (⇑f z).re = z.re,\\tz : ℂ\\t⊢ (⇑f z).im = z.im ∨ (⇑f z).im = -z.im\\n PROOFSTEP ',\n", + " 'completion': ' have h₁ := f.norm_map z\\n'},\n", + " {'prompt': 'GOAL C : Type u,\\tX Y : F C,\\tf g : X ⟶ Y,\\tthis : (category_theory.free_monoidal_category.full_normalize C).map f = (category_theory.free_monoidal_category.full_normalize C).map g\\t⊢ f = g\\n PROOFSTEP ',\n", + " 'completion': ' rw [←functor.id_map f, ←functor.id_map g]\\n'},\n", + " {'prompt': 'GOAL C : Type u,\\t_inst_1 : category_theory.category C,\\tX Y : C,\\t_inst_3 : category_theory.limits.has_zero_morphisms C,\\t_inst_4 : category_theory.limits.has_zero_object C\\t⊢ (category_theory.limits.image_subobject_iso 0).hom ≫ category_theory.limits.image.ι 0 = 0\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : primcodable α,\\tβ : Type u_2,\\te : β ≃ α,\\t_inst : primcodable β := primcodable.of_equiv α e\\t⊢ primrec ⇑e\\n PROOFSTEP ',\n", + " 'completion': ' exact encode_iff.1 primrec.encode\\n'},\n", + " {'prompt': 'GOAL Γ : Type ?,\\t_inst_1 : inhabited Γ,\\tl : turing.list_blank Γ,\\ta : list Γ,\\ti : ℕ\\t⊢ a.head = (a ++ list.repeat (inhabited.default Γ) i).head\\n PROOFSTEP ',\n", + " 'completion': ' cases a\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\ta : α,\\ti b_fst : ℕ,\\tb_snd : array b_fst α,\\th : i < buffer.size ⟨b_fst, b_snd⟩\\t⊢ i = ⟨i, h⟩.val\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tb : buffer α,\\ta : α,\\ti : ℕ,\\th : i < b.size\\t⊢ (b.push_back a).read ⟨i, _⟩ = b.read ⟨i, h⟩\\n PROOFSTEP ',\n", + " 'completion': ' cases b\\n'},\n", + " {'prompt': 'GOAL _x : char_buffer,\\t_x _x : ℕ,\\t_x : dlist string\\t⊢ (_x = dlist.empty ∧ buffer.size _x ≤ _x) ∧ _x = _x → _x = _x\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : decidable_eq α,\\ta : α,\\ts : finset α,\\th : a ∉ s\\t⊢ (has_insert.insert a s).val = a ::ₘ s.val\\n PROOFSTEP ',\n", + " 'completion': ' rw [insert_val, ndinsert_of_not_mem h]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : decidable_eq α,\\ts₁ s₂ : finset α,\\tx : α\\t⊢ x ∈ s₁ ∪ s₂ ↔ x ∈ s₂ ∪ s₁\\n PROOFSTEP ',\n", + " 'completion': ' simp only 
[mem_union, or_comm]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tn : ℕ,\\ts : finset α,\\th : n < s.card,\\t_inst : Π (a : Prop), decidable a\\t⊢ (finset.powerset_len n s).nonempty\\n PROOFSTEP ',\n", + " 'completion': ' induction s using finset.induction_on with x s hx IH generalizing n\\n'},\n", + " {'prompt': 'GOAL β : Type u_2,\\t_inst_1 : comm_monoid β,\\tn : ℕ,\\tf : fin n → β\\t⊢ (list.of_fn f).prod = ∏ (i : fin n), f i\\n PROOFSTEP ',\n", + " 'completion': ' rw [list.of_fn_eq_map, fin.prod_univ_def]\\n'},\n", + " {'prompt': 'GOAL M₀ : Type u_1,\\t_inst_2 : monoid_with_zero M₀,\\t_inst_3 : nontrivial M₀,\\t_inst_4 : no_zero_divisors M₀,\\tL : list M₀\\t⊢ L.prod = 0 ↔ 0 ∈ L\\n PROOFSTEP ',\n", + " 'completion': ' induction L with a L ihL\\n'},\n", + " {'prompt': \"GOAL α : Type u,\\tP : α → Prop,\\t_inst_1 : decidable_pred P,\\tys : list α\\t⊢ list.split_on_p_aux' P list.nil ys = list.split_on_p_aux P list.nil (has_append.append ys)\\n PROOFSTEP \",\n", + " 'completion': ' simp! only [append_nil, eq_self_iff_true, and_self]\\n'},\n", + " {'prompt': \"GOAL α : Type uu,\\tx : α,\\tn : ℕ,\\thn : n < (list.permutations'_aux x list.nil).length\\t⊢ (list.permutations'_aux x list.nil).nth_le n hn = list.insert_nth n x list.nil\\n PROOFSTEP \",\n", + " 'completion': \" simp only [length, permutations'_aux, nat.lt_one_iff] at hn\\n\"},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : decidable_eq α,\\ts t u : multiset α\\t⊢ s ∩ t + u = (s + u) ∩ (t + u)\\n PROOFSTEP ',\n", + " 'completion': ' by_contra h\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\tS₁ : Type v,\\tS₂ : Type w,\\t_inst_1 : comm_semiring R\\t⊢ mv_polynomial (S₁ ⊕ S₂) R ≃+* mv_polynomial S₁ (mv_polynomial S₂ R)\\n PROOFSTEP ',\n", + " 'completion': ' apply @mv_polynomial_equiv_mv_polynomial R (S₁ ⊕ S₂) _ _ _ _ (sum_to_iter R S₁ S₂) (iter_to_sum R S₁ S₂)\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\tS₁ : Type v,\\tS₂ : Type w,\\t_inst_1 : comm_semiring R\\t⊢ ((mv_polynomial.sum_to_iter R S₁ S₂).comp (mv_polynomial.iter_to_sum R S₁ S₂)).comp mv_polynomial.C = mv_polynomial.C\\n PROOFSTEP ',\n", + " 'completion': ' refine ring_hom.ext (λ p, _)\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\tS₁ : Type v,\\tS₂ : Type w,\\t_inst_1 : comm_semiring R\\t⊢ ((mv_polynomial.iter_to_sum R S₁ S₂).comp (mv_polynomial.sum_to_iter R S₁ S₂)).comp mv_polynomial.C = mv_polynomial.C\\n PROOFSTEP ',\n", + " 'completion': ' ext1 a\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\tσ : Type u_1,\\t_inst_1 : comm_semiring R,\\tp q : mv_polynomial σ R,\\th : p.degrees.disjoint q.degrees\\t⊢ p.degrees ≤ (p + q).degrees\\n PROOFSTEP ',\n", + " 'completion': ' apply le_degrees_add h\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\tσ : Type u_1,\\t_inst_1 : comm_semiring R,\\tp q : mv_polynomial σ R,\\th : p.degrees.disjoint q.degrees\\t⊢ q.degrees ≤ (q + p).degrees\\n PROOFSTEP ',\n", + " 'completion': ' apply le_degrees_add h.symm\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\tσ : Type u_1,\\t_inst_1 : comm_semiring R,\\tp q : mv_polynomial σ R,\\th : p.degrees.disjoint q.degrees\\t⊢ q.degrees ≤ (p + q).degrees\\n PROOFSTEP ',\n", + " 'completion': ' rw add_comm\\n'},\n", + " {'prompt': 'GOAL a b : ℕ,\\tᾰ : a / b.succ.succ = a,\\tthis : a / (b + 2) ≤ a / 2\\t⊢ a ≤ a / 2\\n PROOFSTEP ',\n", + " 'completion': ' simp * at *\\n'},\n", + " {'prompt': \"GOAL b b' : ℕ,\\tc : ℤ,\\th : ↑b ∣ ↑b' - c,\\tn : ℕ\\t⊢ b ∣ n ↔ ↑b ∣ nat.of_digits c (b'.digits n)\\n PROOFSTEP \",\n", + " 'completion': ' rw ←int.coe_nat_dvd\\n'},\n", + " {'prompt': \"GOAL b b' : ℕ,\\tc : ℤ,\\th : ↑b ∣ 
↑b' - c,\\tn : ℕ\\t⊢ ↑b ∣ ↑n ↔ ↑b ∣ nat.of_digits c (b'.digits n)\\n PROOFSTEP \",\n", + " 'completion': \" exact dvd_iff_dvd_of_dvd_sub (zmodeq_of_digits_digits b b' c (int.modeq_iff_dvd.2 h).symm _).symm.dvd\\n\"},\n", + " {'prompt': 'GOAL decidable_prime : decidable_pred pos_num.prime,\\tn : pos_num\\t⊢ 2 ≤ ↑(n.bit1)\\t\\tdecidable_prime : decidable_pred pos_num.prime,\\tn : pos_num\\t⊢ ↑(n.bit1).min_fac = ↑(n.bit1) ↔ n.bit1.min_fac_aux ↑n 1 = n.bit1\\n PROOFSTEP ',\n", + " 'completion': ' { exact nat.bit0_le_bit1_iff.2 (to_nat_pos _) }\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : ring R\\t⊢ ∀ (n : ℕ) (a : polynomial R), int.of_nat n.succ • a = a + int.of_nat n • a\\n PROOFSTEP ',\n", + " 'completion': ' rintros n ⟨⟩\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tc : R,\\tn : ℕ,\\th : c ≠ 0\\t⊢ (⇑polynomial.C c * polynomial.X ^ n).support = {n}\\n PROOFSTEP ',\n", + " 'completion': ' rw [C_mul_X_pow_eq_monomial]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp q : polynomial R,\\th : (p + q).degree ≤ p.degree\\t⊢ (p + q).nat_degree ≤ linear_order.max p.nat_degree q.nat_degree\\n PROOFSTEP ',\n", + " 'completion': ' simp [nat_degree_le_nat_degree h]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\tp q : polynomial R,\\th : (p + q).degree ≤ q.degree\\t⊢ (p + q).nat_degree ≤ linear_order.max p.nat_degree q.nat_degree\\n PROOFSTEP ',\n", + " 'completion': ' simp [nat_degree_le_nat_degree h]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : semiring R,\\th : 0 ≠ 1,\\tp : polynomial R,\\thp : p.monic,\\t_inst : nontrivial R\\t⊢ p ≠ 0\\n PROOFSTEP ',\n", + " 'completion': ' exact hp.ne_zero\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\t_inst_1 : nontrivial R,\\t_inst_2 : ring R,\\ta : R,\\tthis : (⇑polynomial.C a).degree < polynomial.X.degree\\t⊢ (polynomial.X + ⇑polynomial.C a).degree = 1\\n PROOFSTEP ',\n", + " 'completion': ' rw [degree_add_eq_left_of_degree_lt this, degree_X]\\n'},\n", + " {'prompt': 'GOAL R : Type u,\\ta : R,\\t_inst_1 : semiring R,\\tn : ℕ,\\tha : a ≠ 0\\t⊢ (⇑polynomial.C a * polynomial.X ^ n).trailing_degree = ↑n\\n PROOFSTEP ',\n", + " 'completion': ' rw [C_mul_X_pow_eq_monomial, trailing_degree_monomial ha]\\n'},\n", + " {'prompt': 'GOAL n : ℕ\\t⊢ set.range (λ (x : euclidean_quadrant n), x.val) = {y : euclidean_space ℝ (fin n) | ∀ (i : fin n), 0 ≤ y i}\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL H : subgroup ↥(alternating_group (fin 5)),\\tHn : H.normal,\\tHb : ¬∀ (x : ↥(alternating_group (fin 5))), x ∈ H → x = 1\\t⊢ H = ⊤\\n PROOFSTEP ',\n", + " 'completion': ' push_neg at Hb\\n'},\n", + " {'prompt': 'GOAL H : subgroup ↥(alternating_group (fin 5)),\\tHn : H.normal,\\tHb : ∃ (x : ↥(alternating_group (fin 5))), x ∈ H ∧ x ≠ 1\\t⊢ H = ⊤\\n PROOFSTEP ',\n", + " 'completion': ' obtain ⟨⟨g, gA⟩, gH, g1⟩ : ∃ (x : ↥(alternating_group (fin 5))), x ∈ H ∧ x ≠ 1 := Hb\\n'},\n", + " {'prompt': 'GOAL G : Type u_1,\\t_inst_1 : group G,\\tH : subgroup G\\t⊢ H = ⊥ ↔ ∀ (x : G), x ∈ H → x = 1\\n PROOFSTEP ',\n", + " 'completion': \" rw set_like.ext'_iff\\n\"},\n", + " {'prompt': 'GOAL G : Type u_1,\\t_inst_1 : group G,\\tH : subgroup G\\t⊢ ↑H = ↑⊥ ↔ ∀ (x : G), x ∈ H → x = 1\\n PROOFSTEP ',\n", + " 'completion': ' simp only [coe_bot, set.eq_singleton_iff_unique_mem, set_like.mem_coe, H.one_mem, true_and]\\n'},\n", + " {'prompt': 'GOAL R : Type u_3,\\t_inst_1 : non_assoc_semiring R,\\tS : submonoid R,\\tb : R,\\thb : b ∈ add_submonoid.closure ↑S\\t⊢ b * 0 ∈ add_submonoid.closure 
↑S\\n PROOFSTEP ',\n", + " 'completion': ' simp only [mul_zero, (add_submonoid.closure (S : set R)).zero_mem]\\n'},\n", + " {'prompt': 'GOAL R : Type u_3,\\t_inst_1 : non_assoc_semiring R,\\tS : submonoid R\\t⊢ ∀ {a : R}, a ∈ add_submonoid.closure ↑S → a * 0 ∈ add_submonoid.closure ↑S\\n PROOFSTEP ',\n", + " 'completion': ' exact λ b hb, by simp only [mul_zero, (add_submonoid.closure (S : set R)).zero_mem]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tβ : Type u_2,\\t_inst_1 : measurable_space α,\\t_inst_2 : measurable_space β,\\t_inst_4 : is_empty β,\\tf : α → β\\t⊢ measurable f\\n PROOFSTEP ',\n", + " 'completion': ' haveI := function.is_empty f\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tm0 : measurable_space α,\\tμ : measure_theory.measure α,\\ts : set α,\\ths : measurable_set s\\t⊢ (μ.restrict s).ae = μ.ae ⊓ 𝓟 s\\n PROOFSTEP ',\n", + " 'completion': ' ext t\\n'},\n", + " {'prompt': 'GOAL x y z : ℤ,\\th : pythagorean_triple x y z,\\th0 : ¬x.gcd y = 0\\t⊢ (x / ↑(x.gcd y)).gcd (y / ↑(x.gcd y)) = 1\\n PROOFSTEP ',\n", + " 'completion': ' apply int.gcd_div_gcd_div_gcd (nat.pos_of_ne_zero h0)\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : bounded_lattice α,\\ta b : α,\\tw : disjoint a b,\\th : a ⊔ b = a\\t⊢ b = ⊥\\n PROOFSTEP ',\n", + " 'completion': ' rw disjoint_iff at w\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : semilattice_sup α,\\tb c : α\\t⊢ ∃ (b_1 : α) (H : b_1 ∈ option.some b), b_1 ≤ b ⊔ c\\n PROOFSTEP ',\n", + " 'completion': ' exact ⟨_, rfl, le_sup_left⟩\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : semilattice_sup α,\\tb c : α\\t⊢ ∃ (b_1 : α) (H : b_1 ∈ option.some c), b_1 ≤ b ⊔ c\\n PROOFSTEP ',\n", + " 'completion': ' exact ⟨_, rfl, le_sup_right⟩\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tf : filter α,\\tcblb : f.is_countably_generated\\t⊢ ∃ (x : ℕ → set α), f = ⨅ (i : ℕ), 𝓟 (x i)\\n PROOFSTEP ',\n", + " 'completion': ' rcases cblb.exists_countable_infi_principal with ⟨B, Bcbl, rfl⟩\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : semilattice_sup α,\\ta b : α\\t⊢ a ⊔ b ≤ a ∧ a ≤ a ⊔ b ↔ b ≤ a\\n PROOFSTEP ',\n", + " 'completion': ' simp [le_refl]\\n'},\n", + " {'prompt': \"GOAL R : Type u,\\t_inst_1 : comm_ring R,\\tn : ℕ\\t⊢ ↑(finset.range (n + 1)) ⊆ (λ (n : ℕ), polynomial.X ^ n) ⁻¹' ↑(polynomial.degree_le R ↑n)\\n PROOFSTEP \",\n", + " 'completion': ' intros k hk\\n'},\n", + " {'prompt': 'GOAL R : Type u_1,\\t_inst_1 : integral_domain R,\\t_inst_2 : gcd_monoid R,\\tr : R,\\th0 : r = 0\\t⊢ (⇑polynomial.C r).support.gcd (⇑polynomial.C r).coeff = ⇑normalize r\\n PROOFSTEP ',\n", + " 'completion': ' simp [h0]\\n'},\n", + " {'prompt': 'GOAL R : Type u_1,\\t_inst_1 : integral_domain R,\\t_inst_2 : gcd_monoid R,\\tr : R,\\th0 : ¬r = 0\\t⊢ (⇑polynomial.C r).support.gcd (⇑polynomial.C r).coeff = ⇑normalize r\\n PROOFSTEP ',\n", + " 'completion': ' have h : (C r).support = {0} := support_monomial _ _ h0\\n'},\n", + " {'prompt': 'GOAL R : Type u_5,\\t_inst_5 : integral_domain R\\t⊢ 1 ∈ primitive_roots 1 R ∧ ∀ (x : R), x ∈ primitive_roots 1 R → x = 1\\n PROOFSTEP ',\n", + " 'completion': ' split\\n'},\n", + " {'prompt': 'GOAL a b : cardinal\\t⊢ a * b = a ↔ linear_order.max ω b ≤ a ∧ b ≠ 0 ∨ b = 1 ∨ a = 0\\n PROOFSTEP ',\n", + " 'completion': ' rw [max_le_iff]\\n'},\n", + " {'prompt': 'GOAL a b : cardinal,\\th : a * b = a,\\tha : ω ≤ a,\\tthis : a ≠ 0,\\thb : a < b\\t⊢ false\\n PROOFSTEP ',\n", + " 'completion': ' apply ne_of_gt _ h\\n'},\n", + " {'prompt': 'GOAL n m : ℕ,\\th : n * m = n,\\thb : 1 ≤ m,\\th2a : 1 ≤ n,\\th2b : 1 < m\\t⊢ false\\n 
PROOFSTEP ',\n", + " 'completion': ' apply ne_of_gt _ h\\n'},\n", + " {'prompt': 'GOAL S : Type u,\\t_inst_1 : pgame.state S,\\tn : ℕ,\\ts : S,\\th : pgame.state.turn_bound s ≤ n\\t⊢ fintype {t // t ∈ pgame.state.L s}\\n PROOFSTEP ',\n", + " 'completion': ' apply_instance\\n'},\n", + " {'prompt': \"GOAL α : Type u_1,\\tl₁ l₂ : lists' α bool.tt,\\tH : ∀ (a : lists α), a ∈ l₁.to_list → a ∈ l₂\\t⊢ l₁ ⊆ l₂\\n PROOFSTEP \",\n", + " 'completion': ' rw ← of_to_list l₁\\n'},\n", + " {'prompt': \"GOAL α : Type u_1,\\tl₁ l₂ : lists' α bool.tt\\t⊢ (∀ (a : lists α), a ∈ l₁.to_list → a ∈ l₂) → lists'.of_list l₁.to_list ⊆ l₂\\n PROOFSTEP \",\n", + " 'completion': ' induction to_list l₁\\n'},\n", + " {'prompt': \"GOAL α : Type u_1,\\tl₁ l₂ : lists' α bool.tt\\t⊢ (∀ (a : lists α), a ∈ l₁.to_list → a ∈ l₂) → lists'.of_list l₁.to_list ⊆ l₂\\n PROOFSTEP \",\n", + " 'completion': ' induction to_list l₁; intro\\n'},\n", + " {'prompt': \"GOAL α : Type u_1,\\tl₁ l₂ : lists' α bool.tt\\t⊢ (∀ (a : lists α), a ∈ list.nil → a ∈ l₂) → lists'.of_list list.nil ⊆ l₂\\n PROOFSTEP \",\n", + " 'completion': ' intro\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : ordered_semiring α,\\ta b : α,\\tha : a ≤ 0,\\thb : b = 0\\t⊢ a + b ≤ 0\\n PROOFSTEP ',\n", + " 'completion': ' simp *\\n'},\n", + " {'prompt': 'GOAL X : Type u_1,\\t_inst_1 : topological_space X,\\tα : Type u_2,\\tf : locally_constant X α,\\tx : X\\t⊢ (⇑(f.locally_constant_lift) ∘ f.discrete_quotient.proj) x = ⇑f x\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : topological_space α,\\tβ : Type u_2,\\t_inst_2 : preorder β,\\tf : α → β\\t⊢ upper_semicontinuous_on f set.univ ↔ upper_semicontinuous f\\n PROOFSTEP ',\n", + " 'completion': ' simp [upper_semicontinuous_on, upper_semicontinuous, upper_semicontinuous_within_at_univ_iff]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : uniform_space α,\\ts : set (α × α),\\ths : s ∈ 𝓤 α\\t⊢ 𝓤 α ≤ (𝓟 ∘ closure) s\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\t_inst_1 : uniform_space α,\\ts : set (α × α),\\ths : s ∈ 𝓤 α\\t⊢ 𝓤 α ≤ (𝓟 ∘ closure) s\\n PROOFSTEP ',\n", + " 'completion': ' simp; filter_upwards [hs] subset_closure\\n'},\n", + " {'prompt': 'GOAL x y : ↥circle,\\thxy : x ≠ y\\t⊢ (λ (f : C(↥circle, ℂ)), ⇑f) (fourier 1) x ≠ (λ (f : C(↥circle, ℂ)), ⇑f) (fourier 1) y\\n PROOFSTEP ',\n", + " 'completion': ' simp [hxy]\\n'},\n", + " {'prompt': 'GOAL x y z : ℝ,\\thx : 0 < x,\\thy : 0 < y\\t⊢ x < y ^ z ↔ real.log x < z * real.log y\\n PROOFSTEP ',\n", + " 'completion': ' rw [←real.log_lt_log_iff hx (real.rpow_pos_of_pos hy z), real.log_rpow hy]\\n'},\n", + " {'prompt': 'GOAL C : Type u,\\t_inst_1 : category_theory.category C,\\t_inst_2 : category_theory.limits.has_finite_limits C,\\tJ : Type v,\\t_x : decidable_eq J,\\t_x : fintype J\\t⊢ category_theory.limits.has_limits_of_shape (category_theory.limits.wide_pullback_shape J) C\\n PROOFSTEP ',\n", + " 'completion': ' exactI has_finite_limits.out _\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tσ : Type v,\\tM : NFA α σ,\\tS : set σ,\\ta : σ\\t⊢ a ∈ M.to_ε_NFA.ε_closure S ↔ a ∈ S\\n PROOFSTEP ',\n", + " 'completion': ' split\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tσ : Type v,\\tM : NFA α σ,\\tS : set σ,\\ta : σ\\t⊢ a ∈ M.to_ε_NFA.ε_closure S → a ∈ S\\n PROOFSTEP ',\n", + " 'completion': ' rintro ( ⟨ _, _, h ⟩ | ⟨ _, _, _, h, _ ⟩ )\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tσ : Type v,\\tM : NFA α σ,\\tS : set σ,\\ta : σ\\t⊢ a ∈ S → a ∈ M.to_ε_NFA.ε_closure 
S\\n PROOFSTEP ',\n", + " 'completion': ' intro h\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\tσ : Type v,\\tM : NFA α σ,\\tS : set σ,\\ta : σ,\\th : a ∈ S\\t⊢ a ∈ S\\n PROOFSTEP ',\n", + " 'completion': ' exact h\\n'},\n", + " {'prompt': 'GOAL α : Sort u,\\tβ : Sort v,\\tp : α → Prop,\\tf : α ≃ β,\\tx : α\\t⊢ p x ↔ p (⇑(f.symm) (⇑f x))\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'},\n", + " {'prompt': 'GOAL α : Type ?,\\tp : α → Prop,\\t_inst_1 : decidable_pred p,\\ta : α,\\th : p a\\t⊢ sum.elim coe coe (sum.inl ⟨a, h⟩) = a\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL α : Type ?,\\tp : α → Prop,\\t_inst_1 : decidable_pred p,\\ta : α,\\th : ¬p a\\t⊢ sum.elim coe coe (sum.inr ⟨a, h⟩) = a\\n PROOFSTEP ',\n", + " 'completion': ' refl\\n'},\n", + " {'prompt': 'GOAL α : Type u,\\t_inst_1 : decidable_eq α,\\txs : list α,\\th : ∀ (x : α), x ∈ xs\\t⊢ ∀ (x : α), x ∈ xs.erase_dup\\n PROOFSTEP ',\n", + " 'completion': ' simp *\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\ts : finset α,\\t_inst_1 : decidable_eq α,\\tthis : s.diag.card + s.off_diag.card = s.card * s.card\\t⊢ s.off_diag.card = s.card * s.card - s.diag.card\\n PROOFSTEP ',\n", + " 'completion': ' finish\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\ts : finset α,\\t_inst_1 : decidable_eq α,\\tthis : s.diag.card + s.off_diag.card = s.card * s.card\\t⊢ s.off_diag.card = s.card * s.card - s.card\\n PROOFSTEP ',\n", + " 'completion': ' nth_rewrite 2 ← s.diag_card\\n'},\n", + " {'prompt': 'GOAL l n m k : ℕ,\\th : n ≤ k\\t⊢ k ∈ finset.Ico n m \\\\ finset.Ico n l ↔ k ∈ finset.Ico (linear_order.max n l) m\\n PROOFSTEP ',\n", + " 'completion': ' simp [h, and_comm]\\n'},\n", + " {'prompt': 'GOAL l n m k : ℕ,\\th : ¬n ≤ k\\t⊢ k ∈ finset.Ico n m \\\\ finset.Ico n l ↔ k ∈ finset.Ico (linear_order.max n l) m\\n PROOFSTEP ',\n", + " 'completion': ' simp [h, and_comm]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tM : Type u_5,\\t_inst_1 : has_zero M,\\ta : α,\\tb : M\\t⊢ ⇑(finsupp.single a b) = {a}.indicator (λ (_x : α), b)\\n PROOFSTEP ',\n", + " 'completion': ' ext\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tm n : α →₀ ℕ,\\th : m < n\\t⊢ m.sum (λ (_x : α), id) < n.sum (λ (_x : α), id)\\n PROOFSTEP ',\n", + " 'completion': ' rw [← card_to_multiset, ← card_to_multiset]\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tM : Type u_5,\\t_inst_1 : has_zero M,\\ta₁ a₂ : α,\\tb : M\\t⊢ ⇑(finsupp.single a₁ b) a₂ = ⇑(finsupp.single a₂ b) a₁\\n PROOFSTEP ',\n", + " 'completion': ' simp only [single_apply]; ac_refl\\n'},\n", + " {'prompt': 'GOAL α : Type u_1,\\tM : Type u_5,\\t_inst_1 : has_zero M,\\ta₁ a₂ : α,\\tb : M\\t⊢ ⇑(finsupp.single a₁ b) a₂ = ⇑(finsupp.single a₂ b) a₁\\n PROOFSTEP ',\n", + " 'completion': ' simp only [single_apply]\\n'},\n", + " {'prompt': 'GOAL a b m n : ℤ,\\thmn : m.nat_abs.coprime n.nat_abs,\\th : m ∣ b - a ∧ n ∣ b - a\\t⊢ m ∣ b - a\\n PROOFSTEP ',\n", + " 'completion': ' tauto\\n'},\n", + " ...]" + ] + }, + "execution_count": 114, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "hard_ones" + ] + }, + { + "cell_type": "code", + "execution_count": 115, + "id": "9e36f8d7", + "metadata": {}, + "outputs": [], + "source": [ + "json_string = json.dumps(hard_ones)\n", + "with open('json_data__hard_ones.json', 'w') as outfile:\n", + " outfile.write(json_string)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "55e194d7", + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "with open('json_data__hard_ones.json', 'r') as outfile:\n", + " f = 
outfile.readlines()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "a5d40566", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "1" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(f)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "6b1998bc", + "metadata": {}, + "outputs": [], + "source": [ + "data = json.loads(f[0])" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "da1ecb7b", + "metadata": {}, + "outputs": [], + "source": [ + "# hard" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "25ccb998", + "metadata": {}, + "outputs": [], + "source": [ + "all_d = ['<|startoftext|>' + ' ' + i[\"prompt\"].replace(\"GOAL\",\"\").replace(\"PROOFSTEP\",\"\") + ' ' for i in data]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "07baa143", + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "import torch" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "4ce23203", + "metadata": {}, + "outputs": [], + "source": [ + "results = []\n", + "hard = []\n", + "model.cuda()\n", + "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "83315a58", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[]" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "results" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "4378a27e", + "metadata": {}, + "outputs": [], + "source": [ + "hard = []" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "eead5bc1", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end 
generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 500, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 512, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 409, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 405, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 409, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 442, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for 
open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 418, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting 
`pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 513, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 425, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 402, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 525, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 514, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 428, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 488, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 441, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 402, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 454, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 437, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 545, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 479, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 479, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 479, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 455, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 455, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 452, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 452, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 507, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", 
+ "Input length of input_ids is 443, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 457, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 461, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 517, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 490, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 427, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 464, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 652, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 469, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 469, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 628, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 501, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 411, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 488, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 401, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 436, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 628, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 629, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 418, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 413, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 422, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 703, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 478, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 530, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 422, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 408, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 738, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 524, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 542, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 519, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 580, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 558, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 740, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 752, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 504, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 403, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 714, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 758, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 418, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 594, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 471, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 480, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 620, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 620, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 410, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 791, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 548, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 547, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 447, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 432, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 523, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 405, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 576, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 400, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 405, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 402, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n",
+      "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n",
+      "Input length of input_ids is 459, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 458, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 834, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 834, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 450, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 460, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 420, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 513, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 602, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 536, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 436, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 479, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 490, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 468, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 581, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 526, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 440, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 444, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 443, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 977, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 452, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1003, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 604, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 658, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 658, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 470, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 474, but ``max_length`` is set to 400. 
This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 582, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 488, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 515, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 480, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 501, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 497, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 431, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 493, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 603, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 420, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 614, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 670, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 474, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 497, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 481, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 436, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 471, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 678, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 663, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 824, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 503, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 973, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 603, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 480, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 639, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 559, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 479, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 550, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 526, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1062, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 695, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 487, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 609, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 612, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 599, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 847, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 462, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 489, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 484, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 500, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 667, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 618, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 632, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 725, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 494, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 496, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 495, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 490, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 963, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 544, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 489, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 628, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 597, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 704, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 501, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 578, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 590, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 529, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 529, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 529, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 529, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 529, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 528, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 528, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 528, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 528, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 529, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 531, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 606, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 606, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 714, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 521, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 505, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 636, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 649, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 576, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 589, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 518, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 520, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 518, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 640, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 534, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 640, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 636, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 603, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 539, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 539, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 539, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 539, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 633, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 632, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 548, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 524, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 684, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 544, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 544, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 544, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 544, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 544, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 544, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 544, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 544, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 550, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 613, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 724, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 578, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 578, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 578, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 510, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 526, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 479, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 547, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 614, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 526, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 639, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 638, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 655, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 554, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1115, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 528, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 530, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 551, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 551, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 551, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 551, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 551, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 551, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 551, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 551, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 658, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 518, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 653, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 581, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 647, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 540, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 561, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 542, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 543, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 547, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 566, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 600, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 741, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
+ "[stderr output truncated: this cell repeatedly logs `Setting \`pad_token_id\` to \`eos_token_id\`:50256 for open-end generation.` followed by `Input length of input_ids is N, but \`\`max_length\`\` is set to 400. This can lead to unexpected behavior. You should consider increasing \`\`config.max_length\`\` or \`\`max_length\`\`.`, with prompt lengths N ranging from roughly 430 to 1750 tokens.]\n"
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 870, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1048, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1040, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 757, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1976, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1186, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1242, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1289, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1188, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 752, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1188, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1060, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 965, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1190, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1064, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 879, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1322, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 735, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1343, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 937, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 924, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 945, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 944, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1173, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 936, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 966, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 954, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 956, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 964, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 900, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 970, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 1243, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 988, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 987, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 980, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 975, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 985, but ``max_length`` is set to 400. This can lead to unexpected behavior. You should consider increasing ``config.max_length`` or ``max_length``.\n", + "Token indices sequence length is longer than the specified maximum sequence length for this model (2259 > 2048). Running this sequence through the model will result in indexing errors\n", + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n", + "Input length of input_ids is 2259, but ``max_length`` is set to 400. This can lead to unexpected behavior. 
You should consider increasing ``config.max_length`` or ``max_length``.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [96,0,0] Assertion `srcIndex < srcSelectDimSize` failed.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [97,0,0] Assertion `srcIndex < srcSelectDimSize` failed.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [98,0,0] Assertion `srcIndex < srcSelectDimSize` failed.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [99,0,0] Assertion `srcIndex < srcSelectDimSize` failed.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [100,0,0] Assertion `srcIndex < srcSelectDimSize` failed.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [101,0,0] Assertion `srcIndex < srcSelectDimSize` failed.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [102,0,0] Assertion `srcIndex < srcSelectDimSize` failed.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [103,0,0] Assertion `srcIndex < srcSelectDimSize` failed.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [104,0,0] Assertion `srcIndex < srcSelectDimSize` failed.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [105,0,0] Assertion `srcIndex < srcSelectDimSize` failed.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [106,0,0] Assertion `srcIndex < srcSelectDimSize` failed.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [107,0,0] Assertion `srcIndex < srcSelectDimSize` failed.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [108,0,0] Assertion `srcIndex < srcSelectDimSize` failed.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [109,0,0] Assertion `srcIndex < srcSelectDimSize` failed.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [110,0,0] Assertion `srcIndex < srcSelectDimSize` failed.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [111,0,0] Assertion `srcIndex < srcSelectDimSize` failed.\n", + "/opt/conda/conda-bld/pytorch_1646755903507/work/aten/src/ATen/native/cuda/Indexing.cu:703: indexSelectLargeIndex: block: [130,0,0], thread: [112,0,0] Assertion `srcIndex < srcSelectDimSize` 
failed.\n", + "... (the same CUDA indexing assertion repeats for threads [97,0,0] through [127,0,0] of block [130,0,0]) ...\n" + ] + } + ], + "source": [ + "# For each formatted prompt in all_d (paired with its record in data), sample\n", + "# 15 candidate proof steps from the GPT-Neo model at temperature 0.9.\n", + "# Prompts that raise an error (e.g. exceeding the model's context window)\n", + "# are collected in hard instead of results.\n", + "for i,j in zip(all_d,data):\n", + " tr = tokenizer(i, return_tensors=\"pt\",padding=True).input_ids\n", + "# print(len(hard),len(results))\n", + " try:\n", + " tr = tr.to(device)\n", + " gen_tokens = model.generate(\n", + " tr,\n", + " do_sample=True,\n", + " temperature=0.9,\n", + " max_length=400, \n", + " num_return_sequences=15, \n", + " )\n", + " gen_text = tokenizer.batch_decode(gen_tokens)\n", + " 
results.append((gen_text,j))\n", + " except:\n", + " hard.append(j)\n", + "# break" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "42c42555", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(136, 6233)" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fa4d5512", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "24fc9eb1", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9bc6cd5e", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 29, + "id": "6eb8025b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['<|startoftext|> ⊢ fourier_subalgebra.separates_points\\n choose hf0 hf1 using fourier_subalgebra.one_subalgebra\\n<|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|>',\n", + " '<|startoftext|> ⊢ fourier_subalgebra.separates_points\\n convert fourier_subalgebra.div_add_div_self_submodule fourier_subalgebra.zero_mem\\n<|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|>',\n", + " '<|startoftext|> ⊢ fourier_subalgebra.separates_points\\n rw [mul_comm, subalgebra.prod_mk, 
mul_comm]\\n<|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|>',\n", + " \"<|startoftext|> ⊢ fourier_subalgebra.separates_points\\n agram_norm_subsubalgebra_eq_zero_iff_separates_points_eq_div' (@zero_subalgebra ℝ _ ⊤ _)\\n<|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|>\",\n", + " '<|startoftext|> ⊢ fourier_subalgebra.separates_points\\n convert fourier_subalgebra.subalgebra_closure_subalgebra\\n<|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|>',\n", + " '<|startoftext|> ⊢ fourier_subalgebra.separates_points\\n 
apply_instance\\n<|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|>',\n", + " '<|startoftext|> ⊢ fourier_subalgebra.separates_points\\n omorph.bijective (mul_self_ne_zero ∘ ρ : subalgebra ℂ (fin (fintype (by norm_num))))\\n<|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|>',\n", + " '<|startoftext|> ⊢ fourier_subalgebra.separates_points\\n rw ← 
subalgebra.separates_points_iff_right\\n<|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|>',\n", + " '<|startoftext|> ⊢ fourier_subalgebra.separates_points\\n exact fourier_separates_points\\n<|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|>',\n", + " '<|startoftext|> ⊢ fourier_subalgebra.separates_points\\n exact 
subalgebra.separates_points_of_separate_eq\\n<|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|>',\n", + " \"<|startoftext|> ⊢ fourier_subalgebra.separates_points\\n choose s hs using fourier_subalgebra.to_subalgebra.map_separates_points'\\n<|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|>\",\n", + " '<|startoftext|> ⊢ fourier_subalgebra.separates_points\\n choice : subalgebra ℝ ℂ :=clear_all_squares ℝ (λ x y, subalgebra.ι ℂ (x : ℂ × ℂ)) (λ (p : ℂ × ℂ), p.fst) (λ (i : fin (p.2.2 + 1)), (λ (i : fin 1), (x, -y)) (i.2.2, i.2))\\n<|endoftext|>',\n", + " '<|startoftext|> ⊢ fourier_subalgebra.separates_points\\n have subalgebra : (finset.univ : finset ℕ) ⊆ ℤ := 
subalgebra.of_finset_attach\\n<|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|>',\n", + " '<|startoftext|> ⊢ fourier_subalgebra.separates_points\\n rw [eq_one_of_is_algebraic_one_set, one_subalgebra]; exact fourier_subalgebra_eq_one\\n<|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|>',\n", + " '<|startoftext|> ⊢ fourier_subalgebra.separates_points\\n apply_instance\\n<|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|><|endoftext|>']" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "results[0][0] " + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "db5e7574", + 
"metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(6369, 895)" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(hard),len(results)" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "id": "51e0ba88", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'prompt': 'GOAL ⊢ fourier_subalgebra.separates_points\\n PROOFSTEP ',\n", + " 'completion': ' intros x y hxy\\n'}" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "results[0][1] " + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "id": "138909b9", + "metadata": {}, + "outputs": [], + "source": [ + "final_results = [[i.split(\"\")[-1].replace(\"<|endoftext|>\",\"\")for i in j[0] ] for j in results]" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "id": "5f88d823", + "metadata": {}, + "outputs": [], + "source": [ + "all_final = [ {**{\"results\":j}, **i[1]} for i,j in zip(results,final_results)]" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "id": "5f449674", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'results': [' rw [map_smul, smul_re, I2, smul_im, smul_re, smul_im, one_smul, smul_re, smul_im, ← map_smul, ← one_im, ← one_im, mul_smul, ← mul_smul, ← one_im, ← mul_re, ← mul_re, ← eq_comm]\\n',\n", + " ' simp [smul_re, smul_im, mul_add, add_comm, add_left_comm, smul_eq_mul, smul_eq_mul, im, one_smul, smul_eq_mul]\\n',\n", + " ' simp [smul_re, smul_im, one_smul, mul_assoc]\\n',\n", + " ' simp only [smul_re, smul_im, smul_I, smul_re_im, smul_im_symm, one_smul]\\n',\n", + " ' simp\\n',\n", + " ' simp only [smul_eq_mul, smul_eq_mul, Ival_re_im, smul_im, smul_re]\\n',\n", + " ' rw [smul_re, smul_im, smul_re, smul_im, smul_im, mul_one]\\n',\n", + " ' simp\\n',\n", + " ' rw [smul_re, smul_im, ←smul_re, ←smul_im, ←smul_re, add_assoc, ring_hom.map_add]\\n',\n", + " ' rw [smul_one, smul_re, smul_im, smul_im, mul_smul, smul_two, one_smul, smul_two, one_smul]\\n',\n", + " ' simp [smul_re, smul_im, mul_assoc, mul_comm z, smul_im]\\n',\n", + " ' simp only [mul_left_comm r, one_smul]\\n',\n", + " ' simp only [smul_re, one_smul, im_im, one_smul]\\n',\n", + " ' rw [one_smul, smul_left, ← smul_assoc, ← one_smul]\\n',\n", + " ' rw one_smul\\n'],\n", + " 'prompt': 'GOAL K : Type u_1,\\t_inst_1 : is_R_or_C K,\\tr : ℝ,\\tz : K\\t⊢ r • 1 * ⇑is_R_or_C.re z • 1 - r • 1 * ⇑is_R_or_C.im z • 1 * is_R_or_C.I = r • ⇑is_R_or_C.re z • 1 - r • (⇑is_R_or_C.im z • 1 * is_R_or_C.I)\\n PROOFSTEP ',\n", + " 'completion': ' simp\\n'}" + ] + }, + "execution_count": 38, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "all_final[2500]" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "id": "30778f39", + "metadata": {}, + "outputs": [], + "source": [ + "json_string = json.dumps(all_final)\n", + "with open('json_data_1.json', 'w') as outfile:\n", + " outfile.write(json_string)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0d4d52bc", + "metadata": {}, + "outputs": [], + "source": [ + "np.zeros()" + ] + }, + { + "cell_type": "code", + "execution_count": 92, + "id": "e228c251", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[' convert read_inj b\\n',\n", + " ' dsimp [read] at h\\n',\n", + " ' \\n',\n", + " ' contradiction\\n',\n", + " ' subst i\\n',\n", + " ' dsimp [read]\\n',\n", + " ' dsimp only [read, dsimp] at h\\n',\n", + " ' subst i\\n',\n", + " 
' contradiction\\n',\n", + " \" rw [array.read_write', array.write'_write, buffer.write_mem, array.write_mem, h]\\n\",\n", + " \" convert b.read'\\n\",\n", + " ' delta read\\n',\n", + " ' dsimp [read]\\n',\n", + " ' simp! * at *\\n',\n", + " ' subst i\\n']" + ] + }, + "execution_count": 92, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "final_results[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 91, + "id": "c5d82bfb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'prompt': \"GOAL α : Type u,\\t_inst_1 : inhabited α,\\tb : buffer α,\\ti : ℕ,\\th : i < b.size\\t⊢ b.read ⟨i, h⟩ = b.read' i\\n PROOFSTEP \",\n", + " 'completion': \" cases b; unfold read read'; simp [array.read_eq_read']\\n\"}" + ] + }, + "execution_count": 91, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "results[0][1]" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "id": "982d8779", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'prompt': \"GOAL α : Type u_1,\\t_inst_1 : measurable_space α,\\tμ : measure_theory.measure α,\\tE : Type u_2,\\t_inst_2 : normed_group E,\\t_inst_3 : normed_space ℝ E,\\t_inst_4 : complete_space E,\\t_inst_5 : topological_space.second_countable_topology E,\\t_inst_6 : measurable_space E,\\t_inst_7 : borel_space E,\\tH : Type u_3,\\t_inst_8 : normed_group H,\\t_inst_9 : normed_space ℝ H,\\t_inst_10 : topological_space.second_countable_topology (H →L[ℝ] E),\\tF : H → α → E,\\tF' : α → (H →L[ℝ] E),\\tx₀ : H,\\tbound : α → ℝ,\\tε : ℝ,\\tε_pos : 0 < ε,\\thF_meas : ∀ (x : H), x ∈ metric.ball x₀ ε → ae_measurable (F x) μ,\\thF_int : measure_theory.integrable (F x₀) μ,\\thF'_meas : ae_measurable F' μ,\\th_lipsch : ∀ᵐ (a : α) ∂μ, lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε),\\tbound_integrable : measure_theory.integrable bound μ,\\th_diff : ∀ᵐ (a : α) ∂μ, has_fderiv_at (λ (x : H), F x a) (F' a) x₀,\\tx₀_in : x₀ ∈ metric.ball x₀ ε,\\tnneg : ∀ (x : H), 0 ≤ ∥x - x₀∥⁻¹,\\tb : α → ℝ := λ (a : α), abs (bound a),\\tb_int : measure_theory.integrable b μ,\\tb_nonneg : ∀ (a : α), 0 ≤ b a,\\tx : H,\\tx_in : x ∈ metric.ball x₀ ε\\t⊢ ∀ (x_1 : α), (λ (a : α), lipschitz_on_with (⇑real.nnabs (bound a)) (λ (x : H), F x a) (metric.ball x₀ ε)) x_1 → ∥F x₀ x_1 - F x x_1∥ ≤ ε * ∥bound x_1∥\\n PROOFSTEP \",\n", + " 'completion': ' intros a ha\\n'}" + ] + }, + "execution_count": 38, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "hard[55" + ] + }, + { + "cell_type": "code", + "execution_count": 78, + "id": "1d01e0ec", + "metadata": {}, + "outputs": [], + "source": [ + "i = hard[1000]\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 80, + "id": "31a556ca", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'prompt': 'GOAL α : Type u_1,\\tβ : Type u_2,\\tσ : Type u_3,\\t_inst_1 : primcodable α,\\t_inst_2 : primcodable β,\\t_inst_3 : primcodable σ,\\tf : α → β → σ\\t⊢ ∀ (a : option α) (b : option β), option.map (λ (p : α × β), f p.fst p.snd) (a.bind (λ (a : α), option.map (prod.mk a) b)) = a.bind (λ (a : α), option.map (f a) b)\\n PROOFSTEP ',\n", + " 'completion': ' intros; cases a\\n'}" + ] + }, + "execution_count": 80, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "i" + ] + }, + { + "cell_type": "code", + "execution_count": 81, + "id": "465bbef7", + "metadata": {}, + "outputs": [], + "source": [ + "tr = '<|startoftext|>' + ' ' + 
i[\"prompt\"].replace(\"GOAL\",\"\").replace(\"PROOFSTEP\",\"\") + ' '" + ] + }, + { + "cell_type": "code", + "execution_count": 82, + "id": "fc56d3b5", + "metadata": {}, + "outputs": [], + "source": [ + "tr = tokenizer(tr, return_tensors=\"pt\",padding=True).input_ids" + ] + }, + { + "cell_type": "code", + "execution_count": 83, + "id": "81f3d7b9", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Setting `pad_token_id` to `eos_token_id`:50256 for open-end generation.\n" + ] + } + ], + "source": [ + "gen_tokens = model.generate(\n", + " tr,\n", + " do_sample=True,\n", + " temperature=0.5,\n", + " max_length=200, \n", + " num_return_sequences=15, \n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": 84, + "id": "0710cfd8", + "metadata": {}, + "outputs": [], + "source": [ + "gen_text = tokenizer.batch_decode(gen_tokens)" + ] + }, + { + "cell_type": "code", + "execution_count": 85, + "id": "aaf2f2e5", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " finish\n", + "\n", + " simp [f, map]\n", + "\n", + " rintros a b; simp [f, map_map]\n", + "\n", + " rw [option.bind, option.map_none']\n", + "\n", + " intros\n", + "\n", + " simp [pure_eq_map]\n", + "\n", + " intros\n", + "\n", + " intros\n", + "\n", + " rw [option.bind, option.map_none', option.bind]\n", + "\n", + " xt\n", + "\n", + " intros\n", + "\n", + " rintros a b; simp [fst_id, map_id]\n", + "\n", + " rintros a b ⟨a', b', h⟩\n", + "\n", + " intros\n", + "\n", + " rw [option.bind_eq_map, option.bind_option_map]\n", + "\n" + ] + } + ], + "source": [ + "for i in gen_text:\n", + " print(i.split(\"\")[-1].replace(\"<|endoftext|>\",\"\"))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dde932cf", + "metadata": {}, + "outputs": [], + "source": [ + "1+1" + ] + }, + { + "cell_type": "code", + "execution_count": 103, + "id": "388ac197", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[\"<|startoftext|> GOAL α : Type u,\\tb : buffer α,\\ti : ℕ,\\th : i < b.size,\\tv : α\\t⊢ b.write ⟨i, h⟩ v = b.write' i v\\n PROOFSTEP refl\\n<|endoftext|>\"]" + ] + }, + "execution_count": 103, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "gen_text = tokenizer.batch_decode(gen_tokens)\n", + "gen_text" + ] + }, + { + "cell_type": "code", + "execution_count": 140, + "id": "7134a85e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "files_upload/data_test.jsonl\r\n" + ] + } + ], + "source": [ + "ls files_upload/data_test.jsonl" + ] + }, + { + "cell_type": "code", + "execution_count": 143, + "id": "e09bdc22", + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "\n", + "with open('files_upload/data_test.jsonl', 'r') as json_file:\n", + " json_list = [json.loads(i) for i in list(json_file)]" + ] + }, + { + "cell_type": "code", + "execution_count": 144, + "id": "7630818a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'prompt': \"GOAL α : Type u,\\t_inst_1 : inhabited α,\\tb : buffer α,\\ti : ℕ,\\th : i < b.size\\t⊢ b.read ⟨i, h⟩ = b.read' i\\n PROOFSTEP \",\n", + " 'completion': \"cases b; unfold read read'; simp [array.read_eq_read']\\n\"}" + ] + }, + "execution_count": 144, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "json_list[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cd903fec", + "metadata": {}, + "outputs": [], + 
"source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/models/predict_model.py b/src/models/predict_model.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/data/.DS_Store b/src/models/reranker/.DS_Store similarity index 89% rename from src/data/.DS_Store rename to src/models/reranker/.DS_Store index aa0b90f..5008ddf 100644 Binary files a/src/data/.DS_Store and b/src/models/reranker/.DS_Store differ diff --git a/src/models/reranker/reranker-show.ipynb b/src/models/reranker/reranker-show.ipynb new file mode 100644 index 0000000..28717c9 --- /dev/null +++ b/src/models/reranker/reranker-show.ipynb @@ -0,0 +1,4027 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 6, + "id": "ded10377", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "__init__.py arguments.py \u001b[0m\u001b[01;34mdist\u001b[0m/ trainer.py\r\n", + "\u001b[01;34m__pycache__\u001b[0m/ data.py modeling.py\r\n" + ] + } + ], + "source": [ + "ls Reranker/src/reranker" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "8f2eca39", + "metadata": {}, + "outputs": [], + "source": [ + "from Reranker.src.reranker.arguments import ModelArguments, DataArguments, \\\n", + " RerankerTrainingArguments as TrainingArguments" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "efff1461", + "metadata": {}, + "outputs": [], + "source": [ + "from transformers import AutoTokenizer\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "e2fa027b", + "metadata": {}, + "outputs": [], + "source": [ + "from reranker import Reranker \n", + "from transformers import AutoTokenizer" + ] + }, + { + "cell_type": "code", + "execution_count": 48, + "id": "015dfe05", + "metadata": {}, + "outputs": [], + "source": [ + "from typing import Optional\n", + "\n", + "import torch\n", + "import torch.functional as F\n", + "import copy\n", + "from transformers import AutoModelForSequenceClassification, AutoTokenizer,\\\n", + " PreTrainedModel, PreTrainedTokenizer, GPTNeoForSequenceClassification\n", + "\n", + "from transformers.modeling_outputs import SequenceClassifierOutput, BaseModelOutputWithPooling\n", + "from torch import nn\n", + "import torch.distributed as dist\n", + "\n", + "class RerankerForInference(nn.Module):\n", + " def __init__(\n", + " self,\n", + " hf_model: Optional[PreTrainedModel] = None,\n", + " tokenizer: Optional[PreTrainedTokenizer] = None\n", + " ):\n", + " super().__init__()\n", + " self.hf_model = hf_model\n", + " self.tokenizer = tokenizer\n", + "\n", + " def tokenize(self, *args, **kwargs):\n", + " return self.tokenizer(*args, **kwargs)\n", + "\n", + " def forward(self, batch):\n", + " return self.hf_model(**batch)\n", + "\n", + " @classmethod\n", + " def from_pretrained(cls, pretrained_model_name_or_path: str):\n", + " hf_model = GPTNeoForSequenceClassification.from_pretrained(\n", + " pretrained_model_name_or_path,num_labels=1)\n", + " hf_tokenizer = AutoTokenizer.from_pretrained(\"EleutherAI/gpt-neo-125M\", \n", + " bos_token=\"<|startoftext|>\",\n", + " eos_token=\"<|endoftext|>\",\n", + " 
pad_token=\"<|pad|>\")\n", + " hf_model.cuda()\n", + " hf_model.eval()\n", + " return cls(hf_model, hf_tokenizer)\n", + "\n", + " def load_pretrained_model(self, pretrained_model_name_or_path, *model_args, **kwargs):\n", + " self.hf_model = AutoModelForSequenceClassification.from_pretrained(\n", + " pretrained_model_name_or_path, *model_args, **kwargs\n", + " )\n", + "\n", + " def load_pretrained_tokenizer(self, pretrained_model_name_or_path, *inputs, **kwargs):\n", + " self.tokenizer = AutoTokenizer.from_pretrained(\n", + " pretrained_model_name_or_path, *inputs, **kwargs\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": 49, + "id": "01041eba", + "metadata": {}, + "outputs": [], + "source": [ + "data_args ={\n", + " \"train_dir\"\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": 50, + "id": "477fae25", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Some weights of GPTNeoForSequenceClassification were not initialized from the model checkpoint at results/checkpoint-140000/ and are newly initialized: ['score.weight']\n", + "You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", + "Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n", + "Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n" + ] + } + ], + "source": [ + "rk = RerankerForInference.from_pretrained(\"results/checkpoint-140000/\") " + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "3aa5a124", + "metadata": {}, + "outputs": [], + "source": [ + "# rk.hf_model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ecac7e65", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "d88c1af0", + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "\n", + "with open('all_gpt_combined.json', 'r') as json_file:\n", + " json_c = [json.loads(i) for i in list(json_file)][0]" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "7cfc0b5f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "7128" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(json_c)" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "b558698c", + "metadata": {}, + "outputs": [], + "source": [ + "exa = json_c[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "4ec633bf", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0.5" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "exa[\"score\"]" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "3a330205", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'cases i'" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "exa[\"best_match\"]" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "e44d9562", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'cases b'" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "exa[\"real\"]" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "e4a27c2f", + "metadata": {}, + 
"outputs": [ + { + "data": { + "text/plain": [ + "'1'" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "str(hash(1))" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "id": "3d44b938", + "metadata": {}, + "outputs": [], + "source": [ + "def get_embed(exa):\n", + " if exa[\"score\"]!=0:\n", + " positive = [exa[\"best_match\"],exa[\"real\"]]\n", + " negative = [i for i in exa[\"all\"] if i not in positive]\n", + " else:\n", + " negative = exa[\"all\"]\n", + " positive = [exa[\"best_match\"]]\n", + "\n", + " positive_token = [rk.tokenize(i, return_tensors='pt') for i in positive]\n", + " negative_token = [rk.tokenize(i, return_tensors='pt') for i in negative]\n", + " prompt = exa[\"prompt\"].replace(\"GOAL\",\"\").replace(\"PROOFSTEP\",\"\")\n", + " prompt_token = rk.tokenize(prompt, return_tensors='pt').input_ids\n", + " idi = str(hash(prompt))\n", + " return { \"qry\": { \"qid\": idi, \"query\": prompt_token, }, \"pos\": [ {\"pid\": str(hash(i)), \"passage\": j } for i,j in zip(positive,positive_token)] ,\"neg\": [ {\"pid\": str(hash(i)), \"passage\": j } for i,j in zip(negative,negative_token)]}" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "id": "522b13be", + "metadata": {}, + "outputs": [], + "source": [ + "from tqdm import tqdm" + ] + }, + { + "cell_type": "code", + "execution_count": 102, + "id": "3cb57ff0", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100% 7128/7128 [00:13<00:00, 519.24it/s]\n" + ] + } + ], + "source": [ + "json_c_train = []\n", + "\n", + "for i in tqdm(json_c):\n", + " json_c_train.append(get_embed(i))\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0ef34bf3", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 39, + "id": "5fe0d464", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'best_match': 'cases i',\n", + " 'score': 0.5,\n", + " 'real': 'cases b',\n", + " 'all': ['cases i',\n", + " \"apply b.reverse_corec'\",\n", + " 'apply b.append_right_',\n", + " 'apply b',\n", + " 'apply fin',\n", + " 'dsimp [read] at h',\n", + " 'apply_instance',\n", + " 'simp! 
* at *',\n", + " 'dsimp only [read, dsimp] at h',\n", + " \"set i := b.reverse_core i with h'\",\n", + " 'apply b.append_left_inj',\n", + " 'rw [size, h]',\n", + " '⊢ b.read',\n", + " 'apply mem',\n", + " 'simp [append_right]',\n", + " 'delta read',\n", + " 'simp [size, h]',\n", + " 'subst i',\n", + " \"rw [array.read_write', array.write'_write, buffer.write_mem, array.write_mem, h]\",\n", + " 'rw [←append_right_inj]',\n", + " 'simp',\n", + " 'dsimp [read]',\n", + " \"apply b.append_right'\",\n", + " 'set_of_eq_eq_succ_of_eq',\n", + " \"convert b.read'\",\n", + " 'contradiction',\n", + " 'rw [←append_right_inverse_iff, ←',\n", + " \"apply b.read'_eq_of_mem\",\n", + " '_match : ∀ (_a _a_1 :append (list α) ⟨i,',\n", + " 'apply b.append_right',\n", + " 'apply b.append_left_injective',\n", + " '_inst : inhabited α :=',\n", + " 'apply b.reverse_core_eq',\n", + " 'convert read_inj b',\n", + " '⊢ b.reverse_'],\n", + " 'prompt': \"GOAL α : Type u,\\t_inst_1 : inhabited α,\\tb : buffer α,\\ti : ℕ,\\th : i < b.size\\t⊢ b.read ⟨i, h⟩ = b.read' i\\n PROOFSTEP \"}" + ] + }, + "execution_count": 39, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "json_c[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ad4b293b", + "metadata": {}, + "outputs": [], + "source": [ + "inputs = rk.tokenize(json_c[\"promp\"], 'it is cold today in new york', return_tensors='pt')" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "id": "8c40b74e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'qry': {'qid': '-6574462426148653399',\n", + " 'query': tensor([[ 27, 11230, 1847, 29, 26367, 1058, 5994, 334, 11, 197,\n", + " 62, 8625, 62, 16, 1058, 30671, 26367, 11, 197, 65,\n", + " 1058, 11876, 26367, 11, 197, 72, 1058, 2343, 226, 243,\n", + " 11, 197, 71, 1058, 1312, 1279, 275, 13, 7857, 197,\n", + " 158, 232, 95, 275, 13, 961, 2343, 253, 101, 72,\n", + " 11, 289, 158, 253, 102, 796, 275, 13, 961, 6,\n", + " 1312, 198, 1279, 4805, 6684, 37, 42135, 29, 220]])},\n", + " 'pos': [{'pid': '8592327939438459529',\n", + " 'passage': {'input_ids': tensor([[33964, 1312]]), 'attention_mask': tensor([[1, 1]])}},\n", + " {'pid': '-6997269615945621261',\n", + " 'passage': {'input_ids': tensor([[33964, 275]]), 'attention_mask': tensor([[1, 1]])}}],\n", + " 'neg': [{'pid': '-1059230821898650523',\n", + " 'passage': {'input_ids': tensor([[39014, 275, 13, 50188, 62, 7295, 66, 6]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '5979920950827081735',\n", + " 'passage': {'input_ids': tensor([[39014, 275, 13, 33295, 62, 3506, 62]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '5006218284396414035',\n", + " 'passage': {'input_ids': tensor([[39014, 275]]), 'attention_mask': tensor([[1, 1]])}},\n", + " {'pid': '8519025108267685379',\n", + " 'passage': {'input_ids': tensor([[39014, 957]]), 'attention_mask': tensor([[1, 1]])}},\n", + " {'pid': '-2029295515473291484',\n", + " 'passage': {'input_ids': tensor([[ 9310, 11011, 685, 961, 60, 379, 289]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '-7086168812316445213',\n", + " 'passage': {'input_ids': tensor([[39014, 62, 39098]]), 'attention_mask': tensor([[1, 1, 1]])}},\n", + " {'pid': '2194103192624150216',\n", + " 'passage': {'input_ids': tensor([[ 82, 11011, 0, 1635, 379, 1635]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '675688915442226575',\n", + " 'passage': {'input_ids': tensor([[ 9310, 11011, 691, 685, 961, 11, 
288, 82, 11011, 60,\n", + " 379, 289]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '-532060175982832960',\n", + " 'passage': {'input_ids': tensor([[ 2617, 1312, 19039, 275, 13, 50188, 62, 7295, 1312, 351,\n", + " 289, 6]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '-425050867452080408',\n", + " 'passage': {'input_ids': tensor([[39014, 275, 13, 33295, 62, 9464, 62, 259, 73]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '-566168252729545530',\n", + " 'passage': {'input_ids': tensor([[31653, 685, 7857, 11, 289, 60]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '-7671123778674527926',\n", + " 'passage': {'input_ids': tensor([[158, 232, 95, 275, 13, 961]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '1079449569097250766',\n", + " 'passage': {'input_ids': tensor([[39014, 1066]]), 'attention_mask': tensor([[1, 1]])}},\n", + " {'pid': '-6162255274703520873',\n", + " 'passage': {'input_ids': tensor([[ 82, 11011, 685, 33295, 62, 3506, 60]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '1504670812205882816',\n", + " 'passage': {'input_ids': tensor([[ 67, 12514, 1100]]), 'attention_mask': tensor([[1, 1, 1]])}},\n", + " {'pid': '-7837678799220824417',\n", + " 'passage': {'input_ids': tensor([[ 82, 11011, 685, 7857, 11, 289, 60]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '-5594607461795628077',\n", + " 'passage': {'input_ids': tensor([[7266, 301, 1312]]), 'attention_mask': tensor([[1, 1, 1]])}},\n", + " {'pid': '846899338491892079',\n", + " 'passage': {'input_ids': tensor([[31653, 685, 18747, 13, 961, 62, 13564, 3256, 7177, 13,\n", + " 13564, 6, 62, 13564, 11, 11876, 13, 13564, 62, 11883,\n", + " 11, 7177, 13, 13564, 62, 11883, 11, 289, 60]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1]])}},\n", + " {'pid': '34868178422759404',\n", + " 'passage': {'input_ids': tensor([[31653, 685, 29705, 238, 33295, 62, 3506, 62, 259, 73,\n", + " 60]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '-4291723548435929039',\n", + " 'passage': {'input_ids': tensor([[ 82, 11011]]), 'attention_mask': tensor([[1, 1]])}},\n", + " {'pid': '5905105816470661935',\n", + " 'passage': {'input_ids': tensor([[ 9310, 11011, 685, 961, 60]]), 'attention_mask': tensor([[1, 1, 1, 1, 1]])}},\n", + " {'pid': '-240506235358478009',\n", + " 'passage': {'input_ids': tensor([[39014, 275, 13, 33295, 62, 3506, 6]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '-7014387072194972932',\n", + " 'passage': {'input_ids': tensor([[ 2617, 62, 1659, 62, 27363, 62, 27363, 62, 2385, 535,\n", + " 62, 1659, 62, 27363]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '-9163172994500598878',\n", + " 'passage': {'input_ids': tensor([[1102, 1851, 275, 13, 961, 6]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '8180726172081474272',\n", + " 'passage': {'input_ids': tensor([[3642, 6335, 2867]]), 'attention_mask': tensor([[1, 1, 1]])}},\n", + " {'pid': '-5168536336062798448',\n", + " 'passage': {'input_ids': tensor([[31653, 685, 29705, 238, 33295, 62, 3506, 62, 259, 4399,\n", + " 62, 733, 11, 17804, 238]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '3276683918839931364',\n", + " 'passage': {'input_ids': 
tensor([[39014, 275, 13, 961, 6, 62, 27363, 62, 1659, 62,\n", + " 11883]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '-3015987546996079227',\n", + " 'passage': {'input_ids': tensor([[ 62, 15699, 1058, 18872, 222, 44104, 64, 4808, 64, 62,\n", + " 16, 1058, 33295, 357, 4868, 26367, 8, 2343, 253, 101,\n", + " 72, 11]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '4991927682312661887',\n", + " 'passage': {'input_ids': tensor([[39014, 275, 13, 33295, 62, 3506]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '3280197074985918113',\n", + " 'passage': {'input_ids': tensor([[39014, 275, 13, 33295, 62, 9464, 62, 259, 752, 425]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '-553055782530395591',\n", + " 'passage': {'input_ids': tensor([[ 62, 8625, 1058, 30671, 26367, 19039]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '8547743317388738779',\n", + " 'passage': {'input_ids': tensor([[39014, 275, 13, 50188, 62, 7295, 62, 27363]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '-2125485750154732633',\n", + " 'passage': {'input_ids': tensor([[1102, 1851, 1100, 62, 259, 73, 275]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1]])}},\n", + " {'pid': '-7360264406821898777',\n", + " 'passage': {'input_ids': tensor([[ 158, 232, 95, 275, 13, 50188, 62]]), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1]])}}]}" + ] + }, + "execution_count": 38, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "json_c_train[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "de050b25", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1490d724", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e85189a4", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e6a76609", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 237, + "id": "7f88b760", + "metadata": {}, + "outputs": [], + "source": [ + "from sklearn.model_selection import train_test_split" + ] + }, + { + "cell_type": "code", + "execution_count": 238, + "id": "9198827f", + "metadata": {}, + "outputs": [], + "source": [ + "X_train,x_dev = train_test_split(json_c_train,test_size= 0.15)" + ] + }, + { + "cell_type": "code", + "execution_count": 239, + "id": "6f0d0adf", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "6058" + ] + }, + "execution_count": 239, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(X_train)" + ] + }, + { + "cell_type": "code", + "execution_count": 240, + "id": "e3e6353b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'qry': {'qid': '918643313112262782',\n", + " 'query': [27,\n", + " 11230,\n", + " 1847,\n", + " 29,\n", + " 299,\n", + " 1058,\n", + " 2343,\n", + " 226,\n", + " 243,\n", + " 11,\n", + " 197,\n", + " 37,\n", + " 1058,\n", + " 2099,\n", + " 35138,\n", + " 357,\n", + " 77,\n", + " 1343,\n", + " 352,\n", + " 8,\n", + " 15168,\n", + " 5994,\n", + " 334,\n", + " 11,\n", + " 197,\n", + " 62,\n", + " 8625,\n", + " 62,\n", + " 16,\n", + " 1058,\n", + " 285,\n", + " 85,\n", + " 12543,\n", + " 2715,\n", + " 376,\n", + " 11,\n", + " 197,\n", + " 80,\n", + " 
1058,\n", + " 285,\n", + " 85,\n", + " 80,\n", + " 79,\n", + " 69,\n", + " 376,\n", + " 11,\n", + " 197,\n", + " 17394,\n", + " 1058,\n", + " 2099,\n", + " 35138,\n", + " 299,\n", + " 11,\n", + " 197,\n", + " 81,\n", + " 1058,\n", + " 285,\n", + " 85,\n", + " 80,\n", + " 79,\n", + " 69,\n", + " 13,\n", + " 1073,\n", + " 13049,\n", + " 376,\n", + " 26367,\n", + " 15168,\n", + " 285,\n", + " 85,\n", + " 80,\n", + " 79,\n", + " 69,\n", + " 13,\n", + " 1073,\n", + " 13049,\n", + " 376,\n", + " 26367,\n", + " 15168,\n", + " 8772,\n", + " 11,\n", + " 197,\n", + " 71,\n", + " 1058,\n", + " 18872,\n", + " 222,\n", + " 357,\n", + " 87,\n", + " 331,\n", + " 1058,\n", + " 285,\n", + " 85,\n", + " 80,\n", + " 79,\n", + " 69,\n", + " 13,\n", + " 1073,\n", + " 13049,\n", + " 376,\n", + " 26367,\n", + " 828,\n", + " 374,\n", + " 2124,\n", + " 331,\n", + " 15168,\n", + " 285,\n", + " 85,\n", + " 12543,\n", + " 2715,\n", + " 13,\n", + " 26282,\n", + " 81,\n", + " 357,\n", + " 17394,\n", + " 13,\n", + " 2411,\n", + " 62,\n", + " 12957,\n", + " 374,\n", + " 8,\n", + " 2124,\n", + " 13,\n", + " 16520,\n", + " 331,\n", + " 13,\n", + " 16520,\n", + " 11,\n", + " 197,\n", + " 87,\n", + " 331,\n", + " 1058,\n", + " 285,\n", + " 85,\n", + " 80,\n", + " 79,\n", + " 69,\n", + " 13,\n", + " 1073,\n", + " 13049,\n", + " 376,\n", + " 26367,\n", + " 11,\n", + " 197,\n", + " 81,\n", + " 5431,\n", + " 1058,\n", + " 374,\n", + " 2124,\n", + " 331,\n", + " 11,\n", + " 197,\n", + " 64,\n", + " 1058,\n", + " 357,\n", + " 76,\n", + " 85,\n", + " 80,\n", + " 79,\n", + " 69,\n", + " 13,\n", + " 47,\n", + " 376,\n", + " 737,\n", + " 32,\n", + " 11,\n", + " 197,\n", + " 69,\n", + " 158,\n", + " 224,\n", + " 222,\n", + " 277,\n", + " 158,\n", + " 224,\n", + " 223,\n", + " 1058,\n", + " 357,\n", + " 76,\n", + " 85,\n", + " 80,\n", + " 79,\n", + " 69,\n", + " 13,\n", + " 47,\n", + " 376,\n", + " 737,\n", + " 33,\n", + " 257,\n", + " 2343,\n", + " 253,\n", + " 117,\n", + " 7377,\n", + " 119,\n", + " 357,\n", + " 72,\n", + " 1058,\n", + " 957,\n", + " 17,\n", + " 357,\n", + " 77,\n", + " 1343,\n", + " 352,\n", + " 36911,\n", + " 357,\n", + " 17394,\n", + " 1058,\n", + " 3712,\n", + " 285,\n", + " 85,\n", + " 80,\n", + " 79,\n", + " 69,\n", + " 13,\n", + " 1073,\n", + " 13049,\n", + " 376,\n", + " 26367,\n", + " 8,\n", + " 1312,\n", + " 11,\n", + " 197,\n", + " 34350,\n", + " 27363,\n", + " 1058,\n", + " 2124,\n", + " 13,\n", + " 16520,\n", + " 796,\n", + " 285,\n", + " 85,\n", + " 80,\n", + " 79,\n", + " 69,\n", + " 13,\n", + " 8937,\n", + " 2343,\n", + " 253,\n", + " 101,\n", + " 64,\n", + " 11,\n", + " 277,\n", + " 158,\n", + " 224,\n", + " 222,\n", + " 158,\n", + " 253,\n", + " 102,\n", + " 11,\n", + " 197,\n", + " 67,\n", + " 5948,\n", + " 80,\n", + " 1058,\n", + " 331,\n", + " 13,\n", + " 16520,\n", + " 796,\n", + " 285,\n", + " 85,\n", + " 80,\n", + " 79,\n", + " 69,\n", + " 13,\n", + " 8937,\n", + " 2343,\n", + " 253,\n", + " 101,\n", + " 64,\n", + " 11,\n", + " 277,\n", + " 158,\n", + " 224,\n", + " 223,\n", + " 158,\n", + " 253,\n", + " 102,\n", + " 11,\n", + " 197,\n", + " 71,\n", + " 6,\n", + " 1058,\n", + " 18872,\n", + " 222,\n", + " 357,\n", + " 72,\n", + " 1058,\n", + " 957,\n", + " 17,\n", + " 357,\n", + " 77,\n", + " 1343,\n", + " 352,\n", + " 4008,\n", + " 357,\n", + " 73,\n", + " 1058,\n", + " 357,\n", + " 76,\n", + " 85,\n", + " 80,\n", + " 79,\n", + " 69,\n", + " 13,\n", + " 47,\n", + " 376,\n", + " 737,\n", + " 33,\n", + " 257,\n", + " 1312,\n", + " 828,\n", + " 26367,\n", + " 13,\n", + " 2411,\n", + " 62,\n", + " 
12957,\n", + " 374,\n", + " 357,\n", + " 69,\n", + " 158,\n", + " 224,\n", + " 222,\n", + " 1312,\n", + " 474,\n", + " 8,\n", + " 357,\n", + " 69,\n", + " 158,\n", + " 224,\n", + " 223,\n", + " 1312,\n", + " 474,\n", + " 8,\n", + " 197,\n", + " 158,\n", + " 232,\n", + " 95,\n", + " 357,\n", + " 4906,\n", + " 35138,\n", + " 13,\n", + " 312,\n", + " 1058,\n", + " 3712,\n", + " 23611,\n", + " 13,\n", + " 28015,\n", + " 357,\n", + " 39377,\n", + " 357,\n", + " 87,\n", + " 331,\n", + " 1058,\n", + " 285,\n", + " 85,\n", + " 80,\n", + " 79,\n", + " 69,\n", + " 13,\n", + " 1073,\n", + " 13049,\n", + " 376,\n", + " 26367,\n", + " 828,\n", + " 374,\n", + " 2124,\n", + " 331,\n", + " 4008,\n", + " 1279,\n", + " 13702,\n", + " 29,\n", + " 2124,\n", + " 13,\n", + " 16520,\n", + " 796,\n", + " 357,\n", + " 4906,\n", + " 35138,\n", + " 13,\n", + " 312,\n", + " 1058,\n", + " 3712,\n", + " 23611,\n", + " 13,\n", + " 28015,\n", + " 357,\n", + " 39377,\n", + " 357,\n", + " 87,\n", + " 331,\n", + " 1058,\n", + " 285,\n", + " 85,\n", + " 80,\n", + " 79,\n", + " 69,\n", + " 13,\n", + " 1073,\n", + " 13049,\n", + " 376,\n", + " 26367,\n", + " 828,\n", + " 374,\n", + " 2124,\n", + " 331,\n", + " 4008,\n", + " 1279,\n", + " 13702,\n", + " 29,\n", + " 331,\n", + " 13,\n", + " 16520,\n", + " 198,\n", + " 1279,\n", + " 4805,\n", + " 6684,\n", + " 37,\n", + " 42135,\n", + " 29,\n", + " 220]},\n", + " 'pos': [{'pid': '6579643507496699425',\n", + " 'passage': [31653, 685, 34350, 27363, 11, 288]},\n", + " {'pid': '-2978845321307905187',\n", + " 'passage': [31653,\n", + " 685,\n", + " 34350,\n", + " 27363,\n", + " 11,\n", + " 30121,\n", + " 80,\n", + " 11,\n", + " 17804,\n", + " 238,\n", + " 8937,\n", + " 62,\n", + " 8899,\n", + " 11,\n", + " 17804,\n", + " 238,\n", + " 8937,\n", + " 62,\n", + " 8899,\n", + " 11,\n", + " 285,\n", + " 36133,\n", + " 12543,\n", + " 2715,\n", + " 13,\n", + " 8899,\n", + " 62,\n", + " 27363,\n", + " 11,\n", + " 285,\n", + " 36133,\n", + " 12543,\n", + " 2715,\n", + " 13,\n", + " 8899,\n", + " 62,\n", + " 27363,\n", + " 60]}],\n", + " 'neg': [{'pid': '6575927600422562800',\n", + " 'passage': [31653,\n", + " 685,\n", + " 29705,\n", + " 238,\n", + " 23611,\n", + " 13,\n", + " 23661,\n", + " 357,\n", + " 421,\n", + " 313,\n", + " 13,\n", + " 28015,\n", + " 62,\n", + " 27363,\n", + " 62,\n", + " 28015,\n", + " 62,\n", + " 1659,\n", + " 62,\n", + " 27363,\n", + " 4808,\n", + " 828,\n", + " 17804,\n", + " 238,\n", + " 23611,\n", + " 13,\n", + " 12543,\n", + " 62,\n", + " 15699,\n", + " 62,\n", + " 16]},\n", + " {'pid': '928431572601992639',\n", + " 'passage': [31653,\n", + " 685,\n", + " 29705,\n", + " 238,\n", + " 23611,\n", + " 13,\n", + " 23661,\n", + " 357,\n", + " 421,\n", + " 313,\n", + " 13,\n", + " 28015,\n", + " 62,\n", + " 27363,\n", + " 62,\n", + " 28015,\n", + " 62,\n", + " 1659,\n", + " 62,\n", + " 27363,\n", + " 4808,\n", + " 828,\n", + " 17804,\n", + " 238,\n", + " 23611,\n", + " 13,\n", + " 26282,\n", + " 62,\n", + " 261,\n", + " 6,\n", + " 62]},\n", + " {'pid': '7552767570238416096',\n", + " 'passage': [31653,\n", + " 685,\n", + " 29705,\n", + " 238,\n", + " 23611,\n", + " 13,\n", + " 23661,\n", + " 357,\n", + " 421,\n", + " 313,\n", + " 13,\n", + " 28015,\n", + " 62,\n", + " 27363,\n", + " 62,\n", + " 28015,\n", + " 4808,\n", + " 4808,\n", + " 828,\n", + " 17804,\n", + " 238,\n", + " 23611,\n", + " 13,\n", + " 28015,\n", + " 62,\n", + " 27363,\n", + " 62,\n", + " 28015,\n", + " 11,\n", + " 17804,\n", + " 238]},\n", + " {'pid': '-2234788984240462035', 'passage': [1069, 529]},\n", + 
" {'pid': '-4411941448410955259',\n", + " 'passage': [31653,\n", + " 685,\n", + " 29705,\n", + " 238,\n", + " 23611,\n", + " 13,\n", + " 23661,\n", + " 357,\n", + " 421,\n", + " 313,\n", + " 13,\n", + " 28015,\n", + " 62,\n", + " 27363,\n", + " 62,\n", + " 28015,\n", + " 62,\n", + " 1659,\n", + " 62,\n", + " 27363,\n", + " 4808,\n", + " 828,\n", + " 17804,\n", + " 238,\n", + " 23611,\n", + " 13,\n", + " 23661,\n", + " 357,\n", + " 421,\n", + " 313,\n", + " 13]},\n", + " {'pid': '3122899436888549272', 'passage': [742]},\n", + " {'pid': '-8839975914427080847', 'passage': [90]},\n", + " {'pid': '-2432619728502266055', 'passage': [67]},\n", + " {'pid': '-7361431018201337745', 'passage': [7266, 301]},\n", + " {'pid': '4194638930665964425', 'passage': [81]}]}" + ] + }, + "execution_count": 240, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X_train[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 273, + "id": "210138fd", + "metadata": {}, + "outputs": [], + "source": [ + "# with open('train.json', 'w') as fout:\n", + "# json.dump(X_train , fout)\n", + " \n", + "with open('data/train/train.json', 'w') as outfile:\n", + " for entry in X_train:\n", + " json.dump(entry, outfile)\n", + " outfile.write('\\n')" + ] + }, + { + "cell_type": "code", + "execution_count": 274, + "id": "22622cae", + "metadata": {}, + "outputs": [], + "source": [ + "# with open('dev.json', 'w') as fout:\n", + "# json.dump(x_dev , fout)\n", + " \n", + "with open('data/dev/dev.json', 'w') as outfile:\n", + " for entry in x_dev:\n", + " json.dump(entry, outfile)\n", + " outfile.write('\\n')" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "70f8d8b1", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "432K\tReranker\n", + "6.2M\tall_gpt-ada.json\n", + "11M\tall_gpt_combined.json\n", + "538M\tclass\n", + "26M\tdata\n", + "562M\tfiles_upload\n", + "7.9M\tgdrive\n", + "20K\tgptNEO_eval.ipynb\n", + "1012K\tjson_data_0.json\n", + "7.9M\tjson_data_1.json\n", + "4.3M\tjson_data__hard_ones.json\n", + "740K\tneo-Copy1.ipynb\n", + "908K\tneo.ipynb\n", + "0\tonstart.log\n", + "4.0K\tonstart.sh\n", + "116K\treranker.ipynb\n", + "11G\tresults\n", + "0\ttest.json\n", + "0\ttmp_trainer\n" + ] + } + ], + "source": [ + "!du -sh *" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c8e19348", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 90, + "id": "a3d0e5ff", + "metadata": {}, + "outputs": [], + "source": [ + "def get_best_ranking(each):\n", + " test = each[\"prompt\"].replace(\"GOAL\",\"\").replace(\"PROOFSTEP\",\"\")\n", + " \n", + " results = [i for i in each[\"all\"]]\n", + " l = []\n", + " for i in results+[each[\"real\"]]:\n", + " inputs = rk.tokenize(test, i, return_tensors='pt')\n", + " inputs.to(torch.device(\"cuda:0\"))\n", + " score = rk(inputs).logits\n", + " l.append((score.cpu().detach().numpy()[0][0],i))\n", + " l.sort(key=lambda x:x[0])\n", + " l.reverse()\n", + " l_e = [i[1] for i in l]\n", + " return l_e.index(each[\"real\"]),l_e.index(each[\"best_match\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 103, + "id": "8eb03e1e", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100% 1000/1000 [03:01<00:00, 5.52it/s]\n" + ] + } + ], + "source": [ + "resi_search = []\n", + " \n", + "for i in tqdm(json_c[:1000]):\n", + " resi_search.append(get_best_ranking(i))" + ] + }, + { + 
"cell_type": "code", + "execution_count": 104, + "id": "aeb1d717", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[(15, 23),\n", + " (17, 25),\n", + " (16, 15),\n", + " (15, 5),\n", + " (13, 20),\n", + " (9, 4),\n", + " (20, 20),\n", + " (2, 12),\n", + " (11, 16),\n", + " (0, 6),\n", + " (15, 15),\n", + " (2, 12),\n", + " (8, 8),\n", + " (2, 8),\n", + " (2, 3),\n", + " (8, 8),\n", + " (14, 14),\n", + " (0, 6),\n", + " (1, 1),\n", + " (11, 11),\n", + " (13, 13),\n", + " (4, 10),\n", + " (3, 3),\n", + " (16, 16),\n", + " (5, 5),\n", + " (10, 1),\n", + " (5, 5),\n", + " (5, 5),\n", + " (6, 19),\n", + " (6, 6),\n", + " (5, 5),\n", + " (8, 21),\n", + " (3, 10),\n", + " (4, 11),\n", + " (10, 16),\n", + " (6, 6),\n", + " (8, 12),\n", + " (14, 17),\n", + " (21, 9),\n", + " (2, 15),\n", + " (21, 12),\n", + " (5, 6),\n", + " (7, 6),\n", + " (17, 14),\n", + " (2, 15),\n", + " (21, 12),\n", + " (12, 4),\n", + " (3, 3),\n", + " (22, 2),\n", + " (7, 0),\n", + " (15, 15),\n", + " (15, 10),\n", + " (14, 10),\n", + " (11, 13),\n", + " (1, 11),\n", + " (20, 16),\n", + " (1, 12),\n", + " (11, 12),\n", + " (15, 16),\n", + " (7, 7),\n", + " (12, 12),\n", + " (5, 5),\n", + " (14, 15),\n", + " (8, 8),\n", + " (6, 10),\n", + " (15, 4),\n", + " (9, 14),\n", + " (15, 2),\n", + " (12, 17),\n", + " (14, 14),\n", + " (15, 15),\n", + " (11, 6),\n", + " (5, 6),\n", + " (14, 14),\n", + " (12, 13),\n", + " (9, 18),\n", + " (8, 8),\n", + " (0, 20),\n", + " (13, 21),\n", + " (1, 14),\n", + " (10, 11),\n", + " (9, 13),\n", + " (31, 29),\n", + " (16, 11),\n", + " (25, 24),\n", + " (4, 4),\n", + " (3, 20),\n", + " (5, 20),\n", + " (4, 17),\n", + " (11, 11),\n", + " (2, 10),\n", + " (4, 16),\n", + " (3, 7),\n", + " (15, 6),\n", + " (11, 11),\n", + " (8, 7),\n", + " (10, 9),\n", + " (2, 2),\n", + " (0, 12),\n", + " (10, 8),\n", + " (17, 16),\n", + " (14, 11),\n", + " (0, 7),\n", + " (5, 4),\n", + " (6, 4),\n", + " (13, 20),\n", + " (3, 20),\n", + " (12, 6),\n", + " (17, 15),\n", + " (10, 10),\n", + " (19, 10),\n", + " (0, 0),\n", + " (0, 0),\n", + " (11, 9),\n", + " (1, 1),\n", + " (14, 16),\n", + " (16, 16),\n", + " (0, 0),\n", + " (2, 2),\n", + " (0, 0),\n", + " (11, 13),\n", + " (0, 0),\n", + " (2, 10),\n", + " (14, 18),\n", + " (9, 18),\n", + " (5, 14),\n", + " (4, 2),\n", + " (0, 5),\n", + " (8, 24),\n", + " (14, 12),\n", + " (4, 13),\n", + " (4, 4),\n", + " (6, 8),\n", + " (4, 13),\n", + " (0, 12),\n", + " (16, 6),\n", + " (3, 3),\n", + " (4, 10),\n", + " (14, 14),\n", + " (3, 13),\n", + " (3, 1),\n", + " (11, 6),\n", + " (2, 2),\n", + " (11, 0),\n", + " (2, 11),\n", + " (2, 25),\n", + " (1, 2),\n", + " (12, 7),\n", + " (14, 20),\n", + " (22, 8),\n", + " (1, 3),\n", + " (15, 1),\n", + " (8, 15),\n", + " (1, 22),\n", + " (2, 7),\n", + " (5, 14),\n", + " (4, 3),\n", + " (12, 15),\n", + " (19, 18),\n", + " (12, 19),\n", + " (5, 1),\n", + " (8, 13),\n", + " (5, 9),\n", + " (15, 22),\n", + " (7, 16),\n", + " (10, 13),\n", + " (11, 15),\n", + " (0, 5),\n", + " (11, 14),\n", + " (18, 15),\n", + " (22, 12),\n", + " (13, 10),\n", + " (5, 6),\n", + " (6, 4),\n", + " (0, 11),\n", + " (11, 5),\n", + " (21, 9),\n", + " (14, 19),\n", + " (14, 15),\n", + " (12, 7),\n", + " (8, 8),\n", + " (15, 15),\n", + " (19, 6),\n", + " (12, 9),\n", + " (2, 2),\n", + " (0, 0),\n", + " (0, 0),\n", + " (22, 22),\n", + " (13, 13),\n", + " (5, 5),\n", + " (7, 7),\n", + " (15, 15),\n", + " (8, 8),\n", + " (14, 14),\n", + " (4, 3),\n", + " (4, 4),\n", + " (0, 8),\n", + " (11, 11),\n", + " (10, 12),\n", + " (11, 5),\n", + " (4, 4),\n", 
+ " (0, 0),\n", + " (1, 12),\n", + " (9, 11),\n", + " (11, 11),\n", + " (22, 8),\n", + " (3, 5),\n", + " (4, 11),\n", + " (2, 13),\n", + " (5, 16),\n", + " (13, 5),\n", + " (13, 13),\n", + " (4, 3),\n", + " (24, 1),\n", + " (20, 13),\n", + " (20, 17),\n", + " (0, 11),\n", + " (0, 8),\n", + " (5, 4),\n", + " (3, 1),\n", + " (10, 5),\n", + " (7, 8),\n", + " (10, 9),\n", + " (8, 12),\n", + " (6, 5),\n", + " (1, 2),\n", + " (6, 10),\n", + " (9, 1),\n", + " (10, 0),\n", + " (1, 6),\n", + " (8, 4),\n", + " (8, 0),\n", + " (5, 0),\n", + " (6, 9),\n", + " (12, 9),\n", + " (8, 8),\n", + " (7, 7),\n", + " (7, 5),\n", + " (8, 7),\n", + " (9, 2),\n", + " (12, 13),\n", + " (9, 8),\n", + " (6, 11),\n", + " (8, 10),\n", + " (16, 13),\n", + " (3, 15),\n", + " (15, 20),\n", + " (12, 11),\n", + " (4, 1),\n", + " (11, 20),\n", + " (11, 17),\n", + " (13, 11),\n", + " (13, 15),\n", + " (11, 7),\n", + " (12, 11),\n", + " (19, 16),\n", + " (16, 7),\n", + " (17, 11),\n", + " (3, 3),\n", + " (9, 15),\n", + " (21, 11),\n", + " (7, 0),\n", + " (14, 14),\n", + " (4, 2),\n", + " (7, 1),\n", + " (8, 6),\n", + " (15, 19),\n", + " (18, 15),\n", + " (6, 17),\n", + " (14, 20),\n", + " (2, 8),\n", + " (12, 19),\n", + " (8, 8),\n", + " (19, 12),\n", + " (9, 10),\n", + " (8, 16),\n", + " (4, 10),\n", + " (11, 9),\n", + " (6, 9),\n", + " (13, 5),\n", + " (16, 1),\n", + " (19, 19),\n", + " (20, 21),\n", + " (5, 14),\n", + " (3, 11),\n", + " (3, 18),\n", + " (3, 19),\n", + " (0, 18),\n", + " (20, 20),\n", + " (1, 3),\n", + " (17, 9),\n", + " (22, 24),\n", + " (14, 13),\n", + " (18, 3),\n", + " (11, 15),\n", + " (18, 12),\n", + " (9, 5),\n", + " (7, 16),\n", + " (13, 6),\n", + " (5, 0),\n", + " (9, 16),\n", + " (11, 0),\n", + " (4, 4),\n", + " (12, 12),\n", + " (7, 7),\n", + " (0, 2),\n", + " (15, 3),\n", + " (7, 15),\n", + " (8, 11),\n", + " (17, 16),\n", + " (9, 25),\n", + " (14, 3),\n", + " (5, 17),\n", + " (4, 8),\n", + " (4, 5),\n", + " (6, 10),\n", + " (12, 13),\n", + " (5, 4),\n", + " (8, 6),\n", + " (5, 5),\n", + " (5, 8),\n", + " (15, 14),\n", + " (5, 9),\n", + " (2, 1),\n", + " (6, 2),\n", + " (2, 6),\n", + " (6, 8),\n", + " (5, 0),\n", + " (13, 14),\n", + " (0, 0),\n", + " (13, 1),\n", + " (9, 11),\n", + " (6, 6),\n", + " (3, 3),\n", + " (6, 6),\n", + " (8, 8),\n", + " (5, 5),\n", + " (8, 8),\n", + " (4, 4),\n", + " (5, 4),\n", + " (6, 2),\n", + " (3, 3),\n", + " (13, 20),\n", + " (1, 6),\n", + " (21, 21),\n", + " (9, 11),\n", + " (23, 20),\n", + " (6, 6),\n", + " (6, 19),\n", + " (5, 0),\n", + " (2, 15),\n", + " (10, 12),\n", + " (3, 11),\n", + " (12, 9),\n", + " (9, 5),\n", + " (14, 10),\n", + " (6, 18),\n", + " (4, 1),\n", + " (6, 2),\n", + " (17, 15),\n", + " (8, 15),\n", + " (3, 6),\n", + " (3, 4),\n", + " (8, 18),\n", + " (8, 7),\n", + " (13, 7),\n", + " (21, 24),\n", + " (15, 13),\n", + " (8, 11),\n", + " (15, 16),\n", + " (3, 11),\n", + " (20, 5),\n", + " (10, 13),\n", + " (12, 11),\n", + " (8, 8),\n", + " (6, 5),\n", + " (12, 7),\n", + " (13, 13),\n", + " (14, 11),\n", + " (19, 13),\n", + " (15, 15),\n", + " (19, 11),\n", + " (0, 3),\n", + " (4, 4),\n", + " (7, 12),\n", + " (8, 16),\n", + " (11, 11),\n", + " (9, 6),\n", + " (12, 12),\n", + " (12, 12),\n", + " (13, 13),\n", + " (7, 7),\n", + " (6, 6),\n", + " (10, 10),\n", + " (2, 2),\n", + " (6, 1),\n", + " (14, 0),\n", + " (1, 16),\n", + " (13, 11),\n", + " (12, 12),\n", + " (2, 12),\n", + " (20, 10),\n", + " (8, 10),\n", + " (0, 11),\n", + " (0, 0),\n", + " (3, 3),\n", + " (3, 3),\n", + " (11, 3),\n", + " (8, 8),\n", + " (11, 10),\n", + " (16, 10),\n", + " 
(7, 1),\n", + " (19, 19),\n", + " (12, 10),\n", + " (11, 12),\n", + " (10, 10),\n", + " (6, 6),\n", + " (7, 13),\n", + " (5, 9),\n", + " (13, 5),\n", + " (8, 16),\n", + " (7, 7),\n", + " (13, 1),\n", + " (2, 17),\n", + " (15, 2),\n", + " (14, 17),\n", + " (5, 13),\n", + " (2, 2),\n", + " (3, 15),\n", + " (10, 5),\n", + " (11, 11),\n", + " (2, 9),\n", + " (11, 4),\n", + " (16, 18),\n", + " (9, 12),\n", + " (4, 11),\n", + " (15, 12),\n", + " (0, 0),\n", + " (4, 3),\n", + " (5, 4),\n", + " (12, 10),\n", + " (19, 19),\n", + " (18, 18),\n", + " (0, 17),\n", + " (0, 0),\n", + " (0, 7),\n", + " (5, 6),\n", + " (3, 4),\n", + " (13, 8),\n", + " (15, 15),\n", + " (17, 17),\n", + " (0, 0),\n", + " (0, 0),\n", + " (3, 1),\n", + " (3, 7),\n", + " (9, 19),\n", + " (12, 9),\n", + " (18, 18),\n", + " (9, 9),\n", + " (18, 0),\n", + " (3, 19),\n", + " (12, 3),\n", + " (11, 11),\n", + " (3, 3),\n", + " (11, 11),\n", + " (2, 17),\n", + " (11, 11),\n", + " (9, 9),\n", + " (10, 10),\n", + " (16, 16),\n", + " (0, 0),\n", + " (13, 2),\n", + " (6, 6),\n", + " (2, 4),\n", + " (11, 12),\n", + " (20, 15),\n", + " (15, 0),\n", + " (0, 0),\n", + " (20, 18),\n", + " (15, 12),\n", + " (8, 7),\n", + " (13, 4),\n", + " (15, 10),\n", + " (3, 3),\n", + " (7, 15),\n", + " (17, 11),\n", + " (17, 11),\n", + " (22, 14),\n", + " (22, 23),\n", + " (4, 1),\n", + " (8, 18),\n", + " (2, 0),\n", + " (3, 1),\n", + " (10, 3),\n", + " (5, 4),\n", + " (0, 0),\n", + " (2, 3),\n", + " (0, 0),\n", + " (1, 1),\n", + " (9, 8),\n", + " (7, 10),\n", + " (20, 10),\n", + " (13, 11),\n", + " (12, 15),\n", + " (8, 12),\n", + " (19, 10),\n", + " (15, 19),\n", + " (27, 27),\n", + " (26, 26),\n", + " (19, 12),\n", + " (9, 9),\n", + " (10, 5),\n", + " (18, 1),\n", + " (8, 2),\n", + " (19, 18),\n", + " (4, 14),\n", + " (6, 7),\n", + " (15, 16),\n", + " (8, 6),\n", + " (14, 22),\n", + " (6, 13),\n", + " (19, 22),\n", + " (5, 16),\n", + " (10, 1),\n", + " (5, 11),\n", + " (5, 3),\n", + " (6, 16),\n", + " (22, 9),\n", + " (0, 7),\n", + " (15, 15),\n", + " (21, 6),\n", + " (15, 4),\n", + " (17, 22),\n", + " (12, 6),\n", + " (4, 11),\n", + " (7, 13),\n", + " (5, 10),\n", + " (22, 19),\n", + " (23, 16),\n", + " (6, 8),\n", + " (7, 0),\n", + " (6, 8),\n", + " (4, 4),\n", + " (17, 17),\n", + " (15, 8),\n", + " (0, 0),\n", + " (17, 13),\n", + " (0, 0),\n", + " (10, 14),\n", + " (7, 6),\n", + " (2, 11),\n", + " (9, 12),\n", + " (21, 8),\n", + " (15, 12),\n", + " (16, 14),\n", + " (16, 16),\n", + " (0, 0),\n", + " (17, 21),\n", + " (2, 2),\n", + " (7, 7),\n", + " (17, 13),\n", + " (10, 11),\n", + " (13, 13),\n", + " (10, 10),\n", + " (10, 10),\n", + " (22, 4),\n", + " (19, 20),\n", + " (16, 17),\n", + " (6, 2),\n", + " (5, 15),\n", + " (20, 21),\n", + " (16, 15),\n", + " (16, 1),\n", + " (16, 8),\n", + " (15, 4),\n", + " (4, 12),\n", + " (5, 9),\n", + " (7, 8),\n", + " (15, 9),\n", + " (16, 16),\n", + " (3, 1),\n", + " (3, 13),\n", + " (4, 4),\n", + " (15, 15),\n", + " (18, 5),\n", + " (11, 9),\n", + " (8, 12),\n", + " (16, 7),\n", + " (1, 6),\n", + " (13, 14),\n", + " (13, 13),\n", + " (3, 4),\n", + " (13, 13),\n", + " (11, 11),\n", + " (15, 10),\n", + " (8, 8),\n", + " (4, 5),\n", + " (7, 18),\n", + " (9, 1),\n", + " (18, 10),\n", + " (0, 0),\n", + " (14, 7),\n", + " (11, 12),\n", + " (13, 10),\n", + " (13, 17),\n", + " (13, 6),\n", + " (5, 12),\n", + " (3, 8),\n", + " (21, 9),\n", + " (6, 2),\n", + " (14, 15),\n", + " (14, 2),\n", + " (9, 11),\n", + " (14, 17),\n", + " (11, 13),\n", + " (3, 7),\n", + " (2, 0),\n", + " (4, 3),\n", + " (3, 0),\n", + " (5, 5),\n", 
+ " (7, 7),\n", + " (5, 15),\n", + " (7, 7),\n", + " (4, 4),\n", + " (5, 5),\n", + " (6, 6),\n", + " (0, 0),\n", + " (2, 2),\n", + " (0, 8),\n", + " (3, 5),\n", + " (4, 5),\n", + " (4, 5),\n", + " (3, 7),\n", + " (3, 16),\n", + " (5, 13),\n", + " (3, 2),\n", + " (2, 8),\n", + " (5, 9),\n", + " (4, 6),\n", + " (5, 5),\n", + " (3, 2),\n", + " (0, 0),\n", + " (0, 0),\n", + " (13, 12),\n", + " (17, 2),\n", + " (19, 0),\n", + " (18, 2),\n", + " (1, 1),\n", + " (1, 1),\n", + " (0, 0),\n", + " (7, 6),\n", + " (5, 5),\n", + " (3, 18),\n", + " (8, 14),\n", + " (16, 13),\n", + " (12, 3),\n", + " (12, 5),\n", + " (11, 6),\n", + " (1, 16),\n", + " (14, 4),\n", + " (10, 10),\n", + " (7, 2),\n", + " (20, 13),\n", + " (2, 4),\n", + " (1, 4),\n", + " (5, 8),\n", + " (11, 12),\n", + " (7, 16),\n", + " (14, 13),\n", + " (10, 0),\n", + " (8, 10),\n", + " (9, 23),\n", + " (18, 11),\n", + " (4, 2),\n", + " (8, 4),\n", + " (7, 7),\n", + " (7, 7),\n", + " (5, 5),\n", + " (0, 0),\n", + " (5, 5),\n", + " (7, 7),\n", + " (7, 7),\n", + " (6, 6),\n", + " (4, 4),\n", + " (9, 9),\n", + " (5, 5),\n", + " (2, 2),\n", + " (7, 10),\n", + " (5, 4),\n", + " (4, 4),\n", + " (2, 2),\n", + " (3, 3),\n", + " (5, 5),\n", + " (2, 2),\n", + " (1, 1),\n", + " (3, 3),\n", + " (6, 6),\n", + " (2, 2),\n", + " (9, 9),\n", + " (9, 9),\n", + " (5, 5),\n", + " (6, 6),\n", + " (6, 6),\n", + " (2, 2),\n", + " (3, 3),\n", + " (4, 4),\n", + " (2, 2),\n", + " (3, 3),\n", + " (4, 4),\n", + " (5, 5),\n", + " (7, 13),\n", + " (17, 22),\n", + " (0, 0),\n", + " (10, 6),\n", + " (18, 18),\n", + " (12, 8),\n", + " (8, 9),\n", + " (21, 10),\n", + " (24, 2),\n", + " (3, 12),\n", + " (3, 10),\n", + " (5, 13),\n", + " (1, 14),\n", + " (12, 3),\n", + " (13, 16),\n", + " (6, 3),\n", + " (12, 11),\n", + " (5, 5),\n", + " (19, 23),\n", + " (0, 12),\n", + " (13, 21),\n", + " (2, 2),\n", + " (9, 12),\n", + " (6, 13),\n", + " (10, 19),\n", + " (3, 1),\n", + " (7, 22),\n", + " (4, 14),\n", + " (7, 19),\n", + " (12, 12),\n", + " (2, 0),\n", + " (9, 1),\n", + " (7, 7),\n", + " (14, 3),\n", + " (4, 17),\n", + " (6, 16),\n", + " (3, 8),\n", + " (16, 9),\n", + " (4, 4),\n", + " (6, 10),\n", + " (7, 7),\n", + " (1, 1),\n", + " (2, 4),\n", + " (5, 15),\n", + " (17, 14),\n", + " (11, 11),\n", + " (8, 0),\n", + " (5, 5),\n", + " (2, 2),\n", + " (4, 5),\n", + " (5, 4),\n", + " (12, 0),\n", + " (6, 7),\n", + " (4, 7),\n", + " (19, 18),\n", + " (1, 1),\n", + " (9, 0),\n", + " (4, 10),\n", + " (1, 3),\n", + " (7, 1),\n", + " (6, 12),\n", + " (5, 13),\n", + " (11, 17),\n", + " (2, 2),\n", + " (8, 13),\n", + " (10, 1),\n", + " (8, 10),\n", + " (26, 12),\n", + " (11, 20),\n", + " (16, 17),\n", + " (8, 16),\n", + " (12, 20),\n", + " (6, 15),\n", + " (1, 1),\n", + " (6, 17),\n", + " (15, 16),\n", + " (21, 22),\n", + " (2, 13),\n", + " (3, 17),\n", + " (10, 16),\n", + " (18, 15),\n", + " (21, 14),\n", + " (8, 14),\n", + " (15, 4),\n", + " (4, 11),\n", + " (17, 16),\n", + " (5, 5),\n", + " (5, 5),\n", + " (3, 3),\n", + " (3, 7),\n", + " (6, 4),\n", + " (7, 12),\n", + " (15, 18),\n", + " (2, 20),\n", + " (1, 11),\n", + " (14, 15),\n", + " (3, 15),\n", + " (17, 24),\n", + " (2, 0),\n", + " (1, 0),\n", + " (9, 9),\n", + " (16, 8),\n", + " (7, 0),\n", + " (13, 6),\n", + " (5, 1),\n", + " (14, 6),\n", + " (9, 17),\n", + " (10, 20),\n", + " (5, 4),\n", + " (19, 1),\n", + " (3, 6),\n", + " (4, 3),\n", + " (19, 12),\n", + " (5, 7),\n", + " (8, 11),\n", + " (8, 6),\n", + " (2, 1),\n", + " (3, 0),\n", + " (1, 7),\n", + " (1, 0),\n", + " (3, 4),\n", + " (7, 1),\n", + " (5, 5),\n", + " (10, 
8),\n", + " (4, 8),\n", + " (2, 0),\n", + " (7, 11),\n", + " (11, 7),\n", + " (12, 17),\n", + " (7, 14),\n", + " (9, 17),\n", + " (4, 5),\n", + " (5, 16),\n", + " (9, 21),\n", + " (8, 10),\n", + " (4, 6),\n", + " (8, 9),\n", + " (4, 5),\n", + " (19, 18),\n", + " (5, 0),\n", + " (8, 3),\n", + " (5, 6),\n", + " (7, 1),\n", + " (3, 2),\n", + " (6, 7),\n", + " (5, 7),\n", + " (12, 9),\n", + " (11, 8),\n", + " (0, 3),\n", + " (10, 2),\n", + " (18, 18),\n", + " (3, 3),\n", + " (0, 4),\n", + " (20, 20),\n", + " (22, 1),\n", + " (1, 9),\n", + " (1, 2),\n", + " (1, 12),\n", + " (7, 21),\n", + " (6, 2),\n", + " (1, 1),\n", + " (3, 5),\n", + " (4, 10),\n", + " (2, 1),\n", + " (15, 9),\n", + " (11, 2),\n", + " (15, 13),\n", + " (6, 4),\n", + " (11, 13),\n", + " (5, 5),\n", + " (7, 4),\n", + " (9, 4),\n", + " (10, 5),\n", + " (6, 7),\n", + " (6, 6),\n", + " (5, 6),\n", + " (5, 11),\n", + " (0, 0),\n", + " (7, 1),\n", + " (8, 0),\n", + " (2, 6),\n", + " (7, 9),\n", + " (3, 5),\n", + " (3, 2),\n", + " (4, 5),\n", + " (3, 15),\n", + " (0, 5),\n", + " (14, 14),\n", + " (1, 10),\n", + " (25, 25),\n", + " (23, 23),\n", + " (20, 20),\n", + " (23, 23),\n", + " (15, 12),\n", + " (13, 9),\n", + " (16, 9),\n", + " (12, 18),\n", + " (9, 9),\n", + " (10, 10),\n", + " (18, 15),\n", + " (0, 3),\n", + " (2, 8),\n", + " (0, 0),\n", + " (21, 14),\n", + " (1, 4),\n", + " (17, 17),\n", + " (11, 4),\n", + " (6, 6),\n", + " (14, 14),\n", + " (17, 6),\n", + " (0, 0),\n", + " (4, 3),\n", + " (12, 8),\n", + " (11, 10),\n", + " (1, 1),\n", + " (3, 16),\n", + " (10, 10),\n", + " (14, 14),\n", + " (13, 10),\n", + " (11, 11),\n", + " (1, 16),\n", + " (10, 10),\n", + " (4, 4),\n", + " (14, 10),\n", + " (3, 4),\n", + " (5, 20),\n", + " (12, 12),\n", + " (8, 8),\n", + " (17, 0),\n", + " (12, 12),\n", + " (8, 8),\n", + " (24, 14),\n", + " (14, 16),\n", + " (11, 9),\n", + " (11, 7),\n", + " (0, 6),\n", + " (11, 7),\n", + " (15, 6),\n", + " (3, 3),\n", + " (5, 5),\n", + " (25, 25),\n", + " (24, 24),\n", + " (12, 24),\n", + " (9, 16),\n", + " (10, 7),\n", + " (1, 1),\n", + " (6, 6),\n", + " (6, 6),\n", + " (7, 3),\n", + " (11, 4),\n", + " (15, 20),\n", + " (14, 18),\n", + " (18, 19),\n", + " (18, 12),\n", + " (26, 20),\n", + " (14, 16),\n", + " (13, 14),\n", + " (11, 2),\n", + " (13, 8),\n", + " (1, 16),\n", + " (14, 14),\n", + " (6, 8),\n", + " (15, 17),\n", + " (19, 21),\n", + " (0, 6),\n", + " (6, 17),\n", + " (3, 11),\n", + " (21, 3),\n", + " (3, 11),\n", + " (10, 14),\n", + " (3, 4),\n", + " (8, 13),\n", + " (10, 4),\n", + " (5, 11),\n", + " (8, 8),\n", + " (21, 9),\n", + " (9, 4),\n", + " (7, 11),\n", + " (18, 6),\n", + " (10, 13),\n", + " (4, 7),\n", + " (0, 3),\n", + " (20, 21),\n", + " (19, 20),\n", + " (18, 19),\n", + " (2, 15),\n", + " (14, 17),\n", + " (11, 11),\n", + " (21, 10),\n", + " (21, 12)]" + ] + }, + "execution_count": 104, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "resi_search" + ] + }, + { + "cell_type": "code", + "execution_count": 106, + "id": "190124c6", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "

\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
01
count1000.0000001000.000000
mean8.8780009.300000
std6.1245926.173177
min0.0000000.000000
25%4.0000004.000000
50%8.0000009.000000
75%13.00000014.000000
max31.00000029.000000
\n", + "
" + ], + "text/plain": [ + " 0 1\n", + "count 1000.000000 1000.000000\n", + "mean 8.878000 9.300000\n", + "std 6.124592 6.173177\n", + "min 0.000000 0.000000\n", + "25% 4.000000 4.000000\n", + "50% 8.000000 9.000000\n", + "75% 13.000000 14.000000\n", + "max 31.000000 29.000000" + ] + }, + "execution_count": 106, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import pandas as pd\n", + "pd.DataFrame(resi_search).describe()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3d436c06", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 57, + "id": "02152bac", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'best_match': 'let k := (has_reflect l).arrow',\n", + " 'score': 0.3333333333333333,\n", + " 'real': 'haveI := has_reflect',\n", + " 'all': ['exact (has_lift_',\n", + " 'rw [mirror_eq_rel.2 _]',\n", + " \"rcases exists_mul_eq_mul_aux' (reflect tactic.rcases_patt).exists with ⟨b⟩|rfl\",\n", + " 'exact is_colimit.reflect_rel (TM1.reflect l)',\n", + " \"exact pnat.reflect.reflect' (λ (e : l = 0), by rw [this, (mt (reflect e) l, eq_comm)] )\",\n", + " 'induction n with n IH',\n", + " 'exact (has',\n", + " 'exact rfl',\n", + " 'rw ← reflection_patt',\n", + " 'apply_instance',\n", + " 'induction l with n l ih',\n", + " 'exact reflect_reflect tactic.rcases_patt',\n", + " 'induction n with n l IH',\n", + " 'exact refl_reflect_aux l l',\n", + " 'exact (reverse_rec _ _',\n", + " 'exact [rcases_patt, diff_empty.elim]',\n", + " 'exact (reflect_eq_self',\n", + " 'conv { to_lhs | (_root_.has_reflect tactic.rcases_patt) \").replace(\"PROOFSTEP\",\"\")" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "id": "ae99ed61", + "metadata": {}, + "outputs": [], + "source": [ + "results = [i for i in each[\"all\"]]" + ] + }, + { + "cell_type": "code", + "execution_count": 84, + "id": "b607bdf4", + "metadata": {}, + "outputs": [], + "source": [ + "l = []\n", + "for i in results+[each[\"real\"]]:\n", + " inputs = rk.tokenize(test, i, return_tensors='pt')\n", + " inputs.to(torch.device(\"cuda:0\"))\n", + " score = rk(inputs).logits\n", + " l.append((score.cpu().detach().numpy()[0][0],i))\n", + " l.sort(key=lambda x:x[0])\n", + " l.reverse()" + ] + }, + { + "cell_type": "code", + "execution_count": 89, + "id": "d5c2f434", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[(1.0750328, 'exact (has_lift_t'),\n", + " (0.51294786, 'exact (has_lift_'),\n", + " (-0.49501634, 'rw ← reflection_patt'),\n", + " (-0.5962713, 'induction l with n l IH'),\n", + " (-0.88799334, 'let k := (has_reflect l).arrow'),\n", + " (-0.8978929, 'exact reflect_reflect tactic.rcases_patt'),\n", + " (-0.98143125, 'induction n with n IH'),\n", + " (-1.0099363, 'apply_instance'),\n", + " (-1.0381949, 'rcases l with (_ | ⟨x, hx⟩)'),\n", + " (-1.0494938, 'induction n with n l IH'),\n", + " (-1.0732956, 'rw ←_root_.reflect_comp'),\n", + " (-1.0913246, 'substs l l'),\n", + " (-1.0934861, 'rw [←reflect_out, ←reflect]'),\n", + " (-1.1548848, 'exact (reflect_eq_self'),\n", + " (-1.1796035, 'induction l with n l ih'),\n", + " (-1.1827998, 'haveI := has_reflect'),\n", + " (-1.2198576, 'exact (has'),\n", + " (-1.2420163, 'induction ih'),\n", + " (-1.2687602, 'exact reflection_patt_reflect tactic.rcases_patt'),\n", + " (-1.3217957, 'rw [mirror_eq_rel.2 _]'),\n", + " (-1.4752369, 'exact refl_reflect_aux l l'),\n", + " (-1.5429989, 'exact (l.'),\n", + " (-1.6189396,\n", + " \"rcases exists_mul_eq_mul_aux' 
(reflect tactic.rcases_patt).exists with ⟨b⟩|rfl\"),\n", + " (-1.9771941, 'exact [rcases_patt, diff_empty.elim]'),\n", + " (-2.3252175, 'exact is_colimit.reflect_rel (TM1.reflect l)'),\n", + " (-2.509611,\n", + " \"exact pnat.reflect.reflect' (λ (e : l = 0), by rw [this, (mt (reflect e) l, eq_comm)] )\"),\n", + " (-2.5570722, 'induction n with n IH generalizing l'),\n", + " (-2.742371, 'coe : listΣ (append_nil : ℕ →'),\n", + " (-2.7648082, 'exact (reverse_rec _ _'),\n", + " (-3.0234487, 'induction n with n l IH generalizing l'),\n", + " (-3.05518,\n", + " 'conv { to_lhs | (_root_.has_reflect tactic.rcases_patt) cases \"" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "id": "7a05ff29", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n", + "To disable this warning, you can either:\n", + "\t- Avoid using `tokenizers` before the fork if possible\n", + "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n", + "data_test.jsonl data_train_prepared.jsonl \u001b[0m\u001b[01;34mwandb\u001b[0m/\r\n", + "data_test_prepared.jsonl data_valid.jsonl\r\n", + "data_train.jsonl data_valid_prepared.jsonl\r\n" + ] + } + ], + "source": [ + "ls files_upload" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "4909b495", + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "\n", + "with open('files_upload/data_train_prepared.jsonl', 'r') as json_file:\n", + " json_c_all = [json.loads(i) for i in list(json_file)]" + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "id": "64230281", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "168590" + ] + }, + "execution_count": 65, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(json_c_all)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "969ed9ea", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f0ac6ff6", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5de484a2", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "e304e0a2", + "metadata": {}, + "outputs": [], + "source": [ + "from transformers import AutoTokenizer" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "f6198d05", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n", + "Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n" + ] + } + ], + "source": [ + "\n", + "hf_tokenizer = AutoTokenizer.from_pretrained(\"EleutherAI/gpt-neo-125M\", \n", + " bos_token=\"<|startoftext|>\",\n", + " eos_token=\"<|endoftext|>\",\n", + " pad_token=\"<|pad|>\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "b9f0b9ee", + "metadata": {}, + "outputs": [], + "source": [ + "from transformers import GPTNeoModel" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "064cf265", + "metadata": {}, + "outputs": [], + "source": [ + "data_args = {\n", + " \"train_dir\":\"data/train\",\n", + " 
\"train_path\":\"data/train/train.json\",\n", + " \"pred_dir\":\"data/dev\",\n", + " \"pred_path\":\"data/train/dev.json\",\n", + " \"train_group_size\":8\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "dd04a9f5", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using custom data configuration default-21c912046bcb6d4c\n", + "Reusing dataset json (/root/.cache/huggingface/datasets/json/default-21c912046bcb6d4c/0.0.0/ac0ca5f5289a6cf108e706efcf040422dbbfa8e658dee6a819f20d76bb84d26b)\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "4ebdb9d53f1441018c6bdf00699582b3", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/1 [00:00\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mtrain_dataset\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m]\u001b[49m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/reranker/data.py:103\u001b[0m, in \u001b[0;36mGroupedTrainDataset.__getitem__\u001b[0;34m(self, item)\u001b[0m\n\u001b[1;32m 100\u001b[0m examples \u001b[38;5;241m=\u001b[39m examples[\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mchunk_start: \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mchunk_end]\n\u001b[1;32m 102\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m e \u001b[38;5;129;01min\u001b[39;00m examples:\n\u001b[0;32m--> 103\u001b[0m group_batch\u001b[38;5;241m.\u001b[39mappend(\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcreate_one_example\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43me\u001b[49m\u001b[43m)\u001b[49m)\n\u001b[1;32m 104\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m group_batch\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/reranker/data.py:68\u001b[0m, in \u001b[0;36mGroupedTrainDataset.create_one_example\u001b[0;34m(self, qry_encoding, doc_encoding)\u001b[0m\n\u001b[1;32m 67\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcreate_one_example\u001b[39m(\u001b[38;5;28mself\u001b[39m, qry_encoding: List[\u001b[38;5;28mint\u001b[39m], doc_encoding: List[\u001b[38;5;28mint\u001b[39m]):\n\u001b[0;32m---> 68\u001b[0m item \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtok\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mencode_plus\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 69\u001b[0m \u001b[43m \u001b[49m\u001b[43mqry_encoding\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 70\u001b[0m \u001b[43m \u001b[49m\u001b[43mdoc_encoding\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 71\u001b[0m \u001b[43m \u001b[49m\u001b[43mtruncation\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43monly_second\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 72\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_length\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m128\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 73\u001b[0m \u001b[43m \u001b[49m\u001b[43mpadding\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 74\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 75\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m item\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/transformers/tokenization_utils_base.py:2548\u001b[0m, in 
\u001b[0;36mPreTrainedTokenizerBase.encode_plus\u001b[0;34m(self, text, text_pair, add_special_tokens, padding, truncation, max_length, stride, is_split_into_words, pad_to_multiple_of, return_tensors, return_token_type_ids, return_attention_mask, return_overflowing_tokens, return_special_tokens_mask, return_offsets_mapping, return_length, verbose, **kwargs)\u001b[0m\n\u001b[1;32m 2538\u001b[0m \u001b[38;5;66;03m# Backward compatibility for 'truncation_strategy', 'pad_to_max_length'\u001b[39;00m\n\u001b[1;32m 2539\u001b[0m padding_strategy, truncation_strategy, max_length, kwargs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_get_padding_truncation_strategies(\n\u001b[1;32m 2540\u001b[0m padding\u001b[38;5;241m=\u001b[39mpadding,\n\u001b[1;32m 2541\u001b[0m truncation\u001b[38;5;241m=\u001b[39mtruncation,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 2545\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs,\n\u001b[1;32m 2546\u001b[0m )\n\u001b[0;32m-> 2548\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_encode_plus\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 2549\u001b[0m \u001b[43m \u001b[49m\u001b[43mtext\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtext\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2550\u001b[0m \u001b[43m \u001b[49m\u001b[43mtext_pair\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtext_pair\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2551\u001b[0m \u001b[43m \u001b[49m\u001b[43madd_special_tokens\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43madd_special_tokens\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2552\u001b[0m \u001b[43m \u001b[49m\u001b[43mpadding_strategy\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpadding_strategy\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2553\u001b[0m \u001b[43m \u001b[49m\u001b[43mtruncation_strategy\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtruncation_strategy\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2554\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_length\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmax_length\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2555\u001b[0m \u001b[43m \u001b[49m\u001b[43mstride\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstride\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2556\u001b[0m \u001b[43m \u001b[49m\u001b[43mis_split_into_words\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mis_split_into_words\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2557\u001b[0m \u001b[43m \u001b[49m\u001b[43mpad_to_multiple_of\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpad_to_multiple_of\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2558\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_tensors\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_tensors\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2559\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_token_type_ids\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_token_type_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2560\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_attention_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_attention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2561\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_overflowing_tokens\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_overflowing_tokens\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2562\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mreturn_special_tokens_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_special_tokens_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2563\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_offsets_mapping\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_offsets_mapping\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2564\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_length\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_length\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2565\u001b[0m \u001b[43m \u001b[49m\u001b[43mverbose\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mverbose\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2566\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 2567\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/transformers/models/gpt2/tokenization_gpt2_fast.py:174\u001b[0m, in \u001b[0;36mGPT2TokenizerFast._encode_plus\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 167\u001b[0m is_split_into_words \u001b[38;5;241m=\u001b[39m kwargs\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mis_split_into_words\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28;01mFalse\u001b[39;00m)\n\u001b[1;32m 169\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39madd_prefix_space \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m is_split_into_words, (\n\u001b[1;32m 170\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mYou need to instantiate \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__class__\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__name__\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m with add_prefix_space=True \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 171\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mto use it with pretokenized inputs.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 172\u001b[0m )\n\u001b[0;32m--> 174\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_encode_plus\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/transformers/tokenization_utils_fast.py:498\u001b[0m, in \u001b[0;36mPreTrainedTokenizerFast._encode_plus\u001b[0;34m(self, text, text_pair, add_special_tokens, padding_strategy, truncation_strategy, max_length, stride, is_split_into_words, pad_to_multiple_of, return_tensors, return_token_type_ids, return_attention_mask, return_overflowing_tokens, return_special_tokens_mask, return_offsets_mapping, return_length, verbose, **kwargs)\u001b[0m\n\u001b[1;32m 475\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_encode_plus\u001b[39m(\n\u001b[1;32m 476\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 477\u001b[0m text: Union[TextInput, PreTokenizedInput],\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 494\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs\n\u001b[1;32m 495\u001b[0m ) 
\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m BatchEncoding:\n\u001b[1;32m 497\u001b[0m batched_input \u001b[38;5;241m=\u001b[39m [(text, text_pair)] \u001b[38;5;28;01mif\u001b[39;00m text_pair \u001b[38;5;28;01melse\u001b[39;00m [text]\n\u001b[0;32m--> 498\u001b[0m batched_output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_batch_encode_plus\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 499\u001b[0m \u001b[43m \u001b[49m\u001b[43mbatched_input\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 500\u001b[0m \u001b[43m \u001b[49m\u001b[43mis_split_into_words\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mis_split_into_words\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 501\u001b[0m \u001b[43m \u001b[49m\u001b[43madd_special_tokens\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43madd_special_tokens\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 502\u001b[0m \u001b[43m \u001b[49m\u001b[43mpadding_strategy\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpadding_strategy\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 503\u001b[0m \u001b[43m \u001b[49m\u001b[43mtruncation_strategy\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtruncation_strategy\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 504\u001b[0m \u001b[43m \u001b[49m\u001b[43mmax_length\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmax_length\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 505\u001b[0m \u001b[43m \u001b[49m\u001b[43mstride\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstride\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 506\u001b[0m \u001b[43m \u001b[49m\u001b[43mpad_to_multiple_of\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpad_to_multiple_of\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 507\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_tensors\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_tensors\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 508\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_token_type_ids\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_token_type_ids\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 509\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_attention_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_attention_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 510\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_overflowing_tokens\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_overflowing_tokens\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 511\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_special_tokens_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_special_tokens_mask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 512\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_offsets_mapping\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_offsets_mapping\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 513\u001b[0m \u001b[43m \u001b[49m\u001b[43mreturn_length\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_length\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 514\u001b[0m \u001b[43m \u001b[49m\u001b[43mverbose\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mverbose\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 515\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 516\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 518\u001b[0m \u001b[38;5;66;03m# Return tensor is None, then we can remove the leading batch 
axis\u001b[39;00m\n\u001b[1;32m 519\u001b[0m \u001b[38;5;66;03m# Overflowing tokens are returned as a batch of output so we keep them in this case\u001b[39;00m\n\u001b[1;32m 520\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m return_tensors \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m return_overflowing_tokens:\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/transformers/models/gpt2/tokenization_gpt2_fast.py:164\u001b[0m, in \u001b[0;36mGPT2TokenizerFast._batch_encode_plus\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 158\u001b[0m is_split_into_words \u001b[38;5;241m=\u001b[39m kwargs\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mis_split_into_words\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28;01mFalse\u001b[39;00m)\n\u001b[1;32m 159\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39madd_prefix_space \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m is_split_into_words, (\n\u001b[1;32m 160\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mYou need to instantiate \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__class__\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__name__\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m with add_prefix_space=True \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 161\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mto use it with pretokenized inputs.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 162\u001b[0m )\n\u001b[0;32m--> 164\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_batch_encode_plus\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/transformers/tokenization_utils_fast.py:425\u001b[0m, in \u001b[0;36mPreTrainedTokenizerFast._batch_encode_plus\u001b[0;34m(self, batch_text_or_text_pairs, add_special_tokens, padding_strategy, truncation_strategy, max_length, stride, is_split_into_words, pad_to_multiple_of, return_tensors, return_token_type_ids, return_attention_mask, return_overflowing_tokens, return_special_tokens_mask, return_offsets_mapping, return_length, verbose)\u001b[0m\n\u001b[1;32m 416\u001b[0m \u001b[38;5;66;03m# Set the truncation and padding strategy and restore the initial configuration\u001b[39;00m\n\u001b[1;32m 417\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mset_truncation_and_padding(\n\u001b[1;32m 418\u001b[0m padding_strategy\u001b[38;5;241m=\u001b[39mpadding_strategy,\n\u001b[1;32m 419\u001b[0m truncation_strategy\u001b[38;5;241m=\u001b[39mtruncation_strategy,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 422\u001b[0m pad_to_multiple_of\u001b[38;5;241m=\u001b[39mpad_to_multiple_of,\n\u001b[1;32m 423\u001b[0m )\n\u001b[0;32m--> 425\u001b[0m encodings \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_tokenizer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mencode_batch\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 426\u001b[0m 
\u001b[43m \u001b[49m\u001b[43mbatch_text_or_text_pairs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 427\u001b[0m \u001b[43m \u001b[49m\u001b[43madd_special_tokens\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43madd_special_tokens\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 428\u001b[0m \u001b[43m \u001b[49m\u001b[43mis_pretokenized\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mis_split_into_words\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 429\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 431\u001b[0m \u001b[38;5;66;03m# Convert encoding to dict\u001b[39;00m\n\u001b[1;32m 432\u001b[0m \u001b[38;5;66;03m# `Tokens` has type: Tuple[\u001b[39;00m\n\u001b[1;32m 433\u001b[0m \u001b[38;5;66;03m# List[Dict[str, List[List[int]]]] or List[Dict[str, 2D-Tensor]],\u001b[39;00m\n\u001b[1;32m 434\u001b[0m \u001b[38;5;66;03m# List[EncodingFast]\u001b[39;00m\n\u001b[1;32m 435\u001b[0m \u001b[38;5;66;03m# ]\u001b[39;00m\n\u001b[1;32m 436\u001b[0m \u001b[38;5;66;03m# with nested dimensions corresponding to batch, overflows, sequence length\u001b[39;00m\n\u001b[1;32m 437\u001b[0m tokens_and_encodings \u001b[38;5;241m=\u001b[39m [\n\u001b[1;32m 438\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_convert_encoding(\n\u001b[1;32m 439\u001b[0m encoding\u001b[38;5;241m=\u001b[39mencoding,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 448\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m encoding \u001b[38;5;129;01min\u001b[39;00m encodings\n\u001b[1;32m 449\u001b[0m ]\n", + "\u001b[0;31mTypeError\u001b[0m: TextInputSequence must be str" + ] + } + ], + "source": [ + "train_dataset[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "8008a3b2", + "metadata": {}, + "outputs": [], + "source": [ + "from reranker import RerankerTrainer" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "04a5caf7", + "metadata": {}, + "outputs": [], + "source": [ + "from reranker.data import GroupedTrainDataset, PredictionDataset, GroupCollator" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "adf3c64d", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "No `TrainingArguments` passed, using `output_dir=tmp_trainer`.\n", + "PyTorch: setting up devices\n", + "The default value for the training argument `--report_to` will change in v5 (from all installed integrations to none). In v5, you will need to use `--report_to all` to get the same behavior as now. 
You should start updating your code and make this info disappear :-).\n" + ] + } + ], + "source": [ + "trainer = RerankerTrainer(\n", + " model=model,\n", + " train_dataset=train_dataset,\n", + " data_collator=GroupCollator(hf_tokenizer)\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "483f19cc", + "metadata": {}, + "outputs": [], + "source": [ + "# training_args = TrainingArguments(\"test-trainer\",fp16=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "7b95edc5", + "metadata": {}, + "outputs": [], + "source": [ + "# training_args" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "175e9777", + "metadata": {}, + "outputs": [], + "source": [ + "# from transformers import TrainingArguments" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "e617b023", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "***** Running training *****\n", + " Num examples = 6058\n", + " Num Epochs = 3\n", + " Instantaneous batch size per device = 8\n", + " Total train batch size (w. parallel, distributed & accumulation) = 8\n", + " Gradient Accumulation steps = 1\n", + " Total optimization steps = 2271\n" + ] + }, + { + "ename": "AttributeError", + "evalue": "'dict' object has no attribute 'max_len'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", + "Input \u001b[0;32mIn [27]\u001b[0m, in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mtrainer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtrain\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/transformers/trainer.py:1396\u001b[0m, in \u001b[0;36mTrainer.train\u001b[0;34m(self, resume_from_checkpoint, trial, ignore_keys_for_eval, **kwargs)\u001b[0m\n\u001b[1;32m 1393\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcontrol \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcallback_handler\u001b[38;5;241m.\u001b[39mon_epoch_begin(args, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstate, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcontrol)\n\u001b[1;32m 1395\u001b[0m step \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m\n\u001b[0;32m-> 1396\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m step, inputs \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28menumerate\u001b[39m(epoch_iterator):\n\u001b[1;32m 1397\u001b[0m \n\u001b[1;32m 1398\u001b[0m \u001b[38;5;66;03m# Skip past any already trained steps if resuming training\u001b[39;00m\n\u001b[1;32m 1399\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m steps_trained_in_current_epoch \u001b[38;5;241m>\u001b[39m \u001b[38;5;241m0\u001b[39m:\n\u001b[1;32m 1400\u001b[0m steps_trained_in_current_epoch \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/utils/data/dataloader.py:530\u001b[0m, in \u001b[0;36m_BaseDataLoaderIter.__next__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 528\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sampler_iter \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 529\u001b[0m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reset()\n\u001b[0;32m--> 530\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_next_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 531\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m 532\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dataset_kind \u001b[38;5;241m==\u001b[39m _DatasetKind\u001b[38;5;241m.\u001b[39mIterable \u001b[38;5;129;01mand\u001b[39;00m \\\n\u001b[1;32m 533\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m \\\n\u001b[1;32m 534\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m>\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called:\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/utils/data/dataloader.py:570\u001b[0m, in \u001b[0;36m_SingleProcessDataLoaderIter._next_data\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 568\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_next_data\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[1;32m 569\u001b[0m index \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_next_index() \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[0;32m--> 570\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_dataset_fetcher\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfetch\u001b[49m\u001b[43m(\u001b[49m\u001b[43mindex\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[1;32m 571\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory:\n\u001b[1;32m 572\u001b[0m data \u001b[38;5;241m=\u001b[39m _utils\u001b[38;5;241m.\u001b[39mpin_memory\u001b[38;5;241m.\u001b[39mpin_memory(data)\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/utils/data/_utils/fetch.py:49\u001b[0m, in \u001b[0;36m_MapDatasetFetcher.fetch\u001b[0;34m(self, possibly_batched_index)\u001b[0m\n\u001b[1;32m 47\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mfetch\u001b[39m(\u001b[38;5;28mself\u001b[39m, possibly_batched_index):\n\u001b[1;32m 48\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mauto_collation:\n\u001b[0;32m---> 49\u001b[0m data \u001b[38;5;241m=\u001b[39m [\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset[idx] \u001b[38;5;28;01mfor\u001b[39;00m idx \u001b[38;5;129;01min\u001b[39;00m possibly_batched_index]\n\u001b[1;32m 50\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 51\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset[possibly_batched_index]\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/torch/utils/data/_utils/fetch.py:49\u001b[0m, in \u001b[0;36m\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 47\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mfetch\u001b[39m(\u001b[38;5;28mself\u001b[39m, possibly_batched_index):\n\u001b[1;32m 48\u001b[0m 
\u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mauto_collation:\n\u001b[0;32m---> 49\u001b[0m data \u001b[38;5;241m=\u001b[39m [\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdataset\u001b[49m\u001b[43m[\u001b[49m\u001b[43midx\u001b[49m\u001b[43m]\u001b[49m \u001b[38;5;28;01mfor\u001b[39;00m idx \u001b[38;5;129;01min\u001b[39;00m possibly_batched_index]\n\u001b[1;32m 50\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 51\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset[possibly_batched_index]\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/reranker/data.py:103\u001b[0m, in \u001b[0;36m__getitem__\u001b[0;34m(self, item)\u001b[0m\n\u001b[1;32m 101\u001b[0m padding=False,\n\u001b[1;32m 102\u001b[0m ) @dataclass class GroupCollator(DataCollatorWithPadding):\n\u001b[0;32m--> 103\u001b[0m \"\"\"\n\u001b[1;32m 104\u001b[0m Wrapper that does conversion from List[Tuple[encode_qry, encode_psg]] to List[qry], List[psg]\n\u001b[1;32m 105\u001b[0m and pass batch separately to the actual collator.\n", + "File \u001b[0;32m/opt/conda/lib/python3.8/site-packages/reranker/data.py:72\u001b[0m, in \u001b[0;36mcreate_one_example\u001b[0;34m(self, qry_encoding, doc_encoding)\u001b[0m\n\u001b[1;32m 70\u001b[0m negs = random.choices(group['neg'], k=8 - 1)\n\u001b[1;32m 71\u001b[0m else:\n\u001b[0;32m---> 72\u001b[0m negs = random.sample(group['neg'], k=8- 1)\n\u001b[1;32m 73\u001b[0m for neg_entry in negs:\n\u001b[1;32m 74\u001b[0m _, neg_psg = [neg_entry[k] for k in self.document_columns]\n", + "\u001b[0;31mAttributeError\u001b[0m: 'dict' object has no attribute 'max_len'" + ] + } + ], + "source": [ + "trainer.train()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d4cb3c52", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6d542f90", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "eee24662", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6e9d4e42", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 51, + "id": "3b1ca9b7", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['last_hidden_state', 'past_key_values']" + ] + }, + "execution_count": 51, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "[i for i in hf_model(**inputs).last_hidden_state]" + ] + }, + { + "cell_type": "code", + "execution_count": 79, + "id": "3f165b5a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "GPTNeoModel(\n", + " (wte): Embedding(50259, 768)\n", + " (wpe): Embedding(2048, 768)\n", + " (drop): Dropout(p=0, inplace=False)\n", + " (h): ModuleList(\n", + " (0): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " 
(ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (1)-(11): 11 further GPTNeoBlock modules, identical in structure to block (0) above (repeated layers condensed)\n", + " )\n", + " (ln_f): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + ")" + ] + }, + "execution_count": 79, +
"metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "hf_model.cuda()" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "id": "28337770", + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'json_c_all' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "Input \u001b[0;32mIn [30]\u001b[0m, in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mjson_c_all\u001b[49m[\u001b[38;5;241m0\u001b[39m]\n", + "\u001b[0;31mNameError\u001b[0m: name 'json_c_all' is not defined" + ] + } + ], + "source": [ + "json_c_all[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 70, + "id": "b780dc8c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'input_ids': tensor([[11230, 1847, 371, 1058, 5994, 334, 11, 197, 43, 1058,\n", + " 5994, 410, 11, 197, 44, 1058, 5994, 266, 11, 197,\n", + " 62, 8625, 62, 16, 1058, 725, 62, 1806, 371, 11,\n", + " 197, 62, 8625, 62, 17, 1058, 6486, 62, 1806, 406,\n", + " 11, 197, 62, 8625, 62, 18, 1058, 6486, 62, 282,\n", + " 29230, 371, 406, 11, 197, 62, 8625, 62, 19, 1058,\n", + " 751, 62, 9503, 62, 8094, 337, 11, 197, 62, 8625,\n", + " 62, 20, 1058, 8265, 371, 337, 11, 197, 62, 8625,\n", + " 62, 21, 1058, 6486, 62, 1806, 62, 21412, 406, 337,\n", + " 11, 197, 62, 8625, 62, 22, 1058, 6486, 62, 21412,\n", + " 371, 406, 337, 11, 197, 45, 399, 6, 1058, 6486,\n", + " 62, 7266, 21412, 371, 406, 337, 11, 197, 71, 1058,\n", + " 24935, 45, 796, 24935, 45, 3256, 197, 76, 1058, 337,\n", + " 197, 158, 232, 95, 285, 18872, 230, 399, 17804, 242,\n", + " 285, 18872, 230, 399, 6, 198, 1279, 4805, 6684, 37,\n", + " 42135, 29, 220, 31653, 685, 29705, 238, 1066, 62, 49270,\n", + " 62, 7266, 21412, 11, 289, 60, 198]], device='cuda:0'), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]],\n", + " device='cuda:0')}" + ] + }, + "execution_count": 70, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "inputs.to(torch.device(\"cuda:0\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "id": "9ceb2b6b", + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": 80, + "id": "32ef40f1", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 34.9 ms, sys: 20.5 ms, total: 55.3 ms\n", + "Wall time: 51.9 ms\n" + ] + }, + { + "data": { + "text/plain": [ + "tensor([[ 2.5791e-01, 2.2195e-01, 6.1754e-01, -4.1989e-01, -1.1693e+00,\n", + " -4.2416e-01, -1.2921e-02, -6.3473e-02, 1.8195e-01, -2.2530e-02,\n", + " 6.7583e-01, -5.1123e-01, 8.9316e-02, 7.8981e-01, -2.5015e-01,\n", + " 1.9408e-01, 6.3822e-01, 9.7251e-01, -7.8701e-02, -6.0445e-01,\n", + " -9.8325e-01, 2.0833e-01, -1.9024e+00, 9.8087e-01, 2.8801e-02,\n", + " -5.0874e-01, 
1.5393e+00, -3.0677e-01, 1.4771e-01, -1.6751e+00, ...,\n", + " (remaining entries of the 1 x 768 mean hidden-state tensor omitted for brevity)\n", + " ...]], device='cuda:0',\n", + " grad_fn=)" + ] + }, + "execution_count": 80, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "%%time\n", + "inputs = rk.tokenize(\"GOAL R : Type
u,\\tL : Type v,\\tM : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\t_inst_4 : add_comm_group M,\\t_inst_5 : module R M,\\t_inst_6 : lie_ring_module L M,\\t_inst_7 : lie_module R L M,\\tN N' : lie_submodule R L M,\\th : ↑N = ↑N',\\tm : M\\t⊢ m ∈ N ↔ m ∈ N'\\n \", \"rw [\\u2190 mem_coe_submodule, h]\\n\", return_tensors='pt')\n", + "inputs.to(torch.device(\"cuda:0\"))\n", + "torch.mean(hf_model(**inputs).last_hidden_state,axis=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 75, + "id": "984526f3", + "metadata": {}, + "outputs": [ + { + "ename": "TypeError", + "evalue": "expected Tensor as element 0 in argument 0, but got BatchEncoding", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", + "Input \u001b[0;32mIn [75]\u001b[0m, in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcat\u001b[49m\u001b[43m(\u001b[49m\u001b[43m(\u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43minputs\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[0;31mTypeError\u001b[0m: expected Tensor as element 0 in argument 0, but got BatchEncoding" + ] + } + ], + "source": [ + "torch.cat((inputs,inputs))" + ] + }, + { + "cell_type": "code", + "execution_count": 76, + "id": "32d6c8c6", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "GPTNeoModel(\n", + " (wte): Embedding(50259, 768)\n", + " (wpe): Embedding(2048, 768)\n", + " (drop): Dropout(p=0, inplace=False)\n", + " (h): ModuleList(\n", + " (0): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (1): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, 
inplace=False)\n", + " )\n", + " )\n", + " (2): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (3): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (4): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (5): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, 
bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (6): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (7): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (8): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (9): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " 
(ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (10): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (11): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " )\n", + " (ln_f): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + ")" + ] + }, + "execution_count": 76, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "hf_model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "182200bf", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/models/reranker/reranker-train.ipynb b/src/models/reranker/reranker-train.ipynb new file mode 100644 index 0000000..509b1c8 --- /dev/null +++ b/src/models/reranker/reranker-train.ipynb @@ -0,0 +1,1541 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "id": "6f114ed9", + "metadata": {}, + "outputs": [], + "source": [ + "# from reranker import RerankerForInference\n", + "# rk = RerankerForInference.from_pretrained(\"results/checkpoint-140000/\") # load checkpoint\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "7d737664", + 
"metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "__init__.py arguments.py \u001b[0m\u001b[01;34mdist\u001b[0m/ trainer.py\r\n", + "\u001b[01;34m__pycache__\u001b[0m/ data.py modeling.py\r\n" + ] + } + ], + "source": [ + "ls Reranker/src/reranker" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "f40dc37e", + "metadata": {}, + "outputs": [], + "source": [ + "from Reranker.src.reranker.arguments import ModelArguments, DataArguments, \\\n", + " RerankerTrainingArguments as TrainingArguments" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "fa286904", + "metadata": {}, + "outputs": [], + "source": [ + "from transformers import AutoTokenizer\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "19b97b80", + "metadata": {}, + "outputs": [], + "source": [ + "from reranker import Reranker \n", + "from transformers import AutoTokenizer" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "74ab46c1", + "metadata": {}, + "outputs": [], + "source": [ + "from typing import Optional\n", + "\n", + "import torch\n", + "import torch.functional as F\n", + "import copy\n", + "from transformers import AutoModelForSequenceClassification, AutoTokenizer,\\\n", + " PreTrainedModel, PreTrainedTokenizer, GPTNeoForSequenceClassification\n", + "\n", + "from transformers.modeling_outputs import SequenceClassifierOutput, BaseModelOutputWithPooling\n", + "from torch import nn\n", + "import torch.distributed as dist\n", + "\n", + "class Reranker(nn.Module):\n", + " def __init__(\n", + " self,\n", + " hf_model: Optional[PreTrainedModel] = None,\n", + " tokenizer: Optional[PreTrainedTokenizer] = None\n", + " ):\n", + " super().__init__()\n", + " self.hf_model = hf_model\n", + " self.tokenizer = tokenizer\n", + "\n", + " def tokenize(self, *args, **kwargs):\n", + " return self.tokenizer(*args, **kwargs)\n", + "\n", + " def forward(self, batch):\n", + " return self.hf_model(**batch)\n", + "\n", + " @classmethod\n", + " def from_pretrained(cls, pretrained_model_name_or_path: str):\n", + " hf_model = GPTNeoForSequenceClassification.from_pretrained(\n", + " pretrained_model_name_or_path,num_labels=1)\n", + " hf_tokenizer = AutoTokenizer.from_pretrained(\"EleutherAI/gpt-neo-125M\", \n", + " bos_token=\"<|startoftext|>\",\n", + " eos_token=\"<|endoftext|>\",\n", + " pad_token=\"<|pad|>\")\n", + " \n", + " hf_model.eval()\n", + " return cls(hf_model, hf_tokenizer)\n", + "\n", + " def load_pretrained_model(self, pretrained_model_name_or_path, *model_args, **kwargs):\n", + " self.hf_model = AutoModelForSequenceClassification.from_pretrained(\n", + " pretrained_model_name_or_path, *model_args, **kwargs\n", + " )\n", + "\n", + " def load_pretrained_tokenizer(self, pretrained_model_name_or_path, *inputs, **kwargs):\n", + " self.tokenizer = AutoTokenizer.from_pretrained(\n", + " pretrained_model_name_or_path, *inputs, **kwargs\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "62b981fa", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "3c31f0fb", + "metadata": {}, + "outputs": [], + "source": [ + "data_args ={\n", + " \"train_dir\"\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "071f9d9d", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Some weights of GPTNeoForSequenceClassification were not initialized from the model checkpoint 
at results/checkpoint-140000/ and are newly initialized: ['score.weight']\n", + "You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", + "Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n", + "Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n" + ] + } + ], + "source": [ + "model = Reranker.from_pretrained(\"results/checkpoint-140000/\") " + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "7781c5d2", + "metadata": {}, + "outputs": [], + "source": [ + "# rk.hf_model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9597f2cb", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4a27ffc0", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "75bb4d0c", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 150, + "id": "dcb00a2f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'best_match': 'coe : list΄ pfun := λ n, (list',\n", + " 'score': 0.3333333333333333,\n", + " 'real': 'haveI := has_reflect',\n", + " 'all': ['exact (has_lift_',\n", + " 'exact (reflect l).symm',\n", + " 'assumption',\n", + " 'induction n with n IH',\n", + " 'induction l with',\n", + " 'convert to_sublist_of_reflect_nat_abs',\n", + " 'apply_instance',\n", + " 'induction l with n l ih',\n", + " 'coe : list΄ pfun := λ n, (list',\n", + " 'convert reflect_reflect l with t ht',\n", + " \"rw [mirror_eq_write' l]\",\n", + " 'exact rintro ⟨_, rfl⟩',\n", + " 'rw ←@reflect.reflect tactic.rcases_patt',\n", + " 'exact (reflect_eq_',\n", + " 'convert reflection_reflect tactic.rcases_patt',\n", + " 'induction n with n l IH',\n", + " 'exact trans (mfld_trans_gen.mpr (reflect_reflect_reflect_modeq_right t ht)) rfl',\n", + " 'rw [← reflect_dvd_erson_aux, ← reflect_dvd_iff]',\n", + " 'exact rintro ⟨a, b, ⟨rfl⟩, rfl⟩',\n", + " 'exact (reflect_eq_self',\n", + " 'xt',\n", + " 'induction ih',\n", + " 'exact (reverse_rec_',\n", + " 'rcases this.exists_inv with ⟨R, hR⟩',\n", + " 'induction l with n l IH',\n", + " 'induction n with n l IH generalizing l',\n", + " 'exact (has_coe_to',\n", + " 'exact ⟨λ pos h1, forall_path h1, pos.reflect, diff h1.symm, diff pos.fst⟩',\n", + " 'rw [← forall_and_distrib, ←reflecting_C]',\n", + " 'exact (reflect_iff_mod'],\n", + " 'prompt': 'GOAL has_reflect : _root_.has_reflect tactic.rcases_patt,\\tl : listΠ tactic.rcases_patt\\t⊢ reflected l\\n PROOFSTEP '}" + ] + }, + "execution_count": 150, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "each = json_c[999]\n", + "each" + ] + }, + { + "cell_type": "code", + "execution_count": 151, + "id": "b4d585e6", + "metadata": {}, + "outputs": [], + "source": [ + "test = each[\"prompt\"].replace(\"GOAL\",\"\").replace(\"PROOFSTEP\",\"\")" + ] + }, + { + "cell_type": "code", + "execution_count": 152, + "id": "47fa1511", + "metadata": {}, + "outputs": [], + "source": [ + "results = [i for i in each[\"all\"]]" + ] + }, + { + "cell_type": "code", + "execution_count": 156, + "id": "9d01f286", + "metadata": {}, + "outputs": [], + "source": [ + "l = []\n", + "for i in results+[each[\"real\"]]:\n", + " inputs = rk.tokenize(test, i, return_tensors='pt')\n", + " inputs.to(torch.device(\"cuda:0\"))\n", + " score = 
rk(inputs).logits\n", + " l.append((score.cpu().detach().numpy()[0][1],i))\n", + " l.sort(key=lambda x:x[0])" + ] + }, + { + "cell_type": "code", + "execution_count": 157, + "id": "24f28a1c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[(-2.0707211, 'rw [← reflect_dvd_erson_aux, ← reflect_dvd_iff]'),\n", + " (-1.9039781, 'assumption'),\n", + " (-1.7388834, 'rw [← forall_and_distrib, ←reflecting_C]'),\n", + " (-1.6454703,\n", + " 'exact trans (mfld_trans_gen.mpr (reflect_reflect_reflect_modeq_right t ht)) rfl'),\n", + " (-1.3055459, 'exact rintro ⟨_, rfl⟩'),\n", + " (-1.235331, 'exact rintro ⟨a, b, ⟨rfl⟩, rfl⟩'),\n", + " (-1.2266147, 'convert to_sublist_of_reflect_nat_abs'),\n", + " (-1.1743462,\n", + " 'exact ⟨λ pos h1, forall_path h1, pos.reflect, diff h1.symm, diff pos.fst⟩'),\n", + " (-1.0940642, \"rw [mirror_eq_write' l]\"),\n", + " (-0.9648125, 'rcases this.exists_inv with ⟨R, hR⟩'),\n", + " (-0.9581935, 'induction n with n l IH generalizing l'),\n", + " (-0.92618155, 'rw ←@reflect.reflect tactic.rcases_patt'),\n", + " (-0.82797396, 'exact (reflect_eq_self'),\n", + " (-0.8064505, 'convert reflection_reflect tactic.rcases_patt'),\n", + " (-0.43701363, 'induction n with n l IH'),\n", + " (-0.38078067, 'induction l with n l IH'),\n", + " (-0.29419905, 'exact (reflect l).symm'),\n", + " (-0.24308807, 'induction n with n IH'),\n", + " (-0.14588475, 'haveI := has_reflect'),\n", + " (-0.11893135, 'convert reflect_reflect l with t ht'),\n", + " (-0.04986143, 'exact (has_lift_'),\n", + " (-0.024886109, 'induction l with n l ih'),\n", + " (0.02763924, 'induction ih'),\n", + " (0.031445414, 'coe : list΄ pfun := λ n, (list'),\n", + " (0.11387861, 'exact (reflect_iff_mod'),\n", + " (0.14731786, 'exact (has_coe_to'),\n", + " (0.1782412, 'xt'),\n", + " (0.21090153, 'apply_instance'),\n", + " (0.4576639, 'exact (reflect_eq_'),\n", + " (0.57259375, 'induction l with'),\n", + " (1.2164512, 'exact (reverse_rec_')]" + ] + }, + "execution_count": 157, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "l" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f39d0e6c", + "metadata": {}, + "outputs": [], + "source": [ + "\"GOAL α : Type u,\\t_inst_1 : inhabited α,\\tb : buffer α,\\ti : ℕ,\\th : i < b.size\\t⊢ b.read ⟨i, h⟩ = b.read i\\n cases \"" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "id": "36659dea", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "huggingface/tokenizers: The current process just got forked, after parallelism has already been used. 
Disabling parallelism to avoid deadlocks...\n", + "To disable this warning, you can either:\n", + "\t- Avoid using `tokenizers` before the fork if possible\n", + "\t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n", + "data_test.jsonl data_train_prepared.jsonl \u001b[0m\u001b[01;34mwandb\u001b[0m/\r\n", + "data_test_prepared.jsonl data_valid.jsonl\r\n", + "data_train.jsonl data_valid_prepared.jsonl\r\n" + ] + } + ], + "source": [ + "ls files_upload" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "025d4138", + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "\n", + "with open('files_upload/data_train_prepared.jsonl', 'r') as json_file:\n", + " json_c_all = [json.loads(i) for i in list(json_file)]" + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "id": "8757814d", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "168590" + ] + }, + "execution_count": 65, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(json_c_all)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b7c4c9c5", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1d2943cf", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6b82f0d9", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "ab3ad558", + "metadata": {}, + "outputs": [], + "source": [ + "from transformers import AutoTokenizer" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "9c253b6f", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n", + "Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n" + ] + } + ], + "source": [ + "\n", + "hf_tokenizer = AutoTokenizer.from_pretrained(\"EleutherAI/gpt-neo-125M\", \n", + " bos_token=\"<|startoftext|>\",\n", + " eos_token=\"<|endoftext|>\",\n", + " pad_token=\"<|pad|>\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "058c5d59", + "metadata": {}, + "outputs": [], + "source": [ + "from transformers import GPTNeoModel" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "abda3208", + "metadata": {}, + "outputs": [], + "source": [ + "data_args = {\n", + " \"train_dir\":\"data/train\",\n", + " \"train_path\":\"data/train/train.json\",\n", + " \"pred_dir\":\"data/dev\",\n", + " \"pred_path\":\"data/train/dev.json\",\n", + " \"train_group_size\":8\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "d8494197", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using custom data configuration default-21c912046bcb6d4c\n", + "Reusing dataset json (/root/.cache/huggingface/datasets/json/default-21c912046bcb6d4c/0.0.0/ac0ca5f5289a6cf108e706efcf040422dbbfa8e658dee6a819f20d76bb84d26b)\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "4ebdb9d53f1441018c6bdf00699582b3", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/1 [00:00\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m trainer \u001b[38;5;241m=\u001b[39m RerankerTrainer(\n\u001b[0;32m----> 2\u001b[0m 
model\u001b[38;5;241m=\u001b[39m\u001b[43mmodel\u001b[49m,\n\u001b[1;32m 3\u001b[0m train_dataset\u001b[38;5;241m=\u001b[39mtrain_dataset,\n\u001b[1;32m 4\u001b[0m data_collator\u001b[38;5;241m=\u001b[39mGroupCollator(hf_tokenizer)\n\u001b[1;32m 5\u001b[0m )\n", + "\u001b[0;31mNameError\u001b[0m: name 'model' is not defined" + ] + } + ], + "source": [ + "trainer = RerankerTrainer(\n", + " model=model,\n", + " train_dataset=train_dataset,\n", + " data_collator=GroupCollator(hf_tokenizer)\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "c99cedba", + "metadata": {}, + "outputs": [], + "source": [ + "# training_args = TrainingArguments(\"test-trainer\",fp16=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "5c902c76", + "metadata": {}, + "outputs": [], + "source": [ + "# training_args" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "37dcdae8", + "metadata": {}, + "outputs": [], + "source": [ + "# from transformers import TrainingArguments" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "bae11f81", + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'trainer' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "Input \u001b[0;32mIn [13]\u001b[0m, in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mtrainer\u001b[49m\u001b[38;5;241m.\u001b[39mtrain()\n", + "\u001b[0;31mNameError\u001b[0m: name 'trainer' is not defined" + ] + } + ], + "source": [ + "trainer.train()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fa6231f1", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "021cb691", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0a138f88", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "68db64c8", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 51, + "id": "13779610", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['last_hidden_state', 'past_key_values']" + ] + }, + "execution_count": 51, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "[i for i in hf_model(**inputs).last_hidden_state]" + ] + }, + { + "cell_type": "code", + "execution_count": 79, + "id": "08221fdd", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "GPTNeoModel(\n", + " (wte): Embedding(50259, 768)\n", + " (wpe): Embedding(2048, 768)\n", + " (drop): Dropout(p=0, inplace=False)\n", + " (h): ModuleList(\n", + " (0): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, 
elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (1): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (2): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (3): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (4): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, 
bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (5): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (6): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (7): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (8): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, 
out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (9): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (10): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (11): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " )\n", + " (ln_f): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + ")" + ] + }, + "execution_count": 79, + "metadata": {}, + "output_type": 
"execute_result" + } + ], + "source": [ + "hf_model.cuda()" + ] + }, + { + "cell_type": "code", + "execution_count": 81, + "id": "436712aa", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'prompt': \"GOAL α : Type u,\\tb : buffer α,\\ti : ℕ,\\th : i < b.size,\\tv : α\\t⊢ b.write ⟨i, h⟩ v = b.write' i v\\n PROOFSTEP \",\n", + " 'completion': \" cases b; unfold write write'; simp [array.write_eq_write']\\n\"}" + ] + }, + "execution_count": 81, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "json_c_all[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 70, + "id": "94581431", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'input_ids': tensor([[11230, 1847, 371, 1058, 5994, 334, 11, 197, 43, 1058,\n", + " 5994, 410, 11, 197, 44, 1058, 5994, 266, 11, 197,\n", + " 62, 8625, 62, 16, 1058, 725, 62, 1806, 371, 11,\n", + " 197, 62, 8625, 62, 17, 1058, 6486, 62, 1806, 406,\n", + " 11, 197, 62, 8625, 62, 18, 1058, 6486, 62, 282,\n", + " 29230, 371, 406, 11, 197, 62, 8625, 62, 19, 1058,\n", + " 751, 62, 9503, 62, 8094, 337, 11, 197, 62, 8625,\n", + " 62, 20, 1058, 8265, 371, 337, 11, 197, 62, 8625,\n", + " 62, 21, 1058, 6486, 62, 1806, 62, 21412, 406, 337,\n", + " 11, 197, 62, 8625, 62, 22, 1058, 6486, 62, 21412,\n", + " 371, 406, 337, 11, 197, 45, 399, 6, 1058, 6486,\n", + " 62, 7266, 21412, 371, 406, 337, 11, 197, 71, 1058,\n", + " 24935, 45, 796, 24935, 45, 3256, 197, 76, 1058, 337,\n", + " 197, 158, 232, 95, 285, 18872, 230, 399, 17804, 242,\n", + " 285, 18872, 230, 399, 6, 198, 1279, 4805, 6684, 37,\n", + " 42135, 29, 220, 31653, 685, 29705, 238, 1066, 62, 49270,\n", + " 62, 7266, 21412, 11, 289, 60, 198]], device='cuda:0'), 'attention_mask': tensor([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", + " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]],\n", + " device='cuda:0')}" + ] + }, + "execution_count": 70, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "inputs.to(torch.device(\"cuda:0\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "id": "c37563dd", + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": 80, + "id": "a65b11c1", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 34.9 ms, sys: 20.5 ms, total: 55.3 ms\n", + "Wall time: 51.9 ms\n" + ] + }, + { + "data": { + "text/plain": [ + "tensor([[ 2.5791e-01, 2.2195e-01, 6.1754e-01, -4.1989e-01, -1.1693e+00,\n", + " -4.2416e-01, -1.2921e-02, -6.3473e-02, 1.8195e-01, -2.2530e-02,\n", + " 6.7583e-01, -5.1123e-01, 8.9316e-02, 7.8981e-01, -2.5015e-01,\n", + " 1.9408e-01, 6.3822e-01, 9.7251e-01, -7.8701e-02, -6.0445e-01,\n", + " -9.8325e-01, 2.0833e-01, -1.9024e+00, 9.8087e-01, 2.8801e-02,\n", + " -5.0874e-01, 1.5393e+00, -3.0677e-01, 1.4771e-01, -1.6751e+00,\n", + " -1.6980e+00, 8.8647e-01, 8.6847e-01, 8.0590e-01, -1.2548e+00,\n", + " -1.8401e-02, -5.4263e-01, -6.5954e-01, -3.4339e-01, -4.9939e-01,\n", + " 5.0794e-01, 4.8216e-01, 
6.5291e-02, 6.9481e-02, 9.3943e-01,\n", + " 1.1056e-01, -9.0834e-01, 5.9255e-01, 6.8692e-01, 2.9180e-01,\n", + " 3.6947e-01, 7.4422e-01, 1.4877e-01, 4.4374e-01, 1.0055e+00,\n", + " -7.2418e-01, 1.5478e-01, 2.2642e-01, 2.0349e-01, 6.3065e-01,\n", + " -4.8900e-01, -1.1986e+00, -2.5607e-01, 9.0008e-01, 1.2902e-01,\n", + " 5.3663e+00, -1.3429e-01, 7.9810e-01, -9.2527e-01, 4.1761e-01,\n", + " -2.6186e-01, -2.0151e-01, -3.0563e-01, -2.9163e-01, 4.9363e-01,\n", + " -1.0187e+00, 8.9439e-02, -1.5086e-01, 2.4725e-01, -1.8182e-01,\n", + " -5.4884e-01, 1.3904e+00, -7.3445e-01, 1.7192e-01, -8.2985e-01,\n", + " 3.2465e-02, -5.8382e+00, -1.2886e-01, 1.2982e+00, -2.2217e-01,\n", + " 1.2597e+00, -2.0946e-01, -1.3153e+00, 1.8629e-01, 5.6036e-01,\n", + " -8.0274e-03, 3.7031e-02, -7.2689e-02, -4.2259e-01, -7.3990e-01,\n", + " -7.2865e-01, -3.5358e-01, -1.0636e+00, -7.8831e-02, -1.8759e-01,\n", + " 8.3637e-01, 3.1117e-01, 6.1989e-01, -1.2414e-01, 5.0989e-01,\n", + " 1.1214e+00, -1.0783e+00, -5.9599e-01, 7.6019e-01, -7.4424e-01,\n", + " 7.1560e-02, 2.7303e-01, -9.0587e-01, 4.4073e-01, 1.9910e-01,\n", + " -6.6675e-02, 3.5972e-01, 2.7237e-01, 4.4705e-01, 1.5214e+00,\n", + " 2.8673e-01, -9.4406e-01, -1.5607e-01, -8.6961e-01, 3.1393e-01,\n", + " 1.2868e-01, 7.9200e-02, -2.5765e-01, -5.2737e-01, 2.6680e-02,\n", + " 1.1275e+00, 1.4169e-01, -4.6904e-01, 4.5877e-01, -1.5315e-01,\n", + " 1.8020e-01, 2.1740e-01, -5.3287e-01, -2.6836e-01, 2.9287e-01,\n", + " -4.4783e-01, 1.4212e-01, 4.7490e-01, 6.1778e-02, 4.4987e-01,\n", + " 1.7372e-01, 5.6562e-02, -1.5217e+00, 1.1232e+00, 5.5890e-01,\n", + " 4.6813e-01, -7.9550e-01, -1.7125e+00, 5.0035e-01, -4.3188e-01,\n", + " -1.6289e+00, 1.5501e-02, -1.0270e+00, 2.3431e-01, -3.7337e-01,\n", + " -3.9971e-01, 5.9703e-02, 5.1988e-01, 3.0298e-01, -9.9593e-01,\n", + " 9.9599e-02, -1.8770e-01, 1.0976e-03, 3.1389e-01, -6.7635e-02,\n", + " -2.5654e-01, -1.0079e+00, -3.3847e-01, 1.0830e+00, -8.5420e-02,\n", + " -6.8329e-01, -2.2209e-01, 9.8029e-01, -3.5572e-01, -9.7739e-02,\n", + " 3.1339e-01, -7.5790e-01, -1.1114e-01, 1.3195e+00, -2.6651e-01,\n", + " -8.6284e-02, -6.1514e-02, -9.6365e-01, 5.8111e-01, -7.9026e-01,\n", + " 3.4359e-02, 1.4805e-01, -3.9151e-01, -3.3965e-01, 1.3782e+00,\n", + " 1.5073e-01, 5.0220e-01, 9.1980e-02, -1.2238e-01, -5.1866e-02,\n", + " -1.1727e-01, -8.5541e-01, -3.1244e-01, 1.1770e-01, 1.7958e+00,\n", + " -2.2563e-01, -5.6103e-01, -1.1069e+00, -9.7789e-01, -1.2189e+00,\n", + " -1.0113e+00, 3.3071e-01, 1.4034e+00, -5.0933e-01, -6.8410e-02,\n", + " 3.9110e-01, -2.6448e-01, -1.6042e-01, -6.9321e-01, 3.4150e-01,\n", + " -5.1736e-01, 1.5081e-01, -7.2690e-02, -4.4258e-01, 9.6587e-01,\n", + " 1.1243e+00, 3.6307e-01, -8.3291e-02, 9.4453e-01, 5.9471e-01,\n", + " -1.9810e-01, -1.2600e+00, 1.1084e-01, 6.1746e-01, -4.3833e-01,\n", + " 4.9749e-01, -6.7068e-01, 7.3220e-01, -3.6291e-01, 3.4117e+00,\n", + " 4.2510e-01, -1.3726e+00, 9.1664e-01, 3.2722e-01, 4.9647e-02,\n", + " 2.7723e-01, 1.3886e+00, 2.0157e+00, 3.6911e-01, 2.2851e-01,\n", + " -1.3195e+00, -2.0280e-01, 2.3085e-01, -1.2566e+00, -6.1733e-02,\n", + " -4.9790e-01, -4.2601e-01, -8.4731e-01, -1.2078e-01, -8.5055e-01,\n", + " 8.4002e-02, -8.0585e-01, -8.6329e-01, 5.9033e-02, 5.8254e-01,\n", + " -7.6936e-01, 1.0379e+00, 1.8664e-01, 2.5744e-01, -3.4262e-01,\n", + " 1.7607e+00, -2.5542e-01, -5.9538e-01, 4.8452e+00, 5.2938e-01,\n", + " 4.5253e-01, 5.4308e-01, 7.8852e-01, -8.5713e-01, 5.7179e-01,\n", + " -2.5038e-02, 4.0092e-01, 1.6203e+00, 8.9025e-01, -3.7051e-01,\n", + " 2.8878e-01, -4.0672e-01, -2.8992e-01, 1.2436e+00, 
8.4281e-01,\n", + " 3.3404e-01, 3.5212e+00, -6.2464e-01, 1.6889e-01, 5.0950e-01,\n", + " 6.0882e-01, -1.0021e+00, -1.4234e+00, -1.8274e-01, -1.1205e+00,\n", + " 5.8818e-01, 2.0395e-01, -1.5636e-01, -2.7134e-01, 1.7388e-01,\n", + " 3.6655e-01, 3.5106e-02, 1.1893e+00, -9.5504e-01, -1.7335e+00,\n", + " 2.6121e-01, 1.5959e-01, 2.1152e-01, 4.6638e-01, 3.0343e-02,\n", + " 1.1501e+00, 7.6918e-01, -1.0978e-01, 2.6012e-01, 7.4091e-01,\n", + " -4.8964e-02, -2.2023e-01, 3.8122e-01, 5.8171e-01, -3.0145e-01,\n", + " -2.5431e-01, -5.6066e-02, -1.4672e-02, -7.7137e-01, -7.6175e-02,\n", + " 7.0992e-03, 3.3074e-01, 2.0417e-01, -2.2275e-02, -1.8799e+00,\n", + " 3.2200e-01, -3.4532e-01, -1.2397e+00, 7.1686e-01, -9.5655e-01,\n", + " 6.4862e-01, -1.4755e-01, -1.1373e+00, -4.0796e-01, -9.8583e-01,\n", + " -7.0172e-01, 3.0339e-02, 9.0085e-01, -5.3020e-01, 1.2186e-01,\n", + " 1.3400e-01, 3.0865e-01, 4.4937e-01, -9.9567e-01, 1.1413e+00,\n", + " -9.1110e-01, 4.7118e-01, 4.6427e-01, -1.9850e+00, -4.4892e-02,\n", + " -8.3049e-01, -1.2660e-01, -5.3937e-02, 6.8228e-01, -7.8966e-01,\n", + " 4.0273e-01, -7.6949e-02, -4.4711e-01, -3.7368e-02, -1.4817e-01,\n", + " -2.0367e-01, 6.8044e-01, -2.3592e-01, 2.1292e-02, -1.2443e-02,\n", + " -2.8481e-01, -1.0849e+00, 2.0934e-02, -9.8099e-01, -2.8614e-01,\n", + " -1.1888e-01, 2.3289e+00, -8.4848e-01, 1.0650e+00, -4.8800e-01,\n", + " -7.3174e-01, 1.7405e+00, -1.1867e-01, 6.0654e-01, -3.2369e-01,\n", + " 4.2891e-03, 1.3777e-01, -8.7465e-01, 3.9065e-01, 5.8552e-01,\n", + " -1.1822e-01, -2.8735e-01, 4.5932e-01, -1.0558e+00, -2.0773e-01,\n", + " -4.2257e-02, -7.4723e-01, -1.0418e-01, 1.1046e-01, 2.5200e-01,\n", + " -7.7008e-01, -6.9124e-01, 1.2328e-02, 9.2941e-01, -9.5823e-01,\n", + " 7.8868e-01, 4.8766e-01, 1.2000e+00, 2.0337e+00, 1.3819e+00,\n", + " -7.0696e-01, -3.9163e+00, -4.0065e-01, 2.4839e-01, 6.8717e-01,\n", + " 2.4711e-01, -2.3006e-01, 8.9260e-01, -1.1988e-01, -1.0582e+00,\n", + " -6.1881e-01, 1.9576e-01, -3.5435e-01, 3.7132e-01, -1.0532e+00,\n", + " 5.1836e-01, 1.4250e-01, 3.1409e-01, 6.4463e-01, -1.0462e+00,\n", + " 6.6189e-02, -1.4131e-02, -5.6984e-01, 7.3621e-01, 4.5947e-01,\n", + " -3.4809e-02, -5.7257e-01, -2.4561e-01, -3.6593e-01, -2.4191e-02,\n", + " -8.4636e-01, -2.9382e-01, 8.2950e-01, -2.8704e-01, -6.1543e-01,\n", + " -2.8003e-02, 4.3479e-01, 1.0016e+00, 4.4122e-01, -6.2453e-02,\n", + " -1.7907e+00, 1.3726e+00, -1.7483e-01, -6.3347e-01, -3.6389e-01,\n", + " 5.5712e-01, 1.7147e-01, -1.7688e-02, 6.7811e-01, 4.3906e-02,\n", + " -5.6973e-01, -4.2983e-01, 7.6825e-01, 1.2913e+00, -4.2006e-01,\n", + " 5.0041e-01, -5.7768e-01, 1.0845e-01, -5.6376e-01, -3.3116e-01,\n", + " 1.9473e-01, 1.6148e+00, -7.9727e-01, 1.8664e-01, 6.9911e-01,\n", + " -7.8633e-01, 3.7915e+00, -1.5934e-01, 1.3869e-01, -4.3771e-01,\n", + " -6.2996e-01, 2.0706e+00, 2.8263e-01, -4.5241e-01, -5.7089e-02,\n", + " 1.1213e+00, -4.9894e-02, 2.0719e-01, -9.7199e-01, -1.4038e-01,\n", + " 1.8437e+00, -5.0838e-01, -4.5824e-01, 1.8325e-02, 1.6349e-01,\n", + " 2.3760e-01, 9.2985e-01, 4.2633e-01, 6.1916e-01, -4.9182e-01,\n", + " 1.0441e-01, 1.4710e+00, -1.0186e+00, 6.7099e-02, 1.1068e+00,\n", + " 3.1317e-01, -7.6338e-02, 8.3072e-01, 2.1650e-02, -1.7168e-01,\n", + " 6.7508e-01, 3.4145e-01, -1.5327e+00, 5.8343e-01, -1.1839e+00,\n", + " -6.2361e-02, 5.3014e-02, 1.2807e+00, 1.0915e+00, -8.6758e-01,\n", + " 1.0344e+00, 3.5148e-01, -9.8511e-01, -7.7864e-01, 1.2014e-02,\n", + " -8.4390e-01, -1.0858e+00, 5.2799e-01, 2.7797e-01, 8.7393e-01,\n", + " 1.6785e+00, 4.9440e-01, 2.5744e-02, -1.8004e+00, -5.2505e-01,\n", + " 
-1.7587e-01, -6.3750e-01, 3.1973e-01, -7.9784e-02, -2.1100e-01,\n", + " 8.2759e-01, -1.2025e+00, -1.4338e+00, 1.4730e+00, -1.0535e+00,\n", + " 4.8772e-01, 8.9435e-01, -1.0030e+00, 3.5396e-01, -3.7726e-01,\n", + " -4.0844e-01, -6.5378e-01, -2.3089e-01, -1.0119e-01, 4.6434e-01,\n", + " -9.2134e-01, 4.6914e-01, 3.7056e-01, -7.9002e-01, 4.0602e-03,\n", + " -7.7099e-01, 6.6663e-01, 3.4323e-01, -1.0957e+00, 9.0008e-01,\n", + " -4.0340e-01, -8.0557e-01, 2.0100e+00, -1.1193e+00, -3.3462e-01,\n", + " -4.5713e-01, 1.7430e+00, -1.3358e+00, -7.0499e-02, -5.5699e-01,\n", + " 4.2485e-01, 5.6796e-01, 3.1242e-01, 1.5269e+00, -1.2979e+00,\n", + " -5.4608e-02, 1.1424e+00, 1.0122e+00, 6.8035e-01, 1.3340e+00,\n", + " 3.0875e-01, 6.4539e-01, -3.2952e-01, -1.0114e+00, 7.5653e-01,\n", + " -5.5787e-01, -1.6231e+00, -4.5545e-01, 6.2937e-02, 7.6943e-01,\n", + " -1.0360e-01, -7.2392e-01, 1.4437e+00, 3.0526e-02, 1.0234e-01,\n", + " -4.8389e-02, 1.0221e+00, 2.0669e-01, -2.4083e+00, 8.9863e-01,\n", + " 4.7944e-01, -8.4005e-01, -6.3425e-01, -7.0860e-01, -3.2115e-01,\n", + " 6.3741e-01, -1.9570e-01, -4.6066e-01, 5.6871e-01, 3.1966e-01,\n", + " 6.6062e-01, -2.6750e-01, 8.9014e-01, -5.9421e-01, 1.2518e+00,\n", + " -6.4939e-01, 9.3352e-01, 1.8120e-01, -2.7597e-01, 1.2792e-01,\n", + " -3.4255e-01, 7.5101e-01, -8.0686e-01, 8.6254e-01, -9.3071e-01,\n", + " 5.9072e-01, 5.2779e-01, 7.6271e-01, -3.3893e-01, 3.8799e-01,\n", + " -4.3208e-01, -1.6288e-01, 5.6881e-02, 1.9117e-01, 2.8365e-02,\n", + " 9.5397e-01, -1.8657e-01, 3.1514e-01, -5.9065e-01, 1.5293e+00,\n", + " 3.0394e-01, -5.9831e-01, -4.2810e-01, 7.7838e-01, -7.6792e-01,\n", + " -7.1487e-01, -1.4372e-01, -1.2557e+00, 7.7492e-02, -5.5653e-01,\n", + " 1.7257e-01, 6.0696e-01, 4.0132e-01, -4.6850e-01, -3.3263e-01,\n", + " -1.7245e+00, 1.2748e+00, 3.4657e-01, -2.1751e-01, 4.7821e-01,\n", + " 8.0060e-01, 2.2041e-01, 4.5081e-01, 1.1787e+00, 4.2144e-02,\n", + " -6.8987e-01, 8.5284e-01, 8.6362e-01, 5.5671e-01, -3.9498e-01,\n", + " -8.1284e-01, 3.5733e-01, 2.3513e-01, 8.8358e-01, 7.1591e-01,\n", + " 2.1957e-01, 2.4697e-01, -7.2402e-01, -3.5483e-01, -2.1268e+00,\n", + " 1.0866e+00, -7.8985e-01, -1.0879e-01, 7.2387e-02, 1.6729e+00,\n", + " 1.5013e-01, 5.7198e-01, 4.3690e-01, -7.7675e-01, -1.0156e+00,\n", + " 7.6743e-01, -9.8260e-02, -9.4869e-01, 9.8678e-01, 5.2385e-01,\n", + " 3.7922e-01, 2.1336e-01, -1.0158e-01, 1.2670e+00, 5.4585e-01,\n", + " 7.0972e-01, -1.7041e+00, 1.3264e-01, -2.2960e-01, 8.2514e-01,\n", + " -1.0594e+00, -9.2191e-01, -2.6381e+00, 4.5535e-01, -1.5036e+00,\n", + " -1.3049e+00, 3.2866e-01, -6.8583e-01, -4.3822e-02, -1.0406e+00,\n", + " 1.4157e+00, 1.4056e+00, 2.3406e-01, -1.4198e+00, -4.1428e-01,\n", + " 7.1460e-01, -9.9952e-01, -8.9575e-02, -9.3440e-01, 2.4660e-01,\n", + " -1.5474e-02, 1.0905e+00, -2.7923e-01, 7.2442e-02, -1.7743e-01,\n", + " 7.7267e-03, -2.7213e-01, 2.9660e-02, -5.0046e-01, -1.1490e+00,\n", + " 9.7945e-01, -1.2734e+00, -6.2709e-01, -3.5033e-01, 2.5188e-01,\n", + " 3.4297e-01, 1.1887e+00, -2.2985e-01, 8.3060e-01, 5.9193e-02,\n", + " -1.3219e+00, -7.2062e-01, 7.3293e-02, -1.1011e+00, -2.7101e-01,\n", + " 8.7101e-02, 2.4765e-01, -2.5952e-01]], device='cuda:0',\n", + " grad_fn=)" + ] + }, + "execution_count": 80, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "%%time\n", + "inputs = rk.tokenize(\"GOAL R : Type u,\\tL : Type v,\\tM : Type w,\\t_inst_1 : comm_ring R,\\t_inst_2 : lie_ring L,\\t_inst_3 : lie_algebra R L,\\t_inst_4 : add_comm_group M,\\t_inst_5 : module R M,\\t_inst_6 : lie_ring_module L M,\\t_inst_7 : 
lie_module R L M,\\tN N' : lie_submodule R L M,\\th : ↑N = ↑N',\\tm : M\\t⊢ m ∈ N ↔ m ∈ N'\\n \", \"rw [\\u2190 mem_coe_submodule, h]\\n\", return_tensors='pt')\n", + "inputs.to(torch.device(\"cuda:0\"))\n", + "torch.mean(hf_model(**inputs).last_hidden_state,axis=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 75, + "id": "e3d8b8d3", + "metadata": {}, + "outputs": [ + { + "ename": "TypeError", + "evalue": "expected Tensor as element 0 in argument 0, but got BatchEncoding", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", + "Input \u001b[0;32mIn [75]\u001b[0m, in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcat\u001b[49m\u001b[43m(\u001b[49m\u001b[43m(\u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43minputs\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[0;31mTypeError\u001b[0m: expected Tensor as element 0 in argument 0, but got BatchEncoding" + ] + } + ], + "source": [ + "torch.cat((inputs,inputs))" + ] + }, + { + "cell_type": "code", + "execution_count": 76, + "id": "257a145d", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "GPTNeoModel(\n", + " (wte): Embedding(50259, 768)\n", + " (wpe): Embedding(2048, 768)\n", + " (drop): Dropout(p=0, inplace=False)\n", + " (h): ModuleList(\n", + " (0): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (1): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (2): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " 
(attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (3): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (4): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (5): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (6): GPTNeoBlock(\n", + " 
(ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (7): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (8): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (9): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, 
bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (10): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " (11): GPTNeoBlock(\n", + " (ln_1): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (attn): GPTNeoAttention(\n", + " (attention): GPTNeoSelfAttention(\n", + " (attn_dropout): Dropout(p=0, inplace=False)\n", + " (resid_dropout): Dropout(p=0, inplace=False)\n", + " (k_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (v_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (q_proj): Linear(in_features=768, out_features=768, bias=False)\n", + " (out_proj): Linear(in_features=768, out_features=768, bias=True)\n", + " )\n", + " )\n", + " (ln_2): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + " (mlp): GPTNeoMLP(\n", + " (c_fc): Linear(in_features=768, out_features=3072, bias=True)\n", + " (c_proj): Linear(in_features=3072, out_features=768, bias=True)\n", + " (act): NewGELUActivation()\n", + " (dropout): Dropout(p=0, inplace=False)\n", + " )\n", + " )\n", + " )\n", + " (ln_f): LayerNorm((768,), eps=1e-05, elementwise_affine=True)\n", + ")" + ] + }, + "execution_count": 76, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "hf_model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0d074ea0", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/models/train_model.py b/src/models/train_model.py deleted file mode 100644 index e69de29..0000000