{"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"provenance":[],"gpuType":"T4","authorship_tag":"ABX9TyMTReh0EoDSDru3JR0TKQID"},"kernelspec":{"name":"python3","display_name":"Python 3"},"language_info":{"name":"python"},"accelerator":"GPU"},"cells":[{"cell_type":"code","source":["from google.colab import drive\n","drive.mount('/content/drive')"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"6bCby4UHZJzE","executionInfo":{"status":"ok","timestamp":1715176156744,"user_tz":240,"elapsed":2647,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"22521c69-b709-497f-e60d-2d5b60ce957e"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount(\"/content/drive\", force_remount=True).\n"]}]},{"cell_type":"code","source":["ls"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"X3qPPBZwHs_v","executionInfo":{"status":"ok","timestamp":1715176178163,"user_tz":240,"elapsed":9,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"23a75c2e-37ea-411d-f5fe-867359d84f0a"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["basic_prediction.ipynb \u001b[0m\u001b[01;34mdata\u001b[0m/ DSL_statement_generation.ipynb ra_tasks.md \u001b[01;34mscripts\u001b[0m/\n"]}]},{"cell_type":"code","source":["from transformers import (\n"," AutoTokenizer,\n"," AutoModelForSequenceClassification\n",")\n","\n","import numpy as np\n","import pandas as pd\n","import torch\n","from sklearn.feature_extraction.text import TfidfVectorizer\n","import torch.nn as nn\n","import torch.optim as optim\n","from torch.utils.data import DataLoader, TensorDataset\n","from sklearn.model_selection import train_test_split\n","from sklearn.preprocessing import StandardScaler\n","from transformers import BertTokenizer, BertModel\n","from torch.utils.data import DataLoader, Dataset\n","from sklearn.metrics import f1_score, accuracy_score\n"],"metadata":{"id":"UKzsApTNV1Qx"},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":["### Surrogate Q"],"metadata":{"id":"hBqyPKvjW9cv"}},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"D-JnQZOQY6Xh","executionInfo":{"status":"ok","timestamp":1715176154101,"user_tz":240,"elapsed":16149,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"0152e36e-afda-4f29-c31e-0f6625e3418b"},"outputs":[{"output_type":"stream","name":"stdout","text":["Requirement already satisfied: openai in /usr/local/lib/python3.10/dist-packages (1.26.0)\n","Requirement already satisfied: anyio<5,>=3.5.0 in /usr/local/lib/python3.10/dist-packages (from openai) (3.7.1)\n","Requirement already satisfied: distro<2,>=1.7.0 in /usr/lib/python3/dist-packages (from openai) (1.7.0)\n","Requirement already satisfied: httpx<1,>=0.23.0 in /usr/local/lib/python3.10/dist-packages (from openai) (0.27.0)\n","Requirement already satisfied: pydantic<3,>=1.9.0 in /usr/local/lib/python3.10/dist-packages (from openai) (2.7.1)\n","Requirement already satisfied: sniffio in /usr/local/lib/python3.10/dist-packages (from openai) (1.3.1)\n","Requirement already satisfied: tqdm>4 in /usr/local/lib/python3.10/dist-packages (from openai) (4.66.4)\n","Requirement already satisfied: typing-extensions<5,>=4.7 in /usr/local/lib/python3.10/dist-packages (from openai) (4.11.0)\n","Requirement already satisfied: idna>=2.8 in /usr/local/lib/python3.10/dist-packages (from anyio<5,>=3.5.0->openai) (3.7)\n","Requirement already satisfied: exceptiongroup in /usr/local/lib/python3.10/dist-packages (from anyio<5,>=3.5.0->openai) (1.2.1)\n","Requirement already satisfied: certifi in /usr/local/lib/python3.10/dist-packages (from httpx<1,>=0.23.0->openai) (2024.2.2)\n","Requirement already satisfied: httpcore==1.* in /usr/local/lib/python3.10/dist-packages (from httpx<1,>=0.23.0->openai) (1.0.5)\n","Requirement already satisfied: h11<0.15,>=0.13 in /usr/local/lib/python3.10/dist-packages (from httpcore==1.*->httpx<1,>=0.23.0->openai) (0.14.0)\n","Requirement already satisfied: annotated-types>=0.4.0 in /usr/local/lib/python3.10/dist-packages (from pydantic<3,>=1.9.0->openai) (0.6.0)\n","Requirement already satisfied: pydantic-core==2.18.2 in /usr/local/lib/python3.10/dist-packages (from pydantic<3,>=1.9.0->openai) (2.18.2)\n"]}],"source":["!pip install openai"]},{"cell_type":"code","source":["import os\n","import numpy as np\n","import pandas as pd\n","import time\n","from openai import OpenAI\n","from getpass import getpass\n","import base64\n","import requests\n","import random\n","\n","openai_api_key = 'YOURKEY'\n","os.environ[\"OPENAI_API_KEY\"] = openai_api_key\n","\n","client = OpenAI()\n","image_directory = './phone_screenshots'\n","\n","def encode_image(image_path):\n"," with open(image_path, \"rb\") as image_file:\n"," return base64.b64encode(image_file.read()).decode('utf-8')\n","\n","headers = {\n"," \"Content-Type\": \"application/json\",\n"," \"Authorization\": f\"Bearer {os.getenv('OPENAI_API_KEY')}\"\n","}"],"metadata":{"id":"h-HzyqH6ZHnp"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["data = pd.read_csv('data/Statement corpus.csv')"],"metadata":{"id":"5hO-ijFAZL8r"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["data"],"metadata":{"id":"xsrkBjSCZwyi","executionInfo":{"status":"ok","timestamp":1715176192114,"user_tz":240,"elapsed":22,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"75120a9e-068c-45a1-d14d-b0a789a6de5b","colab":{"base_uri":"https://localhost:8080/","height":545}},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":[" statement behavior everyday \\\n","0 1 plus 1 is 2 0 1 \n","1 5 is alot bigger than 1 0 0 \n","2 a balanced diet and regular exercise is needed... 1 1 \n","3 a ball is round 0 1 \n","4 a baton twirler doesn't want a broken finger 0 1 \n","... ... ... ... \n","4402 young people should not believe that they cann... 0 0 \n","4403 your credibility is the currency of the realm 1 0 \n","4404 your religion is not the only one true religion 1 1 \n","4405 zero plus any number equals that number 1 0 \n","4406 zest is the secret of all beauty there is no b... 0 0 \n","\n"," figure_of_speech judgment opinion reasoning \\\n","0 0 0 0 1 \n","1 0 0 0 0 \n","2 0 1 0 1 \n","3 0 0 0 0 \n","4 0 1 1 0 \n","... ... ... ... ... \n","4402 0 0 0 0 \n","4403 1 0 0 1 \n","4404 0 1 1 1 \n","4405 1 0 1 0 \n","4406 1 1 0 0 \n","\n"," category elicitation \n","0 Mathematics and logic category response \n","1 Mathematics and logic category response \n","2 Health and fitness category response \n","3 Natural and physical sciences Concept Net \n","4 Human activities Concept Net \n","... ... ... \n","4402 History and events news media \n","4403 People and self aphorism \n","4404 Religion and belief systems category response \n","4405 General reference category response \n","4406 General reference aphorism \n","\n","[4407 rows x 9 columns]"],"text/html":["\n","
\n","
\n","\n","\n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n","
statementbehavioreverydayfigure_of_speechjudgmentopinionreasoningcategoryelicitation
01 plus 1 is 2010001Mathematics and logiccategory response
15 is alot bigger than 1000000Mathematics and logiccategory response
2a balanced diet and regular exercise is needed...110101Health and fitnesscategory response
3a ball is round010000Natural and physical sciencesConcept Net
4a baton twirler doesn't want a broken finger010110Human activitiesConcept Net
..............................
4402young people should not believe that they cann...000000History and eventsnews media
4403your credibility is the currency of the realm101001People and selfaphorism
4404your religion is not the only one true religion110111Religion and belief systemscategory response
4405zero plus any number equals that number101010General referencecategory response
4406zest is the secret of all beauty there is no b...001100General referenceaphorism
\n","

4407 rows × 9 columns

\n","
\n","
\n","\n","
\n"," \n","\n"," \n","\n"," \n","
\n","\n","\n","
\n"," \n","\n","\n","\n"," \n","
\n","
\n","
\n"],"application/vnd.google.colaboratory.intrinsic+json":{"type":"dataframe","variable_name":"data","summary":"{\n \"name\": \"data\",\n \"rows\": 4407,\n \"fields\": [\n {\n \"column\": \"statement\",\n \"properties\": {\n \"dtype\": \"string\",\n \"num_unique_values\": 4407,\n \"samples\": [\n \"we are at a moment in our countrys history where we need to heal and unify\",\n \"he's voted against protecting our environment\",\n \"leaders should be able to empathize with their workers during a pandemic\"\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"behavior\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 1,\n 0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"everyday\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 0,\n 1\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"figure_of_speech\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 1,\n 0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"judgment\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 1,\n 0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"opinion\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 1,\n 0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"reasoning\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 0,\n 1\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"category\",\n \"properties\": {\n \"dtype\": \"category\",\n \"num_unique_values\": 13,\n \"samples\": [\n \"Culture and the arts\",\n \"History and events\"\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"elicitation\",\n \"properties\": {\n \"dtype\": \"category\",\n \"num_unique_values\": 7,\n \"samples\": [\n \"category response\",\n \"Concept Net\"\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n }\n ]\n}"}},"metadata":{},"execution_count":7}]},{"cell_type":"code","source":["description = '''\n","For each statement provided below, please classify it into binary categories for the following labels. For each label, return strictly only '1' for a match and only '0' otherwise, following the detailed descriptions provided:\n","\n","Behavior:\n","Social (1): Relates to beliefs, perceptions, preferences, and socially constructed rules governing human experience, either \"real\" or opinion-based and of human origins. Example: 'I exist and am the same person I was yesterday.'\n","Physical (0): Pertains to objective features of the world, governed by natural laws like physics or biology, and can be measured or logically derived. Example: 'The Earth is the third planet from the Sun.'\n","\n","Everyday:\n","Everyday (1): Situations commonly encountered or potentially encountered in ordinary life. Example: 'Touching a hot stove will burn you.'\n","Abstract (0): Regularities or conclusions not directly observable or derived solely from personal experience. Example: 'Capitalism is a better economic system than Communism.'\n","\n","Figure of Speech:\n","Figure of speech (1): Contains aphorisms, metaphors, or hyperboles. Example: 'Birds of a feather flock together.'\n","Literal language (0): Plain and direct language that means exactly what it says. Example: 'The sky is blue.'\n","\n","Judgment:\n","Normative (1): Involves a judgment, belief, value, or social norm. Example: 'Treat others how you want them to treat you.'\n","Positive (0): Refers to empirical regularities or scientific laws. Example: 'The sun rises in the east and sets in the west.'\n","\n","Opinion:\n","Opinion (1): Subjective statements that might be believed true but cannot be objectively proven. Example: 'FDR was the greatest US president of the 20th Century.'\n","Factual (0): Statements that can be independently verified as correct or incorrect. Example: 'Obama was the 24th president of the United States (this is incorrect).'\n","\n","Reasoning:\n","Knowledge (1): Observations about the world, true or false, subjective or objective. Example: 'Dogs are nicer than cats.'\n","Reasoning (0): Conclusions derived from combining knowledge and logic. Example: 'My dog is wagging its tail, therefore it is happy.'\n","\n","The output should be in JSON schema.\n","\n","Please classify the following statement:\n","'''\n","\n","# For example, your return should follow the JSON schema like this:\n","# {\n","# Behavior: 0,\n","# Everyday: 0,\n","# Figure of Speech: 0,\n","# Judgment: 0,\n","# Opinion: 0,\n","# Reasoning: 0\n","# }"],"metadata":{"id":"kfIeS0zhapG-"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["def generateQ(description, question):\n"," response = client.chat.completions.create(\n"," model=\"gpt-4-turbo-preview\",\n"," response_format={ \"type\": \"json_object\" },\n"," messages=[\n"," {\"role\": \"system\", \"content\": description},\n"," {\n"," \"role\": \"user\",\n"," \"content\": [\n"," {\"type\": \"text\", \"text\": question},\n"," ],\n"," }\n"," ],\n"," logprobs=True,\n"," top_logprobs=1,\n"," )\n"," return response"],"metadata":{"id":"LjXW_mcNXByH"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["import re\n","\n","def extract_class(line):\n"," return re.findall(r'\"(.*?)\"', line)\n","\n","def extract_digit(line):\n"," match = re.search(r'[01]', line)\n"," if match:\n"," return match.group()\n"," else:\n"," return None\n","\n","def parse_response(statement, msg):\n"," lines = msg.strip().split('\\n')\n"," result_dict = {}\n"," result_dict['statement'] = statement\n"," for line in lines:\n"," if ': ' in line:\n"," label, value = line.split(': ')\n"," value = extract_digit(value)[0]\n"," label = extract_class(label)[0]\n"," print(label, value)\n"," result_dict[label] = int(value)\n"," return result_dict"],"metadata":{"id":"majCEGgreKLG"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["outputs = []"],"metadata":{"id":"_nMJutZ-kSHC"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["for index, row in data.iterrows():\n"," if index < 4000:\n"," continue\n"," # if index == 4000:\n"," # break\n"," question = row['statement']\n"," response = generateQ(description, question)\n"," msg = response.choices[0].message.content\n"," print(question)\n"," response_data = parse_response(question, msg)\n"," outputs.append(response_data)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"LB__dyQ-dHtB","executionInfo":{"status":"ok","timestamp":1715037986685,"user_tz":240,"elapsed":1204069,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"3dd0b481-89f0-4813-a36d-26259e1c9eeb"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["we should enact policies which are fiscally responsible\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should ensure leadership and strength back to congress\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should ensure police have body cameras on them\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should fight for conservative solutions\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should fight for health care coverage\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should fight for the green new deal\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should fight the imperative to take things personally when speaking to others\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should fight to stop spread of covid19\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should fill out the questionnaire on gun reform\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should find a way to eliminate party lines so we can solve problems\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should find bipartisan solutions to our most pressing issues\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should find solutions to the big issues facing our country\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should focus more on preventing gun violence not associated with mass shootings such as crime on city streets\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should focus on the economy and maintain fiscal integrity\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should focus on the needs of this community\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should follow the cdc guidelines and recommendations\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should gather together and rally behind this\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should get more reforms not more corrupt pac money\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should give children the best environment to learn in schools\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should give the government back to the people\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should have a free and open society\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should help elect a conservative cop\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should help mj hegar defeat senator cornyn at the polls\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should increase gun control\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should join oct 24 day of action\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should keep pace with the conservatives\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should keep pace with the democrat machine\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should keep pace with the democrats\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should keep science based solutions above political grandstanding\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should let nicole to continue fighting for us and hold the left accountable\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should listen to conservamerica\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should listen to those who are experts in their field and stop politicizing health care issues\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should look out for each other\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should make sure all people are safe\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should manage our country's finances the way a household manages their budget\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should not allow for cheating and fraud in elections\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should not allow politicians to create a police state\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should not drive while drunk\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should not ignore the levels of vitamin k2 in our body\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should not militarize police or any other public service entity\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should not prioritize punishment over the countrys well being\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should not restrict others from going to church\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should not send workers back due to corona threats\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should not spend money on infrastructure that shrinks the economy\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 1\n","we should not support antifa rioters\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should not support irresponsible tax cuts and spending cuts\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should not support voting against our liberties\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should not teach buzzwords in schools without giving their background\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should not use kids for political gain\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should only purchase fireworks from reliable outlets\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should open schools during covid 19 crisis as they are not big transmitters of the disease\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should oppose politicians who are pro abortion\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should oppose the weaponizing of a federal agency\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should protect the 1st amendment\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should protect the environment\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should protect the environment and the planet including everything on it\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should put in safeguards to protect american national security\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should put our faith in leaders who tackle crises effectively\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should raise the voice for the hard working educators teacher parents and administrator\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should remain opposed towards to voter suppression\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should respect free markets property rights and personal responsibility\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should respect our police officers\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should send our kids back to school\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should show support towards our environment\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should sign up for the mailing list\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should stay at home when sick\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support a farm bill which invests in farm programs crop insurance conservation and rural development\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support a national mask mandate\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support a person with compassion\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support abortion and woman's right to choose\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support affordable care act succeeds\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support an international trade agenda which holds partners to their existing commitments\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support and protect women's sports\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support andy ruff for congress\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support any means necessary for voters to have their voices heard\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support budget reduction\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support clean environment clean energy\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support conservative ideals\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support conservative policies\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support conservative politicians\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support conservative solutions to issues\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support constitutional carry\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support covid measures to be followed\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support environmental regulation\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support equity and opportunity\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support expanding vote by mail\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support for ralph shicatano\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support freedom and liberty\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support freedom transparency and accountability in our elections\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support getting rid of jennifer wexton\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support gun safety activity\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support harley rouda's reelection efforts\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support her dreams and passion\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support ike who wants to help with our struggles\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support initiative 1776\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support jesse philippe\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support josh gottheimer for congress\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support keeping families off the street\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support leaders who work for the people not themselves\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support listening to the advice of public healthy experts\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support lowering taxes\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support maine values\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support medical fund\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support medicare for all and the green new deal\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support message about votebymail\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support meyer decision\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support moderate democrats that republicans can work with\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support nancy pelosi\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support not having restrictions on religious services\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support our economy by continuing to do what we can to combat the spread of covid\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support our schools while controlling spending\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support phil king who supports public safety and a strong economy\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support pro law enforcement policies and public safety\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support progressive values\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support protect our shared conservative\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support protecting the 2nd amendment defending life and securing the border\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support reforms to policing\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support reopening schools in the fall\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support safety measures that allow poll workers to feel safe and polls to remain open\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support scientists and doctors\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support small businesses and families\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support steve daines in his bid for the office of governor\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support the affordable care act\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support the conservative push to move things forward\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support the creation of jobs\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support the election of dr pritesh gandhi\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support the environment\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support the gun gun violence prevention\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support the leaders of the medical community and their decisions\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support the new senator\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support the president\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support the president's budget for america's future\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support the republican party\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support the the limitation of movement of the general public\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support the us chamber of commerce\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support the vote by mail for all voters\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support the vote by mail outreach initiative\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support universal background checks\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support universal vote by mail\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support vote by mail\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support voter id laws for everyone\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support voting by mail\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support voting in person\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support wearing a mask\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support wearing mask\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support wearing masks in public places\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support what people want at church\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should support women leaders\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should supports any methods of voting so that the most americans get their voices heard\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should take precautions to protect our own health and minimize the spread of disease\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should take steps to help local businesses during this difficult time\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should teach non academic skills like repair work and cooking in schools as well\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should think about people before politics\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should trust ms galvin to put alaskans first\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should try and make a difference\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should vaccinate as many people as possible to protect vulnerable people in our society\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should vote an underdog who supports a more civil government\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should vote either by mail or in person\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should vote for decmocrats to protect voting rights\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should vote for firearm bill\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should vote for jessi ebben\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should vote for leaders who share in republican values\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should vote out sharice davids\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should wash our hands cover our cough to stop the spread of covid19\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 0\n","Reasoning 0\n","we should wear masks during the covid 19 crisis\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should wear masks to stop the spread of covid19\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should work together to help change leadership\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should work together to restore balance in the legislature\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should work together towards achieving our goals\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should work towards bringing this back\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we should work towards getting through this\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we shouldn't distribute stimulus checks to the extent that it overheats the economy\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 1\n","we shouldn't make voting by mail be a partisan issue\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we shouldn't put people in jail who don't belong in jail\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we shouldnt even think about sending our babies back into the classroom\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 1\n","we spend our time envying people whom we wouldn't wish to be\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we want people who will make our country grow\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we will bankrupt ourselves in the vain search for absolute security\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we will be forever known by the tracks we leave\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","we will provide a new kind of politician\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","wearing a mask in public stops the spread of infection transmitted through respiratory droplets\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","weekend voting should be implemented across america\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","were going to protect asylum seekers\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","west michigan needs support for a lot such as their water\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","what goes up must come down\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 0\n","Opinion 0\n","Reasoning 0\n","what you just did is really hard tonight you can go to sleep knowing you are stronger today than you were yesterday\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","what you seek is seeking you\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 0\n","Opinion 1\n","Reasoning 0\n","whatever you are be a good one\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","whatever you cannot understand you cannot possess\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 0\n","Opinion 1\n","Reasoning 0\n","when a rule is obviously ridiculous in a particular situation then the rule should change\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when corporation tax rates go up corporations either must lay off workers or raise the costs of their products\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 1\n","when counting forward eight comes after seven\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","when everything is going right be prepared for something to go wrong and vice versa\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when john feels cold he often eats cold ice cream\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","when john makes new friends he trips them every time he sees them\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 0\n","Reasoning 1\n","when john tries to enrich mary's lives he will try to make her dreams come true\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when managing others we should put aside ego and accept responsibility\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when mary feels a little cold she should put on a bathing suit\n","Behavior 0\n","Everyday 1\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when one door closes there's a lock being installed somewhere else\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 0\n","Opinion 0\n","Reasoning 0\n","when one half of the world experiences day the other half experiences night\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 0\n","when people are in trouble they find a way to get out of it\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when people build a birdhouse they hang it outside\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 0\n","when people dig themselves they can fall into a hole\n","Behavior 0\n","Everyday 1\n","Figure of Speech 1\n","Judgment 0\n","Opinion 0\n","Reasoning 0\n","when people don't pay attention while driving they will drive into a lake\n","Behavior 0\n","Everyday 1\n","Figure of Speech 1\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","when people get a ticket they put it into pocket\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","when people get back home they think they will never leave it\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when people get hot they start to breath fire\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 0\n","Opinion 0\n","Reasoning 0\n","when people get together with friends they usually engage in fun and healthy activities\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when people pound sand they eat sand\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 0\n","Opinion 1\n","Reasoning 0\n","when people take someone else's allowance they usually honestly give it back\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when people want to write a story they often think they can write a story without any work\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when people wear pants they use belts if necessary\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 0\n","when politicians focus on partisan politics rahter than issues people care about politicians' actions seem meaningless to people\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when searching for something you always find it in the last place you look\n","Behavior 0\n","Everyday 1\n","Figure of Speech 1\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","when some people might take a breather others might not\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when someone becomes a police officer one goes back to school for a finance degree\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","when someone borrows a car they want to take it to the store\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 1\n","Reasoning 1\n","when someone eats bread they might not be sorry about it\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when someone hears a strange noise they will look for the source\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 0\n","when someone is wearing a swimsuit they cannot play basketball\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 0\n","Opinion 1\n","Reasoning 0\n","when someone leaves a fast food store with a bag the bag will be full of food\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","when someone makes sandwiches they don't need to ask anyone else about it\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when someone takes time off they spend time relaxing and then cathcing up on work needed at home\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when the sun rises in the morning many living things rise from their sleep with it\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","when the wolf is mentioned he is already near you\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 0\n","Opinion 0\n","Reasoning 0\n","when their lives or careers are at risk people protect their paychecks through insurance agencies\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when we can talk about our feelings they become less overwhelming less upsetting and less scary\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when we cut someone else's arm that person would not call for help\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 1\n","Reasoning 0\n","when we text someone else we will get annoyed if we don't hear back immediately\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when we turn eight years old we want to celebrate\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when we work together we can find solutions\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when you add two numbers the resulting number is greater than either of the two that were added in the equation\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","when you are rich it is hard to know who your real friends are\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when you come to a fork in the road take it\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 0\n","Reasoning 0\n","when you listen it's amazing what you can learn when you act on what you've learned it's amazing what you can change\n","Behavior 1\n","Everyday 1\n","Figure of Speech 1\n","Judgment 1\n","Opinion 0\n","Reasoning 0\n","when you say that you agree with a thing in principle you mean that you have not the slightest intention of carrying it out in practice\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when you take stuff from one writer it`s plagiarism but when you take it from many writers it`s research\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when you travel to the arctic you need to bring warm clothing as the climate is very cold\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 0\n","when you undertake to run a revolution the difficulty is not to make it go it is to hold it in check\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when you want to fool the world tell the truth\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","when you`re arguing with a fool make sure he isn`t doing the same thing\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","whenever man commits a crime heaven finds a witness\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","where children are there is the golden age\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","where large sums of money are concerned it is advisable to trust nobody\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","where social distancing is difficult we should wear a mask\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","where the press is free and every man able to read all is safe\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","where there is no imagination there is no horror\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","where there is no property there is no injustice\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 1\n","where there`s marriage without love there will be love without marriage\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","while some young adults and teens may adapt to online friendship and entertainment some may exist in boredom and loneliness during enforced isolation\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","whitmer helps people in her state\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","who buys cheap pays twice\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","whoever envies another confesses his superiority\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","whoever in discussion adduces authority uses not intellect but memory\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","why would he vote against teacher pay raises\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","will you help team hinson today\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","will you stand with dr fauci and sign now\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","win without boasting lose without excuse\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 0\n","Reasoning 0\n","winter is cold\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","winter is not a season it`s an occupation\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 0\n","Opinion 1\n","Reasoning 0\n","wisdom is like a baobab tree it is hard for an individual to embrace it\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 0\n","Reasoning 0\n","with a dirty nappy and the train gets full he has a choice to put his nappy down anywhere including on the floor or hold his nappy\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","with all things and in all things we are all relatives\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","with the health risk and pandemic all voters should be able to vote in a way they feel safe\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","without craftsmanship inspiration is a mere reed shaken in the wind\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","women should have equal rights under the law\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","women's sports are sacred and need to be protected from political attacks\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","won't put politics over people\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","word is a shadow of a deed\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 0\n","Opinion 1\n","Reasoning 0\n","words are the most powerful drugs used by mankind\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","work hard but make sure you enjoy life too\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","working out and doing cardio can improve your health\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 0\n","Reasoning 1\n","working out can lead to lower weights and better health\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","working out is like saving for your health 401k\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 0\n","Opinion 1\n","Reasoning 0\n","writing is like prostitution first you do it for love and then for a few close friends and then for money\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","writing is nature's way of exposing how sloppy your thinking is\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","wrong given red cards in soccer should be overturned\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","yeast is not used for every sort of bread\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","yesterday is history tomorrow is a mystery today is a gift that's why we call it 'the present'\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you are likely to find a bookshelf in the library\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a capital in a country\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 0\n","you are likely to find a car in a parking lot\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a cat in a bed\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a computer in any school\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a corner cupboard in the kitchen\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a crab in sand\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a dog in a kennel\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a fish in water\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a human in homes\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a human in the school\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a jellyfish in an aquarium\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a jellyfish in most oceans\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a motherboard in a computer\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a mouthwash in bathroom\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a potato in kitchen\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a potato in restaurants\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a potato in your pantry\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a shirt in closet\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 0\n","you are likely to find a stapler in a desk\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find a star in the night sky\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 0\n","you are likely to find a towel rack in bathroom\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find bread in breadbox\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find children in a school\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are likely to find toilet paper in a bathroom\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you are required to live your life from the start to the end no one else can do it for you\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 0\n","Reasoning 0\n","you are still you even if you relocate to another location\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you are the master of the secrets you keep but the slave of those who go away\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you are what you eat\n","Behavior 0\n","Everyday 1\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you can ask me to look for the truth but you cannot ask me to find it\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 0\n","Opinion 1\n","Reasoning 0\n","you can be happy or you can be unhappy it's just according to the way you look at things\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you can be healthy no matter what level of fitness you are\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you can call the place where you were born home\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you can charge rechargeable batteries\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you can fool some of the people some of the time and all of the people some of the time but you can`t fool all of the people all of the time\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you can go to church to learn about god\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 1\n","Reasoning 0\n","you can only know what the weather is like in the place you are\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you can pretend to be serious you can`t pretend to be witty\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 0\n","Reasoning 0\n","you can strain your back lifting heavy objects\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you can't divide a dozen whole eggs evenly among a family of 5\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you can't know where you're gong until you understand where you're coming from\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you can't teach a kid to ride a bike at a seminar\n","Behavior 0\n","Everyday 1\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you cannot both be and not be something at the same time\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you cannot corrupt the people but you often can deceive them\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you do not know your friends from your enemies until the ice breaks\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you have a god to believe in so use him wisely he may or may not be real but he is still a guide to take you on a path of your own choice\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 1\n","you lose weight the more you exercise\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you need a passport to travel to other countries\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 0\n","Reasoning 0\n","you need electricity to power a computer\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you need to vote for a republican instead of a democrat this time\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you never put metal in a microwave\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you only get one crack at a big opportunity so make sure you recognize it and grab it with both hands\n","Behavior 0\n","Everyday 1\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you only get one liver make sure you treat it right\n","Behavior 1\n","Everyday 1\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you plug a graphics processing unit into a motherboard\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you should eat healthy\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you should never wear your best trousers when you go out to fight for freedom and liberty\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you should vote for me so i can help the area regain control of their money\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you think more clearly after a good night's sleep\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 1\n","Reasoning 0\n","you were designed for accomplishment engineered for success and endowed with the seeds of greatness\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you will never get rich at the rate of a penny a thought\n","Behavior 0\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you work out and make your body feel better or stay in one place and feel worse\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would breathe because you want live\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would bring home some fish because fish is good to eat\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 1\n","you would bring home some fish because you want to eat it\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would bungee jump from a high bridge because you want be excited\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would buy a hamburger because you want to eat it\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would buy presents for others because you like them\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 1\n","you would buy products because you need them\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 1\n","Reasoning 0\n","you would buy something for a loved one because you want to express love\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would chop firewood because you want to build a fire\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would climb because you want reach the top\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 0\n","Reasoning 1\n","you would compete because you want to win\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would conquer your opponent because you want win\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would cool off because you are hot\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would cry because you are sad\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would cry because you were sad\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 0\n","Reasoning 1\n","you would cut your hair because you want it shorter\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 1\n","you would do the housework because you like a clean house\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 1\n","Reasoning 1\n","you would drink because you want to quench your thirst\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would drink because you were thirsty\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would drink water because you are thirsty\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would eat food because you are hungry\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would entertain someone because you want please them\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would examine a thing because it interests you\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 1\n","Reasoning 0\n","you would feed your family because i love them\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 1\n","you would fight inflation because you want to save some money\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would find information because you want to know something\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would give assistance because a person needs help\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 0\n","Reasoning 1\n","you would give assistance because you want help someone\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would go for a swim because you want exercise\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would go to a concert because you want to listen to music\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 1\n","Reasoning 0\n","you would go to the zoo because you want to see animals\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 1\n","Reasoning 1\n","you would have a rest because you were very tired\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would kiss someone because you want to express that you love them\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 0\n","Reasoning 1\n","you would learn because you want to have more knowledge\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would listen because you want learn\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 0\n","Reasoning 1\n","you would listen to the radio because you want to hear a traffic report\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 1\n","Reasoning 1\n","you would make bread because you want to eat the bread\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would paint a picture because you want to be artistic\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would pay cash because you want avoid debt\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 1\n","you would play because you want to have some fun\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would read because you want to learn\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would read the newspaper because you want to be informed\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 1\n","Reasoning 1\n","you would remember a phone number because you want to call someone\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would reproduce because you want have children\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would sell your new book because you want money\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would sing because you are happy\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would skateboard because you want to have fun\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would sleep because you want to rest\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would swim because you are hot\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would tickle because you want to make laugh\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 0\n","Reasoning 1\n","you would urinate because your bladder was full\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would wait on tables because you are a waiter\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would wait your turn because you are polite\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would walk because you want some exercise\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 1\n","Reasoning 1\n","you would watch a movie because you want entertainment\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you would watch television because your bored\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 1\n","Reasoning 1\n","you would work because you need money\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 1\n","you would work because you want money\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 0\n","Reasoning 0\n","you'll be healthier if you get at least a little bit of exercise every day\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","you're going to make mistakes the real mistake is not learning from them\n","Behavior 0\n","Everyday 1\n","Figure of Speech 1\n","Judgment 1\n","Opinion 0\n","Reasoning 0\n","you're never going to know everything there is to know in tech instead aim for the confidence to know that you can figure it out\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 1\n","young children should be allowed to play sports without masks\n","Behavior 1\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","young people should not believe that they cannot be affect by covid 19\n","Behavior 0\n","Everyday 1\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","your credibility is the currency of the realm\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","your religion is not the only one true religion\n","Behavior 1\n","Everyday 0\n","Figure of Speech 0\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n","zero plus any number equals that number\n","Behavior 0\n","Everyday 0\n","Figure of Speech 0\n","Judgment 0\n","Opinion 0\n","Reasoning 0\n","zest is the secret of all beauty there is no beauty that is attractive without zest\n","Behavior 1\n","Everyday 0\n","Figure of Speech 1\n","Judgment 1\n","Opinion 1\n","Reasoning 0\n"]}]},{"cell_type":"code","source":["Q = pd.DataFrame(outputs)\n","Q.to_csv(\"Q3.csv\", index=False)\n","Q"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":423},"id":"4jk7Tev-lb1x","executionInfo":{"status":"ok","timestamp":1715037986685,"user_tz":240,"elapsed":68,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"57c025bc-cc99-4bcb-926c-68ff0c5e0623"},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":[" statement Behavior Everyday \\\n","0 remaining hydrated is important since your bod... 0 1 \n","1 remember democracy never lasts long it soon wa... 0 0 \n","2 remember that life is neither pain nor pleasur... 1 0 \n","3 remember that there is nothing stable in human... 0 0 \n","4 remember this is your body your residence for ... 1 1 \n","... ... ... ... \n","1402 young people should not believe that they cann... 0 1 \n","1403 your credibility is the currency of the realm 1 0 \n","1404 your religion is not the only one true religion 1 0 \n","1405 zero plus any number equals that number 0 0 \n","1406 zest is the secret of all beauty there is no b... 1 0 \n","\n"," Figure of Speech Judgment Opinion Reasoning Judgement \n","0 0 1.0 0 1 NaN \n","1 1 1.0 1 0 NaN \n","2 0 1.0 1 0 NaN \n","3 1 1.0 1 0 NaN \n","4 1 1.0 1 0 NaN \n","... ... ... ... ... ... \n","1402 0 1.0 1 0 NaN \n","1403 1 1.0 1 0 NaN \n","1404 0 1.0 1 0 NaN \n","1405 0 0.0 0 0 NaN \n","1406 1 1.0 1 0 NaN \n","\n","[1407 rows x 8 columns]"],"text/html":["\n","
\n","
\n","\n","\n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n","
statementBehaviorEverydayFigure of SpeechJudgmentOpinionReasoningJudgement
0remaining hydrated is important since your bod...0101.001NaN
1remember democracy never lasts long it soon wa...0011.010NaN
2remember that life is neither pain nor pleasur...1001.010NaN
3remember that there is nothing stable in human...0011.010NaN
4remember this is your body your residence for ...1111.010NaN
...........................
1402young people should not believe that they cann...0101.010NaN
1403your credibility is the currency of the realm1011.010NaN
1404your religion is not the only one true religion1001.010NaN
1405zero plus any number equals that number0000.000NaN
1406zest is the secret of all beauty there is no b...1011.010NaN
\n","

1407 rows × 8 columns

\n","
\n","
\n","\n","
\n"," \n","\n"," \n","\n"," \n","
\n","\n","\n","
\n"," \n","\n","\n","\n"," \n","
\n","
\n","
\n"],"application/vnd.google.colaboratory.intrinsic+json":{"type":"dataframe","variable_name":"Q","summary":"{\n \"name\": \"Q\",\n \"rows\": 1407,\n \"fields\": [\n {\n \"column\": \"statement\",\n \"properties\": {\n \"dtype\": \"string\",\n \"num_unique_values\": 1407,\n \"samples\": [\n \"the effect of going to a play is to be entertained\",\n \"we need to back republican values across the country\",\n \"we should vote for leaders who share in republican values\"\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Behavior\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 1,\n 0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Everyday\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 0,\n 1\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Figure of Speech\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 1,\n 0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Judgment\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0.4706417667590856,\n \"min\": 0.0,\n \"max\": 1.0,\n \"num_unique_values\": 2,\n \"samples\": [\n 0.0,\n 1.0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Opinion\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 1,\n 0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Reasoning\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 0,\n 1\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Judgement\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": null,\n \"min\": 1.0,\n \"max\": 1.0,\n \"num_unique_values\": 1,\n \"samples\": [\n 1.0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n }\n ]\n}"}},"metadata":{},"execution_count":23}]},{"cell_type":"code","source":["Q.info()"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"FrsG8xdBiLXi","executionInfo":{"status":"ok","timestamp":1715037986686,"user_tz":240,"elapsed":59,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"220ee2f8-bc8f-4f4c-c3bf-92c860a0d42e"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["\n","RangeIndex: 1407 entries, 0 to 1406\n","Data columns (total 8 columns):\n"," # Column Non-Null Count Dtype \n","--- ------ -------------- ----- \n"," 0 statement 1407 non-null object \n"," 1 Behavior 1407 non-null int64 \n"," 2 Everyday 1407 non-null int64 \n"," 3 Figure of Speech 1407 non-null int64 \n"," 4 Judgment 1406 non-null float64\n"," 5 Opinion 1407 non-null int64 \n"," 6 Reasoning 1407 non-null int64 \n"," 7 Judgement 1 non-null float64\n","dtypes: float64(2), int64(5), object(1)\n","memory usage: 88.1+ KB\n"]}]},{"cell_type":"code","source":["ls"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"_HvC70_sFcqr","executionInfo":{"status":"ok","timestamp":1714985226942,"user_tz":240,"elapsed":396,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"409ebdbb-2f31-4a3a-81a7-3ed141e67696"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["basic_prediction.ipynb \u001b[0m\u001b[01;34mdata\u001b[0m/ DSL_statement_generation.ipynb ra_tasks.md \u001b[01;34mscripts\u001b[0m/\n"]}]},{"cell_type":"code","source":["Q"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"n3onLAttfpn0","executionInfo":{"status":"ok","timestamp":1714975999016,"user_tz":240,"elapsed":162,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"4a10bf8c-3440-4b42-f33b-e022a7949e7a"},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":["[{'statement': '1 plus 1 is 2',\n"," 'Behavior': 0,\n"," 'Everyday': 0,\n"," 'Figure of Speech': 0,\n"," 'Judgment': 0,\n"," 'Opinion': 0,\n"," 'Reasoning': 0}]"]},"metadata":{},"execution_count":43}]},{"cell_type":"markdown","source":["### Preprocessing Q"],"metadata":{"id":"CBS-4VcM-V8r"}},{"cell_type":"code","source":["Q1 = pd.read_csv(\"Q.csv\")\n","Q2 = pd.read_csv(\"Q_second.csv\")\n","Q3 = pd.read_csv(\"Q3.csv\")"],"metadata":{"id":"jnH7xzuA-Ycg"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["def fixQ(df, filePath=None):\n"," if 'Judgement' in df.columns:\n"," df['Judgment'] = df['Judgement'].where(df['Judgement'].notna(), df['Judgment'])\n"," df.drop('Judgement', axis=1, inplace=True)\n"," for col in df.select_dtypes(include='number').columns:\n"," df[col] = df[col].astype(int)\n"," if filePath:\n"," df.to_csv(filePath, index=False)\n"," df.head()\n"," return df\n","\n","Q1 = fixQ(Q1)\n","Q2 = fixQ(Q2)\n","Q3 = fixQ(Q3)"],"metadata":{"id":"tZMbeEAo-99t"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["mergeQ = pd.concat([Q1, Q2, Q3])\n","mergeQ = mergeQ.reset_index(drop=True)\n","mergeQ.to_csv(\"data/mergeQ.csv\")"],"metadata":{"id":"mp-7um8YArl2"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["mergeQ.info()"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"V12I6xWrCR0X","executionInfo":{"status":"ok","timestamp":1715168951102,"user_tz":240,"elapsed":4,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"69d6bdd7-e576-41ec-e0b9-39a60221f86a"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["\n","RangeIndex: 4407 entries, 0 to 4406\n","Data columns (total 7 columns):\n"," # Column Non-Null Count Dtype \n","--- ------ -------------- ----- \n"," 0 statement 4407 non-null object\n"," 1 Behavior 4407 non-null int64 \n"," 2 Everyday 4407 non-null int64 \n"," 3 Figure of Speech 4407 non-null int64 \n"," 4 Judgment 4407 non-null int64 \n"," 5 Opinion 4407 non-null int64 \n"," 6 Reasoning 4407 non-null int64 \n","dtypes: int64(6), object(1)\n","memory usage: 241.1+ KB\n"]}]},{"cell_type":"markdown","source":["### Step1: Train g(Q,X) supervised model"],"metadata":{"id":"r_2eh8fwd6hy"}},{"cell_type":"code","source":["mergeQ = pd.read_csv(\"data/mergeQ.csv\")\n","mergeQ = mergeQ.iloc[:,1:]"],"metadata":{"id":"ohmEQyiCbScb"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["token_feature = 1000\n","vectorizer = TfidfVectorizer(max_features=token_feature)\n","tfidf_features = vectorizer.fit_transform(data['statement'])\n","X_features = torch.tensor(tfidf_features.toarray(), dtype=torch.float32)\n","tensor_X = pd.DataFrame(X_features.numpy())\n","\n","X_combined = pd.concat([mergeQ, tensor_X], axis=1)\n","Y = torch.tensor(data[['behavior', 'everyday', 'figure_of_speech', 'judgment', 'opinion', 'reasoning']].values, dtype=torch.float32)"],"metadata":{"id":"mzbOr0WrZyE4"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["mergeQ.head()"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":206},"id":"vYLjF4hRFN_i","executionInfo":{"status":"ok","timestamp":1715176233779,"user_tz":240,"elapsed":7,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"0dd1f171-98f9-4889-e8dd-86e92b7084e5"},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":[" statement Behavior Everyday \\\n","0 1 plus 1 is 2 0 1 \n","1 5 is alot bigger than 1 0 0 \n","2 a balanced diet and regular exercise is needed... 0 1 \n","3 a ball is round 0 1 \n","4 a baton twirler doesn't want a broken finger 1 1 \n","\n"," Figure of Speech Judgment Opinion Reasoning \n","0 0 0 0 1 \n","1 1 0 0 0 \n","2 0 1 0 0 \n","3 0 0 0 1 \n","4 0 0 1 1 "],"text/html":["\n","
\n","
\n","\n","\n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n","
statementBehaviorEverydayFigure of SpeechJudgmentOpinionReasoning
01 plus 1 is 2010001
15 is alot bigger than 1001000
2a balanced diet and regular exercise is needed...010100
3a ball is round010001
4a baton twirler doesn't want a broken finger110011
\n","
\n","
\n","\n","
\n"," \n","\n"," \n","\n"," \n","
\n","\n","\n","
\n"," \n","\n","\n","\n"," \n","
\n","
\n","
\n"],"application/vnd.google.colaboratory.intrinsic+json":{"type":"dataframe","variable_name":"mergeQ","summary":"{\n \"name\": \"mergeQ\",\n \"rows\": 4407,\n \"fields\": [\n {\n \"column\": \"statement\",\n \"properties\": {\n \"dtype\": \"string\",\n \"num_unique_values\": 4407,\n \"samples\": [\n \"we are at a moment in our countrys history where we need to heal and unify\",\n \"he's voted against protecting our environment\",\n \"leaders should be able to empathize with their workers during a pandemic\"\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Behavior\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 1,\n 0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Everyday\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 0,\n 1\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Figure of Speech\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 1,\n 0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Judgment\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 1,\n 0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Opinion\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 1,\n 0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Reasoning\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 0,\n 1\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n }\n ]\n}"}},"metadata":{},"execution_count":9}]},{"cell_type":"code","source":["X_train, X_test, y_train, y_test = train_test_split(mergeQ, Y, test_size=0.2, random_state=42)\n","\n","tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')\n","def tokenize_data(text):\n"," return tokenizer(text, padding='max_length', max_length=512, truncation=True, return_tensors=\"pt\")\n","\n","X_train_tokens = [tokenize_data(text) for text in X_train['statement']]\n","X_test_tokens = [tokenize_data(text) for text in X_test['statement']]"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"0aFzEm5mK13M","executionInfo":{"status":"ok","timestamp":1715176258556,"user_tz":240,"elapsed":9135,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"f47571a8-fc90-4cea-f7c0-03b8380494f9"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_token.py:88: UserWarning: \n","The secret `HF_TOKEN` does not exist in your Colab secrets.\n","To authenticate with the Hugging Face Hub, create a token in your settings tab (https://huggingface.co/settings/tokens), set it as secret in your Google Colab and restart your session.\n","You will be able to reuse this secret in all of your notebooks.\n","Please note that authentication is recommended but still optional to access public models or datasets.\n"," warnings.warn(\n"]}]},{"cell_type":"code","source":["class TextNN(nn.Module):\n"," def __init__(self):\n"," super().__init__()\n"," self.bert = BertModel.from_pretrained('bert-base-uncased')\n"," self.classifier = nn.Linear(self.bert.config.hidden_size + 6, 6)\n","\n"," def forward(self, input_ids, attention_mask, features):\n"," outputs = self.bert(input_ids=input_ids, attention_mask=attention_mask)\n"," pooled_output = outputs.pooler_output\n"," combined_features = torch.cat((pooled_output, features), dim=1)\n"," # print(\"Shape of combined features:\", combined_features.shape)\n"," logits = self.classifier(combined_features)\n"," return logits"],"metadata":{"id":"0y8eQsVLWbnf"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["# class TextAndTabularDataset(Dataset):\n","# def __init__(self, texts, features, labels, tokenizer, max_length=512):\n","# self.texts = texts\n","# self.features = features\n","# self.labels = labels\n","# self.tokenizer = tokenizer\n","# self.max_length = max_length\n","\n","# def __len__(self):\n","# return len(self.texts)\n","\n","# def __getitem__(self, idx):\n","# text_data = self.tokenizer(self.texts[idx],\n","# padding='max_length',\n","# max_length=self.max_length,\n","# truncation=True,\n","# return_tensors=\"pt\")\n","# input_ids = text_data['input_ids'].squeeze(0) # Removing batch dimension\n","# attention_mask = text_data['attention_mask'].squeeze(0)\n","\n","# features = torch.tensor(self.features[idx], dtype=torch.float32)\n","# labels = torch.tensor(self.labels[idx], dtype=torch.float32)\n","\n","# return input_ids, attention_mask, features, labels"],"metadata":{"id":"1zIoMrcJTOik"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["X_train"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":423},"id":"Ed6WdiphTqlE","executionInfo":{"status":"ok","timestamp":1715175927543,"user_tz":240,"elapsed":413,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"05af1d0c-a150-4ca8-e24a-73304d4e5b1a"},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":[" statement Behavior Everyday \\\n","4336 you think more clearly after a good night's sleep 0 1 \n","3505 the last thing you do when you take a bath is ... 0 1 \n","3134 something that might happen while getting a go... 0 1 \n","1448 if john pulls mary away mary will assume she i... 1 1 \n","1749 if max accepts the job then sam feels proud 1 1 \n","... ... ... ... \n","3444 the higher you go the cooler it becomes 0 0 \n","466 during economic crisis local businesses should... 1 0 \n","3092 something that might happen as a consequence o... 0 1 \n","3772 waiting for luck is similar to waiting for death 0 0 \n","860 if alex calls sam's mother they would have a c... 0 1 \n","\n"," Figure of Speech Judgment Opinion Reasoning \n","4336 0 0 1 0 \n","3505 0 0 0 1 \n","3134 0 0 0 1 \n","1448 0 1 1 0 \n","1749 0 1 1 0 \n","... ... ... ... ... \n","3444 0 0 0 1 \n","466 0 1 1 0 \n","3092 0 0 0 1 \n","3772 1 1 1 0 \n","860 0 0 0 1 \n","\n","[3525 rows x 7 columns]"],"text/html":["\n","
\n","
\n","\n","\n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n","
statementBehaviorEverydayFigure of SpeechJudgmentOpinionReasoning
4336you think more clearly after a good night's sleep010010
3505the last thing you do when you take a bath is ...010001
3134something that might happen while getting a go...010001
1448if john pulls mary away mary will assume she i...110110
1749if max accepts the job then sam feels proud110110
........................
3444the higher you go the cooler it becomes000001
466during economic crisis local businesses should...100110
3092something that might happen as a consequence o...010001
3772waiting for luck is similar to waiting for death001110
860if alex calls sam's mother they would have a c...010001
\n","

3525 rows × 7 columns

\n","
\n","
\n","\n","
\n"," \n","\n"," \n","\n"," \n","
\n","\n","\n","
\n"," \n","\n","\n","\n"," \n","
\n","
\n","
\n"],"application/vnd.google.colaboratory.intrinsic+json":{"type":"dataframe","variable_name":"X_train","summary":"{\n \"name\": \"X_train\",\n \"rows\": 3525,\n \"fields\": [\n {\n \"column\": \"statement\",\n \"properties\": {\n \"dtype\": \"string\",\n \"num_unique_values\": 3525,\n \"samples\": [\n \"if alex pushes sam aside alex walks away without apologizing\",\n \"if alex bothers sam so much alex will feel ashamed\",\n \"if max accepts sam invitation then max intends to the appreciation\"\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Behavior\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 1,\n 0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Everyday\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 0,\n 1\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Figure of Speech\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 1,\n 0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Judgment\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 1,\n 0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Opinion\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 0,\n 1\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"Reasoning\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 0,\n \"min\": 0,\n \"max\": 1,\n \"num_unique_values\": 2,\n \"samples\": [\n 1,\n 0\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n }\n ]\n}"}},"metadata":{},"execution_count":33}]},{"cell_type":"code","source":["class TokenDataset(Dataset):\n"," def __init__(self, tokens, features, labels):\n"," self.tokens = tokens\n"," self.features = features\n"," self.labels = labels\n","\n"," def __len__(self):\n"," return len(self.labels)\n","\n"," def __getitem__(self, idx):\n"," input_ids = self.tokens[idx]['input_ids'].squeeze(0)\n"," attention_mask = self.tokens[idx]['attention_mask'].squeeze(0)\n"," features = self.features[idx]\n"," label = self.labels[idx]\n"," # print(input_ids.shape, attention_mask.shape, features.shape)\n"," return input_ids, attention_mask, features, label"],"metadata":{"id":"BiwrWAVrSp6B"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["train_features = torch.tensor(X_train.drop('statement', axis=1).values, dtype=torch.float32)\n","test_features = torch.tensor(X_test.drop('statement', axis=1).values, dtype=torch.float32)\n","train_labels = y_train\n","test_labels = y_test\n","\n","train_dataset = TokenDataset(X_train_tokens, train_features, train_labels)\n","test_dataset = TokenDataset(X_test_tokens, test_features, test_labels)\n","\n","train_loader = DataLoader(train_dataset, batch_size=16, shuffle=True)\n","test_loader = DataLoader(test_dataset, batch_size=16, shuffle=False)"],"metadata":{"id":"utmKZF3tU5vY"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["##\n","for input_ids, attention_mask, features, labels in train_loader:\n"," print()"],"metadata":{"id":"6vn_xjnpcjEF"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n","print(f'current device: {device}')\n","model = TextNN().to(device)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"TNQ-kTZQVHJZ","executionInfo":{"status":"ok","timestamp":1715176299246,"user_tz":240,"elapsed":1532,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"eec0dd89-19e0-4666-d570-e969f06c3489"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["current device: cuda\n"]}]},{"cell_type":"code","source":["model.load_state_dict(torch.load('model.pth'))"],"metadata":{"id":"mT2caNL8fcZi"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["def evaluate(model, data_loader, criterion, device):\n"," model.eval()\n"," total_loss = 0\n"," all_labels = []\n"," all_predictions = []\n","\n"," with torch.no_grad():\n"," for input_ids, attention_mask, features, labels in data_loader:\n"," input_ids = input_ids.to(device)\n"," attention_mask = attention_mask.to(device)\n"," features = features.to(device)\n"," labels = labels.to(device)\n","\n"," outputs = model(input_ids, attention_mask, features)\n"," loss = criterion(outputs, labels)\n"," total_loss += loss.item()\n"," predictions = torch.sigmoid(outputs).round()\n","\n"," all_labels.extend(labels.cpu().numpy())\n"," all_predictions.extend(predictions.detach().cpu().numpy())\n","\n"," average_loss = total_loss / len(data_loader)\n"," accuracy = accuracy_score(all_labels, all_predictions)\n"," macrof1 = f1_score(all_labels, all_predictions, average='macro')\n"," microf1 = f1_score(all_labels, all_predictions, average='micro')\n"," return average_loss, accuracy, macrof1, microf1"],"metadata":{"id":"55Ye3xhbfi1m"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)\n","criterion = nn.BCEWithLogitsLoss()\n","epoch = 5\n","\n","for epoch in range(epoch):\n"," model.train()\n"," total_loss = 0\n"," all_labels = []\n"," all_predictions = []\n"," flag = 0\n"," for input_ids, attention_mask, features, labels in train_loader:\n"," # if flag==5:\n"," # break\n"," optimizer.zero_grad()\n"," logits = model(input_ids.to(device),\n"," attention_mask.to(device),\n"," features.to(device))\n"," loss = criterion(logits, labels.to(device))\n"," loss.backward()\n"," optimizer.step()\n","\n"," total_loss += loss.item()\n"," predictions = torch.sigmoid(logits).round()\n","\n"," all_labels.extend(labels.cpu().numpy())\n"," all_predictions.extend(predictions.detach().cpu().numpy())\n"," flag += 1\n","\n"," average_loss = total_loss / len(train_loader)\n"," accuracy = accuracy_score(all_labels, all_predictions)\n"," macrof1 = f1_score(all_labels, all_predictions, average='macro')\n"," microf1 = f1_score(all_labels, all_predictions, average='micro')\n"," print(f'epoch {epoch} === Training loss: {average_loss:.4f}, Accuracy: {accuracy:.4f}, Macro F1 : {macrof1:.4f}, Micro F1 : {microf1:.4f} ===')\n","\n"," test_loss, test_accuracy, macrof1, microf1 = evaluate(model, test_loader, criterion, device)\n"," print(f'=== Test loss: {test_loss:.4f}, Accuracy: {test_accuracy:.4f}, Macro F1 : {macrof1:.4f}, Micro F1 : {microf1:.4f} ===')"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"VBOajsnIWdM_","executionInfo":{"status":"ok","timestamp":1715180320463,"user_tz":240,"elapsed":1687870,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"d098681d-167b-48a0-9e9e-d57787f426dd"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([5, 774])\n","epoch 0 === Training loss: 0.2292, Accuracy: 0.6003, Macro F1 : 0.9140, Micro F1 : 0.9209 ===\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([2, 774])\n","=== Test loss: 0.8466, Accuracy: 0.1871, Macro F1 : 0.7021, Micro F1 : 0.7354 ===\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([5, 774])\n","epoch 1 === Training loss: 0.1595, Accuracy: 0.7214, Macro F1 : 0.9455, Micro F1 : 0.9489 ===\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([2, 774])\n","=== Test loss: 0.9988, Accuracy: 0.1746, Macro F1 : 0.6726, Micro F1 : 0.7225 ===\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([5, 774])\n","epoch 2 === Training loss: 0.1193, Accuracy: 0.7926, Macro F1 : 0.9587, Micro F1 : 0.9630 ===\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([2, 774])\n","=== Test loss: 1.1038, Accuracy: 0.1565, Macro F1 : 0.6858, Micro F1 : 0.7188 ===\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([5, 774])\n","epoch 3 === Training loss: 0.0921, Accuracy: 0.8443, Macro F1 : 0.9705, Micro F1 : 0.9730 ===\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([2, 774])\n","=== Test loss: 1.1999, Accuracy: 0.1814, Macro F1 : 0.6976, Micro F1 : 0.7400 ===\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([5, 774])\n","epoch 4 === Training loss: 0.0745, Accuracy: 0.8701, Macro F1 : 0.9744, Micro F1 : 0.9780 ===\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([2, 774])\n","=== Test loss: 1.2083, Accuracy: 0.1746, Macro F1 : 0.6976, Micro F1 : 0.7348 ===\n"]}]},{"cell_type":"code","source":["torch.save(model.state_dict(), 'model0+10.pth')"],"metadata":{"id":"tJT4v0-TVuGV"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["test_loss, test_accuracy, macrof1, microf1 = evaluate(model, test_loader, criterion, device)\n","print(f'=== Test loss: {test_loss:.4f}, Accuracy: {test_accuracy:.4f}, Macro F1 : {macrof1:.4f}, Micro F1 : {microf1:.4f} ===')"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"1KnvDYvAfu6g","executionInfo":{"status":"ok","timestamp":1715180821206,"user_tz":240,"elapsed":26804,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"aa8851e9-8c93-4196-969b-f7a7abdb9002"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([2, 774])\n","=== Test loss: 1.2083, Accuracy: 0.1746, Macro F1 : 0.6976, Micro F1 : 0.7348 ===\n"]}]},{"cell_type":"code","source":["labels_array = y_test.cpu().numpy()\n","predictions_gpt = X_test.iloc[:, -6:].values\n","\n","accuracy_gpt = accuracy_score(labels_array, predictions_gpt)\n","macrof1_gpt = f1_score(labels_array, predictions_gpt, average='macro')\n","microf1_gpt = f1_score(labels_array, predictions_gpt, average='micro')\n","print(f'Surrogate Estimation: === Accuracy: {accuracy_gpt:.4f}, Macro F1 : {macrof1_gpt:.4f}, Micro F1 : {microf1_gpt:.4f} ===')"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"yubJJJbTlO5U","executionInfo":{"status":"ok","timestamp":1715178590332,"user_tz":240,"elapsed":357,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"4161a0d5-dabe-41ad-a4e9-3f762d2f6a26"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["GPT Evaluation: === Accuracy: 0.0998, Macro F1 : 0.6191, Micro F1 : 0.6436 ===\n"]}]},{"cell_type":"markdown","source":["#### Save"],"metadata":{"id":"EkEJQL497vDu"}},{"cell_type":"code","source":["l"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"bJ1TETBj767F","executionInfo":{"status":"ok","timestamp":1715184059646,"user_tz":240,"elapsed":555,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"9bcacc0a-03a6-4fec-d0cd-994aea09af7a"},"execution_count":56,"outputs":[{"output_type":"stream","name":"stdout","text":["basic_prediction.ipynb \u001b[0m\u001b[01;34mdata\u001b[0m/ DSL.ipynb \u001b[01;34mmodels\u001b[0m/ ra_tasks.md \u001b[01;34mscripts\u001b[0m/\n"]}]},{"cell_type":"code","source":["from transformers import BertModel, BertConfig, PreTrainedModel\n","import torch\n","import torch.nn as nn\n","\n","class TransTextNN(PreTrainedModel):\n"," def __init__(self):\n"," super().__init__(BertConfig.from_pretrained(\"bert-base-uncased\"))\n"," self.bert = BertModel.from_pretrained(\"bert-base-uncased\")\n"," self.classifier = nn.Linear(self.config.hidden_size + 6, 6)\n","\n"," def forward(self, input_ids, attention_mask, features):\n"," outputs = self.bert(input_ids=input_ids, attention_mask=attention_mask)\n"," pooled_output = outputs.pooler_output\n"," combined_features = torch.cat((pooled_output, features), dim=1)\n"," logits = self.classifier(combined_features)\n"," return logits\n","\n","state_dict = torch.load(\"models/model0+5.pth\")\n","model = TransTextNN.from_pretrained(\"models\")\n","model.load_state_dict(state_dict)\n","model.save_pretrained(\"huggingface_model\")"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":349},"id":"obTzOunZ7xRo","executionInfo":{"status":"error","timestamp":1715184212525,"user_tz":240,"elapsed":2590,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"03f0934a-0941-43f3-d45e-89e50bb20f1f"},"execution_count":59,"outputs":[{"output_type":"error","ename":"AttributeError","evalue":"'NoneType' object has no attribute 'from_pretrained'","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)","\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 17\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 18\u001b[0m \u001b[0mstate_dict\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"models/model0+5.pth\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 19\u001b[0;31m \u001b[0mmodel\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mTransTextNN\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfrom_pretrained\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"models\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 20\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload_state_dict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstate_dict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 21\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msave_pretrained\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"huggingface_model\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py\u001b[0m in \u001b[0;36mfrom_pretrained\u001b[0;34m(cls, pretrained_model_name_or_path, config, cache_dir, ignore_mismatched_sizes, force_download, local_files_only, token, revision, use_safetensors, *model_args, **kwargs)\u001b[0m\n\u001b[1;32m 3120\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mPretrainedConfig\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3121\u001b[0m \u001b[0mconfig_path\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mconfig\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mconfig\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0mpretrained_model_name_or_path\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 3122\u001b[0;31m config, model_kwargs = cls.config_class.from_pretrained(\n\u001b[0m\u001b[1;32m 3123\u001b[0m \u001b[0mconfig_path\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3124\u001b[0m \u001b[0mcache_dir\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcache_dir\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mAttributeError\u001b[0m: 'NoneType' object has no attribute 'from_pretrained'"]}]},{"cell_type":"code","source":[],"metadata":{"id":"LvFM-xmA74NZ"},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":["### Step2: Construct Bias-Corrected Pseudo-Outcomes"],"metadata":{"id":"OXPjlD-llqis"}},{"cell_type":"code","source":["def pseudo_construction(model, X_text, R=0):\n"," model.eval()\n"," total_samples = len(test_loader.dataset)\n"," labeled_samples = sum(1 for _, _, _, label in test_loader.dataset if label is not None)\n"," pi_Q_X = labeled_samples / total_samples\n","\n"," pseudo_outcomes = []\n"," actual_labels = []\n"," mask_collected = []\n","\n"," with torch.no_grad():\n"," for input_ids, attention_mask, features, label in test_loader:\n"," input_ids, attention_mask, features = input_ids.to(device), attention_mask.to(device), features.to(device)\n"," outputs = model(input_ids, attention_mask, features)\n"," predictions = torch.sigmoid(outputs).squeeze().cpu().numpy()\n"," R = 1 if label is not None else 0\n"," if R:\n"," actual_label = label.cpu().numpy()\n"," Y_tilde = predictions + (R / pi_Q_X) * (actual_label - predictions)\n"," pseudo_outcomes.extend(Y_tilde.tolist())\n"," actual_labels.extend(actual_label.tolist())\n"," features_collected.extend(attention_mask.cpu().numpy())\n","\n"," return pseudo_outcomes, mask_collected"],"metadata":{"id":"1UA4dbkDyiR5"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["pseudo_outcomes, features_collected = pseudo_construction(model, X_test, 0)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"t8rYhFzN1vwq","executionInfo":{"status":"ok","timestamp":1715182995055,"user_tz":240,"elapsed":26555,"user":{"displayName":"Binbin Chen","userId":"10839306029887912307"}},"outputId":"9356379c-3f6e-437d-8b2c-233d86230527"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([16, 774])\n","Shape of combined features: torch.Size([2, 774])\n"]}]},{"cell_type":"markdown","source":["### Step 3: Solve the Logistic Regression Moment Equation"],"metadata":{"id":"QpUJG5AuzYJ_"}},{"cell_type":"code","source":["import statsmodels.api as sm\n","\n","X = sm.add_constant(features_collected)\n","Y_tilde = np.array(pseudo_outcomes)\n","\n","model = sm.GLM(Y_tilde, X, family=sm.families.Binomial())\n","result = model.fit()\n","\n","print(result.summary())"],"metadata":{"id":"hqgJvKHc1Lnz"},"execution_count":null,"outputs":[]}]}