EricR401S committed on
Commit
baf9dad
·
1 Parent(s): e9b22a3
Files changed (1)
  1. analysis.ipynb +786 -7
analysis.ipynb CHANGED
@@ -7,24 +7,21 @@
7
  },
8
  {
9
  "cell_type": "code",
10
- "execution_count": 2,
11
  "metadata": {},
12
  "outputs": [
13
  {
14
  "name": "stderr",
15
  "output_type": "stream",
16
  "text": [
17
- "Downloading data: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 11.3M/11.3M [00:02<00:00, 5.11MB/s]\n",
18
- "Generating train split: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 5123/5123 [00:00<00:00, 8076.72 examples/s]\n",
19
- "Generating validation split: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 1281/1281 [00:00<00:00, 7711.73 examples/s]\n",
20
- "Generating test split: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 712/712 [00:00<00:00, 6968.01 examples/s]\n"
21
  ]
22
  }
23
  ],
24
  "source": [
25
  "import datasets\n",
26
  "\n",
27
- "test = datasets.load_dataset(\"steamcyclone/Pill_Ideologies-Post_Titles\", trust_remote_code=True, cache_dir=\"cache\")"
28
  ]
29
  },
30
  {
@@ -32,12 +29,794 @@
32
  "metadata": {},
33
  "source": []
34
  },
35
  {
36
  "cell_type": "code",
37
  "execution_count": null,
38
  "metadata": {},
39
  "outputs": [],
40
- "source": []
41
  }
42
  ],
43
  "metadata": {
 
7
  },
8
  {
9
  "cell_type": "code",
10
+ "execution_count": 5,
11
  "metadata": {},
12
  "outputs": [
13
  {
14
  "name": "stderr",
15
  "output_type": "stream",
16
  "text": [
17
+ "Using the latest cached version of the module from C:\\Users\\ericr\\.cache\\huggingface\\modules\\datasets_modules\\datasets\\steamcyclone--Pill_Ideologies-Post_Titles\\793a6b87307104ca492b65c5a82ea97d785585b22683f670e677e61876e2166c (last modified on Tue Mar 19 04:37:51 2024) since it couldn't be found locally at steamcyclone/Pill_Ideologies-Post_Titles, or remotely on the Hugging Face Hub.\n"
18
  ]
19
  }
20
  ],
21
  "source": [
22
  "import datasets\n",
23
  "\n",
24
+ "test = datasets.load_dataset(\"steamcyclone/Pill_Ideologies-Post_Titles\", trust_remote_code=True)"
25
  ]
26
  },
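A note on the stderr message above: `datasets` fell back to the locally cached loading script because the Hub copy could not be reached. If a fresh copy is wanted once the connection is back, the download mode can be forced explicitly; a minimal sketch, assuming network access to the Hugging Face Hub (the variable name `pills` is only for illustration):

import datasets

# Force a re-download instead of reusing the cached script and data files.
pills = datasets.load_dataset(
    "steamcyclone/Pill_Ideologies-Post_Titles",
    trust_remote_code=True,
    download_mode="force_redownload",
)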
27
  {
 
29
  "metadata": {},
30
  "source": []
31
  },
32
+ {
33
+ "cell_type": "code",
34
+ "execution_count": 13,
35
+ "metadata": {},
36
+ "outputs": [],
37
+ "source": [
38
+ "import pandas as pd"
39
+ ]
40
+ },
41
+ {
42
+ "cell_type": "code",
43
+ "execution_count": 16,
44
+ "metadata": {},
45
+ "outputs": [
46
+ {
47
+ "data": {
48
+ "text/plain": [
49
+ "DatasetDict({\n",
50
+ " train: Dataset({\n",
51
+ " features: ['subreddit', 'id', 'title', 'text', 'url', 'score', 'date', 'subreddit_subscribers', 'num_comments', 'ups', 'downs', 'upvote_ratio', 'is_video'],\n",
52
+ " num_rows: 5123\n",
53
+ " })\n",
54
+ " validation: Dataset({\n",
55
+ " features: ['subreddit', 'id', 'title', 'text', 'url', 'score', 'date', 'subreddit_subscribers', 'num_comments', 'ups', 'downs', 'upvote_ratio', 'is_video'],\n",
56
+ " num_rows: 1281\n",
57
+ " })\n",
58
+ " test: Dataset({\n",
59
+ " features: ['subreddit', 'id', 'title', 'text', 'url', 'score', 'date', 'subreddit_subscribers', 'num_comments', 'ups', 'downs', 'upvote_ratio', 'is_video'],\n",
60
+ " num_rows: 712\n",
61
+ " })\n",
62
+ "})"
63
+ ]
64
+ },
65
+ "execution_count": 16,
66
+ "metadata": {},
67
+ "output_type": "execute_result"
68
+ }
69
+ ],
70
+ "source": [
71
+ "test"
72
+ ]
73
+ },
74
+ {
75
+ "cell_type": "markdown",
76
+ "metadata": {},
77
+ "source": [
78
+ "### Concatenate the Datasets"
79
+ ]
80
+ },
81
+ {
82
+ "cell_type": "code",
83
+ "execution_count": 17,
84
+ "metadata": {},
85
+ "outputs": [],
86
+ "source": [
87
+ "train = pd.DataFrame(test[\"train\"])\n",
88
+ "validation = pd.DataFrame(test[\"validation\"])\n",
89
+ "test = pd.DataFrame(test[\"test\"])"
90
+ ]
91
+ },
92
+ {
93
+ "cell_type": "code",
94
+ "execution_count": 18,
95
+ "metadata": {},
96
+ "outputs": [],
97
+ "source": [
98
+ "# concatenate all the dataframes\n",
99
+ "\n",
100
+ "df = pd.concat([train, validation, test])"
101
+ ]
102
+ },
103
+ {
104
+ "cell_type": "code",
105
+ "execution_count": 20,
106
+ "metadata": {},
107
+ "outputs": [
108
+ {
109
+ "data": {
110
+ "text/plain": [
111
+ "True"
112
+ ]
113
+ },
114
+ "execution_count": 20,
115
+ "metadata": {},
116
+ "output_type": "execute_result"
117
+ }
118
+ ],
119
+ "source": [
120
+ "df.shape == (len(train) + len(validation) + len(test), len(train.columns))"
121
+ ]
122
+ },
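The shape check above confirms no rows were lost, but `pd.concat` keeps each split's original row labels, so the combined frame has duplicate indices and no record of which split a row came from. A small variant, using a hypothetical `split` column, that addresses both:

# Tag each row with its split of origin and give the combined frame a fresh index.
train["split"] = "train"
validation["split"] = "validation"
test["split"] = "test"

df = pd.concat([train, validation, test], ignore_index=True)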
123
+ {
124
+ "cell_type": "code",
125
+ "execution_count": 61,
126
+ "metadata": {},
127
+ "outputs": [],
128
+ "source": [
129
+ "# preparing feature engineering by targetting mentions of words\n",
130
+ "gendered_words_men = [\"boy\", \"boys\", \"man\", \"men\", \"guy\", \"guys\", \"he\", \"him\", \"his\"]\n",
131
+ "gendered_words_father = [\"father\", \"dad\", \"daddy\"]\n",
132
+ "gendered_words_brother = [\"brother\", \"brothers\"]\n",
133
+ "gendered_words_son = [\"son\", \"sons\"]\n",
134
+ "gendered_words_uncle = [\"uncle\", \"uncles\"]\n",
135
+ "gendered_words_nephew = [\"nephew\", \"nephews\"]\n",
136
+ "gendered_words_husband = [\"husband\", \"husbands\", \"spouse\", \"spouses\"]\n",
137
+ "gendered_words_boyfriend = [\"boyfriend\", \"boyfriends\", \"partner\", \"partners\"]\n",
138
+ "\n",
139
+ "gendered_words_women = [\n",
140
+ " \"women\",\n",
141
+ " \"woman\",\n",
142
+ " \"girl\",\n",
143
+ " \"girls\",\n",
144
+ " \"she\",\n",
145
+ " \"her\",\n",
146
+ " \"hers\",\n",
147
+ " \"lady\",\n",
148
+ " \"ladies\",\n",
149
+ "]\n",
150
+ "gendered_words_mother = [\n",
151
+ " \"mother\",\n",
152
+ " \"mothers\",\n",
153
+ " \"mom\",\n",
154
+ " \"moms\",\n",
155
+ " \"mama\",\n",
156
+ " \"mamas\",\n",
157
+ " \"mum\",\n",
158
+ " \"mommy\",\n",
159
+ " \"mommies\",\n",
160
+ "]\n",
161
+ "gendered_words_sister = [\"sister\", \"sisters\"]\n",
162
+ "gendered_words_daughter = [\"daughter\", \"daughters\"]\n",
163
+ "gendered_words_aunt = [\"aunt\", \"aunts\"]\n",
164
+ "gendered_words_niece = [\"niece\", \"nieces\"]\n",
165
+ "gendered_words_wife = [\"wife\", \"wives\", \"spouse\", \"spouses\"]\n",
166
+ "gendered_words_girlfriend = [\"girlfriend\", \"girlfriends\", \"partner\", \"partners\"]\n",
167
+ "\n",
168
+ "# physical relations\n",
169
+ "sex_words = [\"sex\", \"sexual\", \"sexual\", \"intercourse\", \"intimacy\", \"intimate\"]\n",
170
+ "female_organs_words = [\"breasts\", \"boobs\", \"vagina\", \"pussy\", \"vulva\", \"clitoris\"]\n",
171
+ "male_organ_words = [\"penis\", \"cock\", \"dick\", \"balls\", \"testicles\"]\n",
172
+ "pregnancy_words = [\n",
173
+ " \"pregnancy\",\n",
174
+ " \"pregnant\",\n",
175
+ " \"conceive\",\n",
176
+ " \"conception\",\n",
177
+ " \"conceiving\",\n",
178
+ " \"conceived\",\n",
179
+ " \"miscarriage\",\n",
180
+ " \"miscarry\",\n",
181
+ " \"miscarried\",\n",
182
+ " \"miscarrying\",\n",
183
+ " \"abortion\",\n",
184
+ " \"abort\",\n",
185
+ " \"aborted\",\n",
186
+ " \"aborting\",\n",
187
+ " \"abortions\",\n",
188
+ "]\n",
189
+ "menstrual_words = [\"period\", \"menstruation\", \"menstrual\", \"cramps\"]\n",
190
+ "love_words = [\n",
191
+ " \"love\",\n",
192
+ " \"loves\",\n",
193
+ " \"loved\",\n",
194
+ " \"loving\",\n",
195
+ " \"lover\",\n",
196
+ " \"lovers\",\n",
197
+ " \"lovable\",\n",
198
+ " \"loveable\",\n",
199
+ " \"lovingly\",\n",
200
+ " \"unloved\",\n",
201
+ " \"beloved\",\n",
202
+ " \"loveless\",\n",
203
+ " \"lovesick\",\n",
204
+ " \"self-love\",\n",
205
+ "]\n",
206
+ "romance_words = [\n",
207
+ " 'romance',\n",
208
+ " \"romantic\",\n",
209
+ " \"romantically\",\n",
210
+ " \"romanticism\",\n",
211
+ " \"romanticist\",\n",
212
+ " \"romanticize\",\n",
213
+ " \"romanticized\",\n",
214
+ " \"romanticizing\",\n",
215
+ " \"romanticization\",\n",
216
+ " \"romanticised\",\n",
217
+ " \"romanticising\",\n",
218
+ " \"romanticise\",\n",
219
+ "]\n",
220
+ "\n",
221
+ "self_words = [\n",
222
+ " \"i\",\n",
223
+ " \"me\",\n",
224
+ " \"my\",\n",
225
+ " \"mine\",\n",
226
+ " \"myself\",\n",
227
+ " \"self\",\n",
228
+ " \"mine\"]\n",
229
+ "\n",
230
+ "social_media_websites = [\n",
231
+ " \"facebook\",\n",
232
+ " \"instagram\",\n",
233
+ " \"twitter\",\n",
234
+ " \"linkedIn\",\n",
235
+ " \"snapchat\",\n",
236
+ " \"tikTok\",\n",
237
+ " \"pinterest\",\n",
238
+ " \"reddit\",\n",
239
+ " \"youTube\",\n",
240
+ " \"whatsapp\",\n",
241
+ " \"wechat\",\n",
242
+ " \"telegram\",\n",
243
+ " \"tumblr\",\n",
244
+ " \"discord\",\n",
245
+ " \"clubhouse\",\n",
246
+ " \"twitch\",\n",
247
+ " \"vine\",\n",
248
+ " \"Myspace\"\n",
249
+ "]\n",
250
+ "\n",
251
+ "otherness = ['he', 'him', 'her', 'she', 'they', 'them', 'it', 'its', 'their', 'theirs']\n",
252
+ "\n",
253
+ "togetherness = ['our', 'ours', 'us', 'we', 'ourselves']\n"
254
+ ]
255
+ },
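One caveat with the lists above: titles and texts are lowercased before matching (see the preprocessing cell below), so mixed-case entries such as "linkedIn", "tikTok", "youTube" and "Myspace" can never match. Lowercasing each list once keeps the later membership checks consistent; a minimal sketch:

# Normalize the keyword list so it matches the lowercased tokens.
social_media_websites = [w.lower() for w in social_media_websites]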
256
+ {
257
+ "cell_type": "code",
258
+ "execution_count": 55,
259
+ "metadata": {},
260
+ "outputs": [],
261
+ "source": [
262
+ "df['title_processed'] = df['title'].str.lower()\n",
263
+ "df['title_processed'] = df['title_processed'].str.replace('[^\\w\\s]','')\n",
264
+ "df['title_split'] = df['title_processed'].str.split()\n",
265
+ "df['title_count'] = df['title_split'].apply(len)\n",
266
+ "\n",
267
+ "\n",
268
+ "df['text_processed'] = df['text'].str.lower()\n",
269
+ "df['text_processed'] = df['text_processed'].str.replace('[^\\w\\s]','')\n",
270
+ "df['text_split'] = df['text_processed'].str.split()\n",
271
+ "df['text_count'] = df['text_split'].apply(len)\n",
272
+ "\n",
273
+ "# remove stopwords\n",
274
+ "stop = stopwords.words('english')\n",
275
+ "df['title_split'] = df['title_split'].apply(lambda x: [item for item in x if item not in stop])\n",
276
+ "df['text_split'] = df['text_split'].apply(lambda x: [item for item in x if item not in stop])\n"
277
+ ]
278
+ },
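Two caveats with the preprocessing cell above: `stopwords` is used without an import, and in recent pandas versions `Series.str.replace` treats the pattern as a literal string unless `regex=True` is passed, so the punctuation strip may silently do nothing. A minimal self-contained sketch with both points addressed, assuming nltk is installed:

import nltk
from nltk.corpus import stopwords

nltk.download("stopwords")  # one-time download of the stopword corpus
stop = set(stopwords.words("english"))

for col in ("title", "text"):
    # Lowercase, strip punctuation (regex=True so the pattern is applied), tokenize.
    df[f"{col}_processed"] = (
        df[col].fillna("").str.lower().str.replace(r"[^\w\s]", "", regex=True)
    )
    df[f"{col}_split"] = df[f"{col}_processed"].str.split()
    df[f"{col}_count"] = df[f"{col}_split"].apply(len)
    # Drop English stopwords from the token lists.
    df[f"{col}_split"] = df[f"{col}_split"].apply(
        lambda words: [w for w in words if w not in stop]
    )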
279
+ {
280
+ "cell_type": "code",
281
+ "execution_count": 56,
282
+ "metadata": {},
283
+ "outputs": [
284
+ {
285
+ "data": {
286
+ "text/plain": [
287
+ "0 [casual, sex, perceived, men, women]\n",
288
+ "1 [wrong, \"settle\", can't, get, really, want?]\n",
289
+ "2 [anyone, else, annoyed, seeing, valentines, po...\n",
290
+ "3 [60, dod, 2019, -, official, kickoff]\n",
291
+ "4 [go, getting, relationship]\n",
292
+ "Name: title_split, dtype: object"
293
+ ]
294
+ },
295
+ "execution_count": 56,
296
+ "metadata": {},
297
+ "output_type": "execute_result"
298
+ }
299
+ ],
300
+ "source": [
301
+ "df['title_split'].head()"
302
+ ]
303
+ },
304
+ {
305
+ "cell_type": "code",
306
+ "execution_count": 57,
307
+ "metadata": {},
308
+ "outputs": [
309
+ {
310
+ "data": {
311
+ "text/html": [
312
+ "<div>\n",
313
+ "<style scoped>\n",
314
+ " .dataframe tbody tr th:only-of-type {\n",
315
+ " vertical-align: middle;\n",
316
+ " }\n",
317
+ "\n",
318
+ " .dataframe tbody tr th {\n",
319
+ " vertical-align: top;\n",
320
+ " }\n",
321
+ "\n",
322
+ " .dataframe thead th {\n",
323
+ " text-align: right;\n",
324
+ " }\n",
325
+ "</style>\n",
326
+ "<table border=\"1\" class=\"dataframe\">\n",
327
+ " <thead>\n",
328
+ " <tr style=\"text-align: right;\">\n",
329
+ " <th></th>\n",
330
+ " <th>subreddit</th>\n",
331
+ " <th>id</th>\n",
332
+ " <th>title</th>\n",
333
+ " <th>text</th>\n",
334
+ " <th>url</th>\n",
335
+ " <th>score</th>\n",
336
+ " <th>date</th>\n",
337
+ " <th>subreddit_subscribers</th>\n",
338
+ " <th>num_comments</th>\n",
339
+ " <th>ups</th>\n",
340
+ " <th>...</th>\n",
341
+ " <th>title_love_count</th>\n",
342
+ " <th>text_love_count</th>\n",
343
+ " <th>title_romance_count</th>\n",
344
+ " <th>text_romance_count</th>\n",
345
+ " <th>title_I_count</th>\n",
346
+ " <th>text_I_count</th>\n",
347
+ " <th>title_i_count</th>\n",
348
+ " <th>text_i_count</th>\n",
349
+ " <th>title_facebook_count</th>\n",
350
+ " <th>text_facebook_count</th>\n",
351
+ " </tr>\n",
352
+ " </thead>\n",
353
+ " <tbody>\n",
354
+ " <tr>\n",
355
+ " <th>0</th>\n",
356
+ " <td>PurplePillDebate</td>\n",
357
+ " <td>1bbj8lq</td>\n",
358
+ " <td>How casual sex is perceived for men and women</td>\n",
359
+ " <td>In regards to the famous double standard (how ...</td>\n",
360
+ " <td></td>\n",
361
+ " <td>0</td>\n",
362
+ " <td>1.710100e+09</td>\n",
363
+ " <td>127329</td>\n",
364
+ " <td>199</td>\n",
365
+ " <td>0</td>\n",
366
+ " <td>...</td>\n",
367
+ " <td>0</td>\n",
368
+ " <td>0</td>\n",
369
+ " <td>0</td>\n",
370
+ " <td>0</td>\n",
371
+ " <td>0</td>\n",
372
+ " <td>0</td>\n",
373
+ " <td>0</td>\n",
374
+ " <td>10</td>\n",
375
+ " <td>0</td>\n",
376
+ " <td>0</td>\n",
377
+ " </tr>\n",
378
+ " <tr>\n",
379
+ " <th>1</th>\n",
380
+ " <td>PurplePillDebate</td>\n",
381
+ " <td>1aigdg6</td>\n",
382
+ " <td>Is it wrong to \"settle\" because you can't get ...</td>\n",
383
+ " <td>I wanted to discuss this from the **male's per...</td>\n",
384
+ " <td></td>\n",
385
+ " <td>33</td>\n",
386
+ " <td>1.707024e+09</td>\n",
387
+ " <td>127329</td>\n",
388
+ " <td>360</td>\n",
389
+ " <td>33</td>\n",
390
+ " <td>...</td>\n",
391
+ " <td>0</td>\n",
392
+ " <td>0</td>\n",
393
+ " <td>0</td>\n",
394
+ " <td>0</td>\n",
395
+ " <td>0</td>\n",
396
+ " <td>0</td>\n",
397
+ " <td>0</td>\n",
398
+ " <td>2</td>\n",
399
+ " <td>0</td>\n",
400
+ " <td>0</td>\n",
401
+ " </tr>\n",
402
+ " <tr>\n",
403
+ " <th>2</th>\n",
404
+ " <td>ForeverAloneWomen</td>\n",
405
+ " <td>1aqwejo</td>\n",
406
+ " <td>Is anyone else annoyed by seeing all the valen...</td>\n",
407
+ " <td>I removed instagram and facebook from my phone...</td>\n",
408
+ " <td></td>\n",
409
+ " <td>54</td>\n",
410
+ " <td>1.707941e+09</td>\n",
411
+ " <td>22857</td>\n",
412
+ " <td>5</td>\n",
413
+ " <td>54</td>\n",
414
+ " <td>...</td>\n",
415
+ " <td>0</td>\n",
416
+ " <td>1</td>\n",
417
+ " <td>0</td>\n",
418
+ " <td>0</td>\n",
419
+ " <td>0</td>\n",
420
+ " <td>3</td>\n",
421
+ " <td>0</td>\n",
422
+ " <td>5</td>\n",
423
+ " <td>0</td>\n",
424
+ " <td>2</td>\n",
425
+ " </tr>\n",
426
+ " <tr>\n",
427
+ " <th>3</th>\n",
428
+ " <td>marriedredpill</td>\n",
429
+ " <td>b45byj</td>\n",
430
+ " <td>60 DoD 2019 - Official kickoff</td>\n",
431
+ " <td>Rejoice, for 60 DoD 2019 is finally here! I kn...</td>\n",
432
+ " <td></td>\n",
433
+ " <td>60</td>\n",
434
+ " <td>1.553263e+09</td>\n",
435
+ " <td>50459</td>\n",
436
+ " <td>50</td>\n",
437
+ " <td>60</td>\n",
438
+ " <td>...</td>\n",
439
+ " <td>0</td>\n",
440
+ " <td>0</td>\n",
441
+ " <td>0</td>\n",
442
+ " <td>0</td>\n",
443
+ " <td>0</td>\n",
444
+ " <td>0</td>\n",
445
+ " <td>0</td>\n",
446
+ " <td>6</td>\n",
447
+ " <td>0</td>\n",
448
+ " <td>0</td>\n",
449
+ " </tr>\n",
450
+ " <tr>\n",
451
+ " <th>4</th>\n",
452
+ " <td>ForeverAloneWomen</td>\n",
453
+ " <td>18uwb2f</td>\n",
454
+ " <td>How do I go about getting in a relationship</td>\n",
455
+ " <td>I 21 tried to date for years...and so far ive ...</td>\n",
456
+ " <td></td>\n",
457
+ " <td>24</td>\n",
458
+ " <td>1.703988e+09</td>\n",
459
+ " <td>22857</td>\n",
460
+ " <td>31</td>\n",
461
+ " <td>24</td>\n",
462
+ " <td>...</td>\n",
463
+ " <td>0</td>\n",
464
+ " <td>0</td>\n",
465
+ " <td>0</td>\n",
466
+ " <td>0</td>\n",
467
+ " <td>0</td>\n",
468
+ " <td>3</td>\n",
469
+ " <td>1</td>\n",
470
+ " <td>14</td>\n",
471
+ " <td>0</td>\n",
472
+ " <td>0</td>\n",
473
+ " </tr>\n",
474
+ " </tbody>\n",
475
+ "</table>\n",
476
+ "<p>5 rows Γ— 72 columns</p>\n",
477
+ "</div>"
478
+ ],
479
+ "text/plain": [
480
+ " subreddit id \\\n",
481
+ "0 PurplePillDebate 1bbj8lq \n",
482
+ "1 PurplePillDebate 1aigdg6 \n",
483
+ "2 ForeverAloneWomen 1aqwejo \n",
484
+ "3 marriedredpill b45byj \n",
485
+ "4 ForeverAloneWomen 18uwb2f \n",
486
+ "\n",
487
+ " title \\\n",
488
+ "0 How casual sex is perceived for men and women \n",
489
+ "1 Is it wrong to \"settle\" because you can't get ... \n",
490
+ "2 Is anyone else annoyed by seeing all the valen... \n",
491
+ "3 60 DoD 2019 - Official kickoff \n",
492
+ "4 How do I go about getting in a relationship \n",
493
+ "\n",
494
+ " text url score date \\\n",
495
+ "0 In regards to the famous double standard (how ... 0 1.710100e+09 \n",
496
+ "1 I wanted to discuss this from the **male's per... 33 1.707024e+09 \n",
497
+ "2 I removed instagram and facebook from my phone... 54 1.707941e+09 \n",
498
+ "3 Rejoice, for 60 DoD 2019 is finally here! I kn... 60 1.553263e+09 \n",
499
+ "4 I 21 tried to date for years...and so far ive ... 24 1.703988e+09 \n",
500
+ "\n",
501
+ " subreddit_subscribers num_comments ups ... title_love_count \\\n",
502
+ "0 127329 199 0 ... 0 \n",
503
+ "1 127329 360 33 ... 0 \n",
504
+ "2 22857 5 54 ... 0 \n",
505
+ "3 50459 50 60 ... 0 \n",
506
+ "4 22857 31 24 ... 0 \n",
507
+ "\n",
508
+ " text_love_count title_romance_count text_romance_count title_I_count \\\n",
509
+ "0 0 0 0 0 \n",
510
+ "1 0 0 0 0 \n",
511
+ "2 1 0 0 0 \n",
512
+ "3 0 0 0 0 \n",
513
+ "4 0 0 0 0 \n",
514
+ "\n",
515
+ " text_I_count title_i_count text_i_count title_facebook_count \\\n",
516
+ "0 0 0 10 0 \n",
517
+ "1 0 0 2 0 \n",
518
+ "2 3 0 5 0 \n",
519
+ "3 0 0 6 0 \n",
520
+ "4 3 1 14 0 \n",
521
+ "\n",
522
+ " text_facebook_count \n",
523
+ "0 0 \n",
524
+ "1 0 \n",
525
+ "2 2 \n",
526
+ "3 0 \n",
527
+ "4 0 \n",
528
+ "\n",
529
+ "[5 rows x 72 columns]"
530
+ ]
531
+ },
532
+ "execution_count": 57,
533
+ "metadata": {},
534
+ "output_type": "execute_result"
535
+ }
536
+ ],
537
+ "source": [
538
+ "df.head()"
539
+ ]
540
+ },
541
+ {
542
+ "cell_type": "code",
543
+ "execution_count": 58,
544
+ "metadata": {},
545
+ "outputs": [],
546
+ "source": [
547
+ "def count_words(text_word_list, category_words):\n",
548
+ " \"\"\"To extract the count of specific categories in a text\"\"\"\n",
549
+ " count = 0\n",
550
+ " for word in text_word_list:\n",
551
+ " if word in category_words:\n",
552
+ " count += 1\n",
553
+ " return count"
554
+ ]
555
+ },
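For reference, the same count can be written with a set lookup, which is a bit faster for the longer category lists; a small sketch plus a usage check:

def count_words_set(text_word_list, category_words):
    """Count how many tokens fall inside a category, using a set for O(1) lookups."""
    category_set = set(category_words)
    return sum(1 for word in text_word_list if word in category_set)

# Example: "sex" and "men" both match, so this returns 2.
count_words_set(["casual", "sex", "perceived", "men", "women"],
                sex_words + gendered_words_men)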
556
+ {
557
+ "cell_type": "code",
558
+ "execution_count": 62,
559
+ "metadata": {},
560
+ "outputs": [],
561
+ "source": [
562
+ "# count the number of gendered words in the title\n",
563
+ "\n",
564
+ "categories_list = [gendered_words_men, gendered_words_father, gendered_words_brother, gendered_words_son, gendered_words_uncle, gendered_words_nephew, gendered_words_husband, gendered_words_boyfriend, \n",
565
+ " gendered_words_women, gendered_words_mother, gendered_words_sister, gendered_words_daughter, gendered_words_aunt, gendered_words_niece, gendered_words_wife, gendered_words_girlfriend,\n",
566
+ " sex_words, female_organs_words, male_organ_words, pregnancy_words, menstrual_words, love_words, romance_words, self_words, social_media_websites, otherness, togetherness]\n",
567
+ "\n",
568
+ "for category in categories_list:\n",
569
+ " df[\"title_\"+category[0]+\"_count\"] = df['title_split'].apply(count_words, category_words=category)\n",
570
+ " df[\"text_\"+category[0]+\"_count\"] = df['text_split'].apply(count_words, category_words=category)\n",
571
+ " "
572
+ ]
573
+ },
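The loop above names each new column after the first word of its list (for example `title_boy_count` for the men list and `title_i_count` for the self words), which is easy to misread. A hedged alternative, keeping the same `count_words` helper but driving it from a hypothetical `categories` dict so the category name ends up in the column:

categories = {
    "men": gendered_words_men,
    "women": gendered_words_women,
    "self": self_words,
    "social_media": social_media_websites,
    # the remaining lists can be added the same way
}

for name, words in categories.items():
    df[f"title_{name}_count"] = df["title_split"].apply(count_words, category_words=words)
    df[f"text_{name}_count"] = df["text_split"].apply(count_words, category_words=words)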
574
+ {
575
+ "cell_type": "code",
576
+ "execution_count": 60,
577
+ "metadata": {},
578
+ "outputs": [
579
+ {
580
+ "data": {
581
+ "text/html": [
582
+ "<div>\n",
583
+ "<style scoped>\n",
584
+ " .dataframe tbody tr th:only-of-type {\n",
585
+ " vertical-align: middle;\n",
586
+ " }\n",
587
+ "\n",
588
+ " .dataframe tbody tr th {\n",
589
+ " vertical-align: top;\n",
590
+ " }\n",
591
+ "\n",
592
+ " .dataframe thead th {\n",
593
+ " text-align: right;\n",
594
+ " }\n",
595
+ "</style>\n",
596
+ "<table border=\"1\" class=\"dataframe\">\n",
597
+ " <thead>\n",
598
+ " <tr style=\"text-align: right;\">\n",
599
+ " <th></th>\n",
600
+ " <th>subreddit</th>\n",
601
+ " <th>id</th>\n",
602
+ " <th>title</th>\n",
603
+ " <th>text</th>\n",
604
+ " <th>url</th>\n",
605
+ " <th>score</th>\n",
606
+ " <th>date</th>\n",
607
+ " <th>subreddit_subscribers</th>\n",
608
+ " <th>num_comments</th>\n",
609
+ " <th>ups</th>\n",
610
+ " <th>...</th>\n",
611
+ " <th>title_love_count</th>\n",
612
+ " <th>text_love_count</th>\n",
613
+ " <th>title_romance_count</th>\n",
614
+ " <th>text_romance_count</th>\n",
615
+ " <th>title_I_count</th>\n",
616
+ " <th>text_I_count</th>\n",
617
+ " <th>title_i_count</th>\n",
618
+ " <th>text_i_count</th>\n",
619
+ " <th>title_facebook_count</th>\n",
620
+ " <th>text_facebook_count</th>\n",
621
+ " </tr>\n",
622
+ " </thead>\n",
623
+ " <tbody>\n",
624
+ " <tr>\n",
625
+ " <th>0</th>\n",
626
+ " <td>PurplePillDebate</td>\n",
627
+ " <td>1bbj8lq</td>\n",
628
+ " <td>How casual sex is perceived for men and women</td>\n",
629
+ " <td>In regards to the famous double standard (how ...</td>\n",
630
+ " <td></td>\n",
631
+ " <td>0</td>\n",
632
+ " <td>1.710100e+09</td>\n",
633
+ " <td>127329</td>\n",
634
+ " <td>199</td>\n",
635
+ " <td>0</td>\n",
636
+ " <td>...</td>\n",
637
+ " <td>0</td>\n",
638
+ " <td>0</td>\n",
639
+ " <td>0</td>\n",
640
+ " <td>0</td>\n",
641
+ " <td>0</td>\n",
642
+ " <td>0</td>\n",
643
+ " <td>0</td>\n",
644
+ " <td>0</td>\n",
645
+ " <td>0</td>\n",
646
+ " <td>0</td>\n",
647
+ " </tr>\n",
648
+ " <tr>\n",
649
+ " <th>1</th>\n",
650
+ " <td>PurplePillDebate</td>\n",
651
+ " <td>1aigdg6</td>\n",
652
+ " <td>Is it wrong to \"settle\" because you can't get ...</td>\n",
653
+ " <td>I wanted to discuss this from the **male's per...</td>\n",
654
+ " <td></td>\n",
655
+ " <td>33</td>\n",
656
+ " <td>1.707024e+09</td>\n",
657
+ " <td>127329</td>\n",
658
+ " <td>360</td>\n",
659
+ " <td>33</td>\n",
660
+ " <td>...</td>\n",
661
+ " <td>0</td>\n",
662
+ " <td>0</td>\n",
663
+ " <td>0</td>\n",
664
+ " <td>0</td>\n",
665
+ " <td>0</td>\n",
666
+ " <td>0</td>\n",
667
+ " <td>0</td>\n",
668
+ " <td>0</td>\n",
669
+ " <td>0</td>\n",
670
+ " <td>0</td>\n",
671
+ " </tr>\n",
672
+ " <tr>\n",
673
+ " <th>2</th>\n",
674
+ " <td>ForeverAloneWomen</td>\n",
675
+ " <td>1aqwejo</td>\n",
676
+ " <td>Is anyone else annoyed by seeing all the valen...</td>\n",
677
+ " <td>I removed instagram and facebook from my phone...</td>\n",
678
+ " <td></td>\n",
679
+ " <td>54</td>\n",
680
+ " <td>1.707941e+09</td>\n",
681
+ " <td>22857</td>\n",
682
+ " <td>5</td>\n",
683
+ " <td>54</td>\n",
684
+ " <td>...</td>\n",
685
+ " <td>0</td>\n",
686
+ " <td>1</td>\n",
687
+ " <td>0</td>\n",
688
+ " <td>0</td>\n",
689
+ " <td>0</td>\n",
690
+ " <td>3</td>\n",
691
+ " <td>0</td>\n",
692
+ " <td>0</td>\n",
693
+ " <td>0</td>\n",
694
+ " <td>2</td>\n",
695
+ " </tr>\n",
696
+ " <tr>\n",
697
+ " <th>3</th>\n",
698
+ " <td>marriedredpill</td>\n",
699
+ " <td>b45byj</td>\n",
700
+ " <td>60 DoD 2019 - Official kickoff</td>\n",
701
+ " <td>Rejoice, for 60 DoD 2019 is finally here! I kn...</td>\n",
702
+ " <td></td>\n",
703
+ " <td>60</td>\n",
704
+ " <td>1.553263e+09</td>\n",
705
+ " <td>50459</td>\n",
706
+ " <td>50</td>\n",
707
+ " <td>60</td>\n",
708
+ " <td>...</td>\n",
709
+ " <td>0</td>\n",
710
+ " <td>0</td>\n",
711
+ " <td>0</td>\n",
712
+ " <td>0</td>\n",
713
+ " <td>0</td>\n",
714
+ " <td>0</td>\n",
715
+ " <td>0</td>\n",
716
+ " <td>0</td>\n",
717
+ " <td>0</td>\n",
718
+ " <td>0</td>\n",
719
+ " </tr>\n",
720
+ " <tr>\n",
721
+ " <th>4</th>\n",
722
+ " <td>ForeverAloneWomen</td>\n",
723
+ " <td>18uwb2f</td>\n",
724
+ " <td>How do I go about getting in a relationship</td>\n",
725
+ " <td>I 21 tried to date for years...and so far ive ...</td>\n",
726
+ " <td></td>\n",
727
+ " <td>24</td>\n",
728
+ " <td>1.703988e+09</td>\n",
729
+ " <td>22857</td>\n",
730
+ " <td>31</td>\n",
731
+ " <td>24</td>\n",
732
+ " <td>...</td>\n",
733
+ " <td>0</td>\n",
734
+ " <td>0</td>\n",
735
+ " <td>0</td>\n",
736
+ " <td>0</td>\n",
737
+ " <td>0</td>\n",
738
+ " <td>3</td>\n",
739
+ " <td>0</td>\n",
740
+ " <td>0</td>\n",
741
+ " <td>0</td>\n",
742
+ " <td>0</td>\n",
743
+ " </tr>\n",
744
+ " </tbody>\n",
745
+ "</table>\n",
746
+ "<p>5 rows Γ— 72 columns</p>\n",
747
+ "</div>"
748
+ ],
749
+ "text/plain": [
750
+ " subreddit id \\\n",
751
+ "0 PurplePillDebate 1bbj8lq \n",
752
+ "1 PurplePillDebate 1aigdg6 \n",
753
+ "2 ForeverAloneWomen 1aqwejo \n",
754
+ "3 marriedredpill b45byj \n",
755
+ "4 ForeverAloneWomen 18uwb2f \n",
756
+ "\n",
757
+ " title \\\n",
758
+ "0 How casual sex is perceived for men and women \n",
759
+ "1 Is it wrong to \"settle\" because you can't get ... \n",
760
+ "2 Is anyone else annoyed by seeing all the valen... \n",
761
+ "3 60 DoD 2019 - Official kickoff \n",
762
+ "4 How do I go about getting in a relationship \n",
763
+ "\n",
764
+ " text url score date \\\n",
765
+ "0 In regards to the famous double standard (how ... 0 1.710100e+09 \n",
766
+ "1 I wanted to discuss this from the **male's per... 33 1.707024e+09 \n",
767
+ "2 I removed instagram and facebook from my phone... 54 1.707941e+09 \n",
768
+ "3 Rejoice, for 60 DoD 2019 is finally here! I kn... 60 1.553263e+09 \n",
769
+ "4 I 21 tried to date for years...and so far ive ... 24 1.703988e+09 \n",
770
+ "\n",
771
+ " subreddit_subscribers num_comments ups ... title_love_count \\\n",
772
+ "0 127329 199 0 ... 0 \n",
773
+ "1 127329 360 33 ... 0 \n",
774
+ "2 22857 5 54 ... 0 \n",
775
+ "3 50459 50 60 ... 0 \n",
776
+ "4 22857 31 24 ... 0 \n",
777
+ "\n",
778
+ " text_love_count title_romance_count text_romance_count title_I_count \\\n",
779
+ "0 0 0 0 0 \n",
780
+ "1 0 0 0 0 \n",
781
+ "2 1 0 0 0 \n",
782
+ "3 0 0 0 0 \n",
783
+ "4 0 0 0 0 \n",
784
+ "\n",
785
+ " text_I_count title_i_count text_i_count title_facebook_count \\\n",
786
+ "0 0 0 0 0 \n",
787
+ "1 0 0 0 0 \n",
788
+ "2 3 0 0 0 \n",
789
+ "3 0 0 0 0 \n",
790
+ "4 3 0 0 0 \n",
791
+ "\n",
792
+ " text_facebook_count \n",
793
+ "0 0 \n",
794
+ "1 0 \n",
795
+ "2 2 \n",
796
+ "3 0 \n",
797
+ "4 0 \n",
798
+ "\n",
799
+ "[5 rows x 72 columns]"
800
+ ]
801
+ },
802
+ "execution_count": 60,
803
+ "metadata": {},
804
+ "output_type": "execute_result"
805
+ }
806
+ ],
807
+ "source": [
808
+ "df.head()"
809
+ ]
810
+ },
811
  {
812
  "cell_type": "code",
813
  "execution_count": null,
814
  "metadata": {},
815
  "outputs": [],
816
+ "source": [
817
+ "# make a new column for the total count of gendered words in the title and text\n",
818
+ "\n"
819
+ ]
820
  }
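The last added cell is left empty apart from its comment. One way to fill it, summing the per-category columns produced by the counting loop above into overall totals (the new column names here are hypothetical):

# Collect every per-category count column, excluding the plain word counts.
title_cols = [c for c in df.columns
              if c.startswith("title_") and c.endswith("_count") and c != "title_count"]
text_cols = [c for c in df.columns
             if c.startswith("text_") and c.endswith("_count") and c != "text_count"]

df["title_total_count"] = df[title_cols].sum(axis=1)
df["text_total_count"] = df[text_cols].sum(axis=1)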
821
  ],
822
  "metadata": {