asahi417 committed on
Commit 5ad627f
1 Parent(s): 870bafa
.gitignore ADDED
@@ -0,0 +1,2 @@
+ super_tweeteval
+ data
process_unlabeled_tweet.py ADDED
File without changes
timelm_preprocessor.py ADDED
@@ -0,0 +1,122 @@
+ """
+ Removes near-duplicates and tweets from the top pct. of users.
+
+ original -> https://github.com/cardiffnlp/timelms/blob/main/scripts/preprocess.py
+
+ optional arguments:
+   -h, --help            show this help message and exit
+   --src SRC             Path to set of input tweets (.jl).
+   --out OUT             Path to output from preprocessing (.jl).
+   --blacklist_pct BLACKLIST_PCT
+                         Fraction of most frequent users to ignore (0.01 = top 1 percent).
+ Example:
+ python timelm_preprocessor.py --src data/tweets-2020-Q3.jl --out dataset/tweets/tweets-2020-Q3.jl
+ """
+
+ import argparse
+ import json
+ import logging
+ import os
+ import string
+ import re
+ from collections import Counter
+
+ from datasketch import MinHash, LeanMinHash
+ import xxhash
+
+ logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s', datefmt='%d-%b-%y %H:%M:%S')
+ re_url = re.compile(r'https?:\/\/[\w\.\/\?\=\d&#%_:/-]+')
+ re_user = re.compile(r'@\w+')
+ with open('verified_users.v091122.txt') as f:
+     verified_users = set([f"@{i}" for i in f.read().split('\n') if len(i)])
+
+
+ def clean_text(text):
+     text = text.replace('\n', ' ').replace('\r', ' ').replace('\t', ' ')
+     text = re_url.sub('{URL}', text)
+     # mask unverified handles one match at a time; a plain str.replace per
+     # handle could corrupt longer handles that share a prefix (@ab vs @abc)
+     text = re_user.sub(lambda m: m.group(0) if m.group(0) in verified_users else '@user', text)
+     return text
+
+
+ def hash_tweet(target_tweet, num_perm=16):
+     def normalize_text(text):
+         text = text.translate(str.maketrans('', '', string.punctuation))  # remove punctuation
+         text = text.lower()
+         return text
+
+     def minhash(seq):
+         # https://skeptric.com/minhash/
+         m = MinHash(num_perm=num_perm, hashfunc=xxhash.xxh64_intdigest)
+         for s in seq:
+             m.update(s.encode('utf8'))
+         return LeanMinHash(m)
+
+     tokens = normalize_text(target_tweet['text']).split()  # whitespace tokenization
+     return minhash(tokens)
+
+
+ if __name__ == '__main__':
+
+     parser = argparse.ArgumentParser(description='Removes near-duplicates and tweets from top pct. of users.')
+     parser.add_argument('--src', type=str, required=True, help='Path to set of input tweets (.jl).')
+     parser.add_argument('--out', type=str, required=True, help='Path to output from preprocessing (.jl).')
+     parser.add_argument('--blacklist_pct', type=float, required=False, default=0.01,
+                         help='Fraction of most frequent users to ignore (0.01 = top 1 percent).')
+     args = parser.parse_args()
+     os.makedirs(os.path.dirname(args.out) or '.', exist_ok=True)  # args.out is a file path; create its parent directory
+
+     logging.info('1st pass - Collecting username counts ...')
+     n_input_tweets = 0
+     user_counter = Counter()
+     with open(args.src) as in_tweets_f:
+         for idx, jl_str in enumerate(in_tweets_f):
+             if idx % 1e6 == 0:
+                 logging.info('1st pass - at idx %d' % idx)
+             tweet = json.loads(jl_str)
+             user_counter[tweet['username']] += 1
+             n_input_tweets += 1
+
+     logging.info('1st pass - Completed, found %d tweets' % n_input_tweets)
+     logging.info('1st pass - Found %d users' % len(user_counter.keys()))
+
+     top_users = [user for user, _ in user_counter.most_common()]
+     n_blacklisted_users = int(len(top_users) * args.blacklist_pct)
+     blacklisted_users = set(top_users[:n_blacklisted_users])
+     n_users = len(user_counter.keys())
+     pct_blacklisted_users = round((n_blacklisted_users / n_users) * 100, 2)
+     n_blacklisted_tweets = sum([user_counter[u] for u in blacklisted_users])
+     pct_blacklisted_tweets = round((n_blacklisted_tweets / sum(user_counter.values())) * 100, 2)
+     logging.info(
+         f"1st pass - Blacklisted {len(blacklisted_users)} users ({pct_blacklisted_users}%), "
+         f"ignoring {n_blacklisted_tweets} tweets ({pct_blacklisted_tweets}%)"
+     )
+
+     logging.info('2nd pass - Hashing and writing valid tweets ...')
+     written_hashes = set()
+     n_written = 0
+     n_ignored_by_user = 0
+     n_ignored_by_hash = 0
+     with open(args.src) as in_tweets_f:
+         with open(args.out, 'w') as out_tweets_f:
+             for idx, jl_str in enumerate(in_tweets_f):
+                 if idx % 1e5 == 0:
+                     logging.info('2nd pass - at idx %d' % idx)
+                 tweet = json.loads(jl_str)
+                 tweet['text'] = clean_text(tweet['text'])
+                 tweet_hash = hash_tweet(tweet)
+                 if tweet['username'] in blacklisted_users:
+                     n_ignored_by_user += 1
+                 elif tweet_hash in written_hashes:
+                     n_ignored_by_hash += 1
+                 else:
+                     out_tweets_f.write(json.dumps(tweet) + '\n')
+                     n_written += 1
+                     written_hashes.add(tweet_hash)
+     logging.info(f"2nd pass - Completed, wrote {n_written} tweets.")
+     if n_ignored_by_user > 0:
+         logging.info(f"\tignored {n_ignored_by_user} by user blacklist")
+     if n_ignored_by_hash > 0:
+         logging.info(f"\tignored {n_ignored_by_hash} by hash collision")
+     logging.info("Done")
tweet_generation.py ADDED
File without changes
util.py ADDED
@@ -0,0 +1,14 @@
+ import re
+
+
+ URL_RE = re.compile(r"https?:\/\/[\w\.\/\?\=\d&#%_:/-]+")
+ HANDLE_RE = re.compile(r"@\w+")
+ with open("verified_users.v091122.txt") as f:
+     VERIFIED_USERS = set(i for i in f.read().split("\n") if len(i))
+
+ def process_url(text: str) -> str:
+     return URL_RE.sub("{URL}", text)
+
+ def process_username(text: str) -> str:
+     # keep verified handles, mask the rest ("[URL]" here was a copy-paste slip)
+     return HANDLE_RE.sub(lambda m: m.group(0) if m.group(0)[1:] in VERIFIED_USERS else "@user", text)
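For reference, a hypothetical round-trip through the two helpers (run from the repo root so verified_users.v091122.txt resolves; the handle below is made up and assumed not to be on the verified list):

    # Hypothetical usage of util.py; the URL and handle are invented examples.
    from util import process_url, process_username

    raw = "new results are up at https://example.com/abs/123 cc @some_random_handle"
    print(process_username(process_url(raw)))
    # -> 'new results are up at {URL} cc @user'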
verified_users.v091122.txt ADDED
The diff for this file is too large to render. See raw diff