anantn commited on
Commit
be1c51e
·
1 Parent(s): 4fa8e37

Add 20230429 db snapshot as parquet files

Browse files
README.md CHANGED
@@ -1,3 +1,7 @@
1
  ---
2
  license: mit
3
  ---
 
 
 
 
 
1
  ---
2
  license: mit
3
  ---
4
+
5
+ This repository contains the Hacker News datasets used by https://github.com/anantn/hn-chatgpt-plugin
6
+
7
+ As of June 2025, these are exported as Parquet files instead of SQLite for space efficiency.
hn-sqlite-20230420.db.zst → embeddings.parquet RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:05051f013ade5829471ed9cde4269303eb9def19aab88103ad87149b8229ca37
3
- size 5406260876
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cc3d78a063936b88cdd8cceec278035d816429c1125296f6d09cc0fd8b4652aa
3
+ size 9322735434
hn-sqlite-20230420_embeddings.db.zst → items.parquet RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:167adc5b0f2d969e2027cffdcb6c89975f5e5dee9d2db4f03d1412c36d32c67c
3
- size 8635136123
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8d59e99f80ce0855c000d997ba91238d9708f45b6b2add45a586b3faca53fb54
3
+ size 8523867315
hn-sqlite-20230429.db.zst → kids.parquet RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:2b80f8859e3a383efcdb63f5118a1ab52deebaae85ea0b43d52d8eb3549850be
3
- size 6448492710
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f43aa51e13b16d4afde757c650a11dfd1d9bbe006b4266c4bea320f1730b8637
3
+ size 210842922
sql2parquet.py ADDED
@@ -0,0 +1,103 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import duckdb
2
+ import os
3
+ import sqlite3
4
+ import sys
5
+
6
def convert_sqlite_to_parquet_with_duckdb(sqlite_db_path, output_dir, tables_to_convert=None):
    """
    Convert tables from a SQLite database to Parquet files using DuckDB.

    Args:
        sqlite_db_path (str): The path to the SQLite database file.
        output_dir (str): The directory where the Parquet files will be saved.
        tables_to_convert (list, optional): A list of table names to convert.
            If None, all tables will be converted.
    """

    def _escape_sql_string(text):
        # Double embedded single quotes so a path containing ' cannot
        # break out of the SQL string literal it is interpolated into.
        return text.replace("'", "''")

    def _escape_sql_ident(name):
        # Double embedded double quotes for identifiers quoted with "...".
        return name.replace('"', '""')

    # --- Step 1: Create the output directory if it doesn't exist ---
    # exist_ok avoids the check-then-create race of os.path.exists + makedirs.
    os.makedirs(output_dir, exist_ok=True)

    # --- Step 2: Connect to the SQLite database and get table names ---
    # We use the built-in sqlite3 library just to get the list of tables.
    # DuckDB will do the heavy lifting of reading the data.
    try:
        conn_sqlite = sqlite3.connect(sqlite_db_path)
        try:
            cursor = conn_sqlite.cursor()
            # Get a list of all tables, excluding internal SQLite tables
            # (sqlite_master, sqlite_sequence, ...).
            cursor.execute(
                "SELECT name FROM sqlite_master "
                "WHERE type='table' AND name NOT LIKE 'sqlite_%';"
            )
            all_tables = [row[0] for row in cursor.fetchall()]
        finally:
            # Always release the connection, even if the query raises.
            conn_sqlite.close()
    except sqlite3.Error as e:
        print(f"Error connecting to SQLite database: {e}")
        return

    # --- Step 3: Use DuckDB to attach the SQLite database and convert tables ---

    # Connect to a DuckDB in-memory database. 'read_only=False' is needed
    # for writing the Parquet files. A persistent DuckDB database file can
    # be used here instead for more complex workflows.
    con_duckdb = duckdb.connect(database=':memory:', read_only=False)

    # Track whether ATTACH succeeded so cleanup only detaches what was
    # actually attached (an unconditional DETACH after a failed ATTACH
    # raises a second error that masks the original one).
    attached = False
    try:
        # Load the SQLite extension. It is often autoloaded, but this
        # ensures it is available.
        con_duckdb.execute("INSTALL sqlite; LOAD sqlite;")

        # Attach the SQLite database. This makes all its tables available
        # to DuckDB.
        con_duckdb.execute(
            f"ATTACH '{_escape_sql_string(sqlite_db_path)}' AS sqlite_db (TYPE sqlite);"
        )
        attached = True

        # Determine which tables to convert.
        if tables_to_convert is None:
            tables_to_process = all_tables
        else:
            # Filter to ensure the requested tables actually exist.
            tables_to_process = [
                table for table in tables_to_convert if table in all_tables
            ]

        if not tables_to_process:
            print("No tables found to convert.")
            return

        print(f"Found {len(tables_to_process)} tables to convert: {', '.join(tables_to_process)}")

        # Loop through the tables and use the efficient COPY command.
        for table_name in tables_to_process:
            print(f"\nConverting table: '{table_name}'...")

            # Define the output path for the Parquet file.
            parquet_file_path = os.path.join(output_dir, f'{table_name}.parquet')

            # COPY reads directly from the attached SQLite table and writes
            # directly to the Parquet file. 'FORMAT parquet' selects the
            # output format; compression options (e.g. COMPRESSION 'zstd')
            # could also be added here.
            copy_query = (
                f'COPY (SELECT * FROM sqlite_db."{_escape_sql_ident(table_name)}") '
                f"TO '{_escape_sql_string(parquet_file_path)}' (FORMAT parquet);"
            )

            try:
                con_duckdb.execute(copy_query)
                print(f"  -> Successfully saved to '{parquet_file_path}'")
            except duckdb.Error as copy_err:
                print(f"  -> ERROR converting table '{table_name}': {copy_err}")
                print("     This may be due to unsupported data types or other schema issues.")

    finally:
        # --- Step 4: Clean up (detach the database and close the connection) ---
        if attached:
            try:
                con_duckdb.execute("DETACH sqlite_db;")
            except duckdb.Error as detach_err:
                # Best-effort cleanup: report, but don't mask an in-flight error.
                print(f"Warning: failed to detach SQLite database: {detach_err}")
        con_duckdb.close()
        print("\nConversion process finished.")
91
+
92
# --- How to use the function ---
if __name__ == '__main__':
    # Require the SQLite database path on the command line; print a usage
    # message instead of crashing with an IndexError when it is missing.
    if len(sys.argv) < 2:
        print(f"Usage: {sys.argv[0]} <sqlite_db_path>", file=sys.stderr)
        sys.exit(1)

    database_file = sys.argv[1]

    # Parquet files are written to the current directory, one per table.
    output_folder = '.'

    # Call the function to perform the conversion. A list of tables can
    # also be specified to convert only a subset, for example:
    #   tables_to_convert=['table_1', 'another_large_table']
    convert_sqlite_to_parquet_with_duckdb(database_file, output_folder)
hn-sqlite-20230429_embeddings.db.zst → users.parquet RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:03eb6a6f3c33abcf33d59e521d564dfde82ff8e358493618a410ee70568917ca
3
- size 8660150030
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c5d5c2ce02bc7beea1688b0487336990bb30f906728da0c70e3be9fd361a383f
3
+ size 253783827