t.me/xtekky committed
Commit: 081dfff
Parent(s): b68da4c

quora (poe) [gpt-4/3.5] create bot feature

added create bot feature
quora.Model.create

new model names:
{
    'sage'                : 'capybara',
    'gpt-4'               : 'beaver',
    'claude-v1.2'         : 'a2_2',
    'claude-instant-v1.0' : 'a2',
    'gpt-3.5-turbo'       : 'chinchilla'
}
- README.md +29 -21
- quora/__init__.py +106 -12
- quora/cookies.txt +5 -0
- requirements.txt +1 -1
- testing/quora_test_2.py +18 -0
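In short, the new flow is: create an account with bot creation enabled, create a custom bot with `quora.Model.create`, then pass the generated bot handle as `custom_model` to the completion calls. A minimal sketch of that flow, mirroring the README example and `testing/quora_test_2.py` added in this commit (all names and parameters shown are taken from the diff below, none are invented):

```python
import quora

# create an account; enable_bot_creation must be True for Model.create to succeed
token = quora.Account.create(logging = True, enable_bot_creation=True)

# create a custom bot; only 'gpt-3.5-turbo' and 'claude-instant-v1.0' are accepted as base models
model = quora.Model.create(
    token         = token,
    model         = 'gpt-3.5-turbo',
    system_prompt = 'you are ChatGPT a large language model ...'
)

# the generated bot handle, e.g. 'gptx' followed by random digits
print(model.name)

# stream a completion from the custom bot by passing its handle as custom_model
for response in quora.StreamingCompletion.create(
        custom_model = model.name,
        prompt       = 'hello world',
        token        = token):
    print(response.completion.choices[0].text)
```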
README.md
CHANGED
@@ -6,7 +6,7 @@ This repository provides reverse-engineered language models from various sources
 
 ## To-Do List
 
-- [ ] implement poe.com create bot feature (4)
+- [x] implement poe.com create bot feature (4) AVAILABLE NOW
 - [ ] poe.com chat history management (3)
 - [x] renaming the 'poe' module to 'quora' (2)
 - [x] add you.com api (1)

@@ -47,31 +47,41 @@ These sites will be reverse engineered but need account access:
 ### Example: `quora (poe)` (use like openai pypi package) - GPT-4 <a name="example-poe"></a>
 
 ```python
-# quora.Completion.create
-# quora.StreamCompletion.create
+# quora model names: (use left key as argument)
+models = {
+    'sage'                : 'capybara',
+    'gpt-4'               : 'beaver',
+    'claude-v1.2'         : 'a2_2',
+    'claude-instant-v1.0' : 'a2',
+    'gpt-3.5-turbo'       : 'chinchilla'
+}
+```
 
+#### !! new: bot creation
 
+```python
+# import quora (poe) package
+import quora
 
+# create account
+# make sure to set enable_bot_creation to True
+token = quora.Account.create(logging = True, enable_bot_creation=True)
 
+model = quora.Model.create(
+    token = token,
+    model = 'gpt-3.5-turbo', # or claude-instant-v1.0
+    system_prompt = 'you are ChatGPT a large language model ...'
+)
 
+print(model.name) # gptx....
 
+# streaming response
+for response in quora.StreamingCompletion.create(
+    custom_model = model.name,
+    prompt = 'hello world',
+    token = token):
 
-print(response.completion.choices[0].text)
+    print(response.completion.choices[0].text)
 ```
 
 #### Normal Response:

@@ -84,8 +94,6 @@ response = quora.Completion.create(model = 'gpt-4',
 print(response.completion.choices[0].text)
 ```
 
 ### Example: `t3nsor` (use like openai pypi package) <a name="example-t3nsor"></a>
 
 ```python
quora/__init__.py
CHANGED
@@ -3,11 +3,12 @@ from quora.mail import Mail
 from requests import Session
 from re import search, findall
 from json import loads
 from time import sleep
 from pathlib import Path
-from random import choice
+from random import choice, choices, randint
+from string import ascii_letters, digits
 from urllib import parse
 
 class PoeResponse:
 
     class Completion:

@@ -48,8 +49,88 @@ class PoeResponse:
     def json(self) -> dict:
         return self.response_dict
 
+
+class ModelResponse:
+    def __init__(self, json_response: dict) -> None:
+        self.id      = json_response['data']['poeBotCreate']['bot']['id']
+        self.name    = json_response['data']['poeBotCreate']['bot']['displayName']
+        self.limit   = json_response['data']['poeBotCreate']['bot']['messageLimit']['dailyLimit']
+        self.deleted = json_response['data']['poeBotCreate']['bot']['deletionState']
+
+class Model:
+    def create(
+        token: str,
+        model: str = 'gpt-3.5-turbo', # claude-instant
+        system_prompt: str = 'You are ChatGPT a large language model developed by Openai. Answer as consisely as possible',
+        description: str = 'gpt-3.5 language model from openai, skidded by poe.com',
+        handle: str = None) -> ModelResponse:
+
+        models = {
+            'gpt-3.5-turbo'      : 'chinchilla',
+            'claude-instant-v1.0': 'a2'
+        }
+
+        if not handle:
+            handle = f'gptx{randint(1111111, 9999999)}'
+
+        client = Session()
+        client.cookies['p-b'] = token
+
+        settings = client.get('https://poe.com/api/settings').json()
+
+        client.headers = {
+            "host"              : "poe.com",
+            "origin"            : "https://poe.com",
+            "referer"           : "https://poe.com/",
+            "content-type"      : "application/json",
+            "poe-formkey"       : settings['formkey'],
+            "poe-tchannel"      : settings['tchannelData']['channel'],
+            "user-agent"        : "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
+            "connection"        : "keep-alive",
+            "sec-ch-ua"         : "\"Chromium\";v=\"112\", \"Google Chrome\";v=\"112\", \"Not:A-Brand\";v=\"99\"",
+            "sec-ch-ua-mobile"  : "?0",
+            "sec-ch-ua-platform": "\"macOS\"",
+            "sec-fetch-site"    : "same-origin",
+            "sec-fetch-mode"    : "cors",
+            "sec-fetch-dest"    : "empty",
+            "accept"            : "*/*",
+            "accept-encoding"   : "gzip, deflate, br",
+            "accept-language"   : "en-GB,en-US;q=0.9,en;q=0.8",
+        }
+
+        response = client.post("https://poe.com/api/gql_POST", json = {
+            'queryName': 'CreateBotMain_poeBotCreate_Mutation',
+            'variables': {
+                'model'                : models[model],
+                'handle'               : handle,
+                'prompt'               : system_prompt,
+                'isPromptPublic'       : True,
+                'introduction'         : '',
+                'description'          : description,
+                'profilePictureUrl'    : 'https://qph.fs.quoracdn.net/main-qimg-24e0b480dcd946e1cc6728802c5128b6',
+                'apiUrl'               : None,
+                'apiKey'               : ''.join(choices(ascii_letters + digits, k = 32)),
+                'isApiBot'             : False,
+                'hasLinkification'     : False,
+                'hasMarkdownRendering' : False,
+                'hasSuggestedReplies'  : False,
+                'isPrivateBot'         : False
+            },
+            'query': 'mutation CreateBotMain_poeBotCreate_Mutation(\n $model: String!\n $handle: String!\n $prompt: String!\n $isPromptPublic: Boolean!\n $introduction: String!\n $description: String!\n $profilePictureUrl: String\n $apiUrl: String\n $apiKey: String\n $isApiBot: Boolean\n $hasLinkification: Boolean\n $hasMarkdownRendering: Boolean\n $hasSuggestedReplies: Boolean\n $isPrivateBot: Boolean\n) {\n poeBotCreate(model: $model, handle: $handle, promptPlaintext: $prompt, isPromptPublic: $isPromptPublic, introduction: $introduction, description: $description, profilePicture: $profilePictureUrl, apiUrl: $apiUrl, apiKey: $apiKey, isApiBot: $isApiBot, hasLinkification: $hasLinkification, hasMarkdownRendering: $hasMarkdownRendering, hasSuggestedReplies: $hasSuggestedReplies, isPrivateBot: $isPrivateBot) {\n status\n bot {\n id\n ...BotHeader_bot\n }\n }\n}\n\nfragment BotHeader_bot on Bot {\n displayName\n messageLimit {\n dailyLimit\n }\n ...BotImage_bot\n ...BotLink_bot\n ...IdAnnotation_node\n ...botHelpers_useViewerCanAccessPrivateBot\n ...botHelpers_useDeletion_bot\n}\n\nfragment BotImage_bot on Bot {\n displayName\n ...botHelpers_useDeletion_bot\n ...BotImage_useProfileImage_bot\n}\n\nfragment BotImage_useProfileImage_bot on Bot {\n image {\n __typename\n ... on LocalBotImage {\n localName\n }\n ... on UrlBotImage {\n url\n }\n }\n ...botHelpers_useDeletion_bot\n}\n\nfragment BotLink_bot on Bot {\n displayName\n}\n\nfragment IdAnnotation_node on Node {\n __isNode: __typename\n id\n}\n\nfragment botHelpers_useDeletion_bot on Bot {\n deletionState\n}\n\nfragment botHelpers_useViewerCanAccessPrivateBot on Bot {\n isPrivateBot\n viewerIsCreator\n}\n',
+        })
+
+        if not 'success' in response.text:
+            raise Exception('''
+                Bot creation Failed
+                !! Important !!
+                Bot creation was not enabled on this account
+                please use: quora.Account.create with enable_bot_creation set to True
+            ''')
+
+        return ModelResponse(response.json())
+
 class Account:
-    def create(proxy: None or str = None, logging: bool = False):
+    def create(proxy: None or str = None, logging: bool = False, enable_bot_creation: bool = False):
 
         client = Session()
         client.proxies = {

@@ -133,6 +214,13 @@ class Account:
         with open(Path(__file__).resolve().parent / 'cookies.txt', 'a') as f:
             f.write(f'{token}\n')
 
+        if enable_bot_creation:
+            client.post("https://poe.com/api/gql_POST", json = {
+                "queryName": "UserProfileConfigurePreviewModal_markMultiplayerNuxCompleted_Mutation",
+                "variables": {},
+                "query": "mutation UserProfileConfigurePreviewModal_markMultiplayerNuxCompleted_Mutation {\n markMultiplayerNuxCompleted {\n viewer {\n hasCompletedMultiplayerNux\n id\n }\n }\n}\n"
+            })
+
         return token
 
     def get():

@@ -142,17 +230,20 @@
 class StreamingCompletion:
     def create(
         model : str = 'gpt-4',
+        custom_model : str = None,
         prompt: str = 'hello world',
         token : str = ''):
 
         models = {
            'sage'   : 'capybara',
            'gpt-4'  : 'beaver',
-           'claude+': 'a2_2',
-           'claude' : 'a2',
-           'gpt-3.5': 'chinchilla'
+           'claude-v1.2'         : 'a2_2',
+           'claude-instant-v1.0' : 'a2',
+           'gpt-3.5-turbo'       : 'chinchilla'
         }
 
+        _model = models[model] if not custom_model else custom_model
+
         client = PoeClient(token)
 
         for chunk in client.send_message(models[model], prompt):

@@ -161,7 +252,7 @@ class StreamingCompletion:
                 'id'     : chunk["messageId"],
                 'object' : 'text_completion',
                 'created': chunk['creationTime'],
-                'model'  : models[model],
+                'model'  : _model,
                 'choices': [{
                     'text' : chunk["text_new"],
                     'index': 0,

@@ -178,17 +269,20 @@
 class Completion:
     def create(
         model : str = 'gpt-4',
+        custom_model : str = None,
         prompt: str = 'hello world',
         token : str = ''):
 
         models = {
            'sage'   : 'capybara',
            'gpt-4'  : 'beaver',
-           'claude+': 'a2_2',
-           'claude' : 'a2',
-           'gpt-3.5': 'chinchilla'
+           'claude-v1.2'         : 'a2_2',
+           'claude-instant-v1.0' : 'a2',
+           'gpt-3.5-turbo'       : 'chinchilla'
         }
 
+        _model = models[model] if not custom_model else custom_model
+
         client = PoeClient(token)
 
         for chunk in client.send_message(models[model], prompt):

@@ -198,7 +292,7 @@ class Completion:
                 'id'     : chunk["messageId"],
                 'object' : 'text_completion',
                 'created': chunk['creationTime'],
-                'model'  : models[model],
+                'model'  : _model,
                 'choices': [{
                     'text' : chunk["text"],
                     'index': 0,
quora/cookies.txt
CHANGED
@@ -8,3 +8,8 @@ pUEbtxobN_QUSpLIR8RGww==
 9_dUWxKkHHhpQRSvCvBk2Q==
 UV45rvGwUwi2qV9QdIbMcw==
 cVIN0pK1Wx-F7zCdUxlYqA==
+UP2wQVds17VFHh6IfCQFrA==
+18eKr0ME2Tzifdfqat38Aw==
+FNgKEpc2r-XqWe0rHBfYpg==
+juCAh6kB0sUpXHvKik2woA==
+nBvuNYRLaE4xE4HuzBPiIQ==
requirements.txt
CHANGED
@@ -1,3 +1,3 @@
 websocket-client
 requests
-tls-client
+tls-client
testing/quora_test_2.py
ADDED
@@ -0,0 +1,18 @@
+import quora
+
+token = quora.Account.create(logging = True, enable_bot_creation=True)
+
+model = quora.Model.create(
+    token = token,
+    model = 'gpt-3.5-turbo', # or claude-instant-v1.0
+    system_prompt = 'you are ChatGPT a large language model ...'
+)
+
+print(model.name)
+
+for response in quora.StreamingCompletion.create(
+    custom_model = model.name,
+    prompt = 'hello world',
+    token = token):
+
+    print(response.completion.choices[0].text)