Blackbox.py 15 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324
  1. from __future__ import annotations
  2. from aiohttp import ClientSession
  3. import re
  4. import json
  5. import random
  6. import string
  7. import base64
  8. from pathlib import Path
  9. from typing import Optional
  10. from ..typing import AsyncResult, Messages, ImagesType
  11. from ..requests.raise_for_status import raise_for_status
  12. from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
  13. from ..image import to_data_uri
  14. from ..cookies import get_cookies_dir
  15. from .helper import format_prompt, format_image_prompt
  16. from ..providers.response import JsonConversation, ImageResponse
  17. from ..errors import ModelNotSupportedError
  18. class Conversation(JsonConversation):
  19. validated_value: str = None
  20. chat_id: str = None
  21. message_history: Messages = []
  22. def __init__(self, model: str):
  23. self.model = model
class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
    """Provider for blackbox.ai's chat and image-generation endpoints."""
    label = "Blackbox AI"
    url = "https://www.blackbox.ai"
    api_endpoint = "https://www.blackbox.ai/api/chat"
    working = True
    supports_stream = True
    supports_system_message = True
    supports_message_history = True
    default_model = "blackboxai"
    default_vision_model = default_model
    # Pseudo-model name that routes requests to the image-generator endpoint.
    default_image_model = 'ImageGeneration'
    image_models = [default_image_model]
    vision_models = [default_vision_model, 'gpt-4o', 'o1', 'o3-mini', 'gemini-pro', 'gemini-1.5-flash', 'llama-3.1-8b', 'llama-3.1-70b', 'llama-3.1-405b', 'gemini-2.0-flash', 'deepseek-v3']
    # Models sent to the API via the "userSelectedModel" payload field.
    userSelectedModel = ['gpt-4o', 'o1', 'o3-mini', 'gemini-pro', 'claude-sonnet-3.7', 'deepseek-v3', 'deepseek-r1', 'blackboxai-pro', 'Meta-Llama-3.3-70B-Instruct-Turbo', 'Mistral-Small-24B-Instruct-2501', 'DeepSeek-LLM-Chat-(67B)', 'dbrx-instruct', 'Qwen-QwQ-32B-Preview', 'Nous-Hermes-2-Mixtral-8x7B-DPO', 'gemini-2.0-flash']
    # Models routed through the API's "agentMode" payload field
    # (value dict is passed through verbatim to the server).
    agentMode = {
        'deepseek-v3': {'mode': True, 'id': "deepseek-chat", 'name': "DeepSeek-V3"},
        'deepseek-r1': {'mode': True, 'id': "deepseek-reasoner", 'name': "DeepSeek-R1"},
        'Meta-Llama-3.3-70B-Instruct-Turbo': {'mode': True, 'id': "meta-llama/Llama-3.3-70B-Instruct-Turbo", 'name': "Meta-Llama-3.3-70B-Instruct-Turbo"},
        'Mistral-Small-24B-Instruct-2501': {'mode': True, 'id': "mistralai/Mistral-Small-24B-Instruct-2501", 'name': "Mistral-Small-24B-Instruct-2501"},
        'DeepSeek-LLM-Chat-(67B)': {'mode': True, 'id': "deepseek-ai/deepseek-llm-67b-chat", 'name': "DeepSeek-LLM-Chat-(67B)"},
        'dbrx-instruct': {'mode': True, 'id': "databricks/dbrx-instruct", 'name': "DBRX-Instruct"},
        'Qwen-QwQ-32B-Preview': {'mode': True, 'id': "Qwen/QwQ-32B-Preview", 'name': "Qwen-QwQ-32B-Preview"},
        'Nous-Hermes-2-Mixtral-8x7B-DPO': {'mode': True, 'id': "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO", 'name': "Nous-Hermes-2-Mixtral-8x7B-DPO"},
        'gemini-2.0-flash': {'mode': True, 'id': "Gemini/Gemini-Flash-2.0", 'name': "Gemini-Flash-2.0"},
    }
    # Models/agents routed through the API's "trendingAgentMode" payload field.
    # NOTE(review): 'llama-3.1-405b' maps to id "llama-3.1-405" (no trailing
    # 'b') — presumably the server-side id; confirm before "fixing".
    trendingAgentMode = {
        "gemini-1.5-flash": {'mode': True, 'id': 'Gemini'},
        "llama-3.1-8b": {'mode': True, 'id': "llama-3.1-8b"},
        'llama-3.1-70b': {'mode': True, 'id': "llama-3.1-70b"},
        'llama-3.1-405b': {'mode': True, 'id': "llama-3.1-405"},
        'Python Agent': {'mode': True, 'id': "Python Agent"},
        'Java Agent': {'mode': True, 'id': "Java Agent"},
        'JavaScript Agent': {'mode': True, 'id': "JavaScript Agent"},
        'HTML Agent': {'mode': True, 'id': "HTML Agent"},
        'Google Cloud Agent': {'mode': True, 'id': "Google Cloud Agent"},
        'Android Developer': {'mode': True, 'id': "Android Developer"},
        'Swift Developer': {'mode': True, 'id': "Swift Developer"},
        'Next.js Agent': {'mode': True, 'id': "Next.js Agent"},
        'MongoDB Agent': {'mode': True, 'id': "MongoDB Agent"},
        'PyTorch Agent': {'mode': True, 'id': "PyTorch Agent"},
        'React Agent': {'mode': True, 'id': "React Agent"},
        'Xcode Agent': {'mode': True, 'id': "Xcode Agent"},
        'blackboxai-pro': {'mode': True, 'id': "BLACKBOXAI-PRO"},
        'Heroku Agent': {'mode': True, 'id': "Heroku Agent"},
        'Godot Agent': {'mode': True, 'id': "Godot Agent"},
        'Go Agent': {'mode': True, 'id': "Go Agent"},
        'Gitlab Agent': {'mode': True, 'id': "Gitlab Agent"},
        'Git Agent': {'mode': True, 'id': "Git Agent"},
        'Flask Agent': {'mode': True, 'id': "Flask Agent"},
        'Firebase Agent': {'mode': True, 'id': "Firebase Agent"},
        'FastAPI Agent': {'mode': True, 'id': "FastAPI Agent"},
        'Erlang Agent': {'mode': True, 'id': "Erlang Agent"},
        'Electron Agent': {'mode': True, 'id': "Electron Agent"},
        'Docker Agent': {'mode': True, 'id': "Docker Agent"},
        'DigitalOcean Agent': {'mode': True, 'id': "DigitalOcean Agent"},
        'Bitbucket Agent': {'mode': True, 'id': "Bitbucket Agent"},
        'Azure Agent': {'mode': True, 'id': "Azure Agent"},
        'Flutter Agent': {'mode': True, 'id': "Flutter Agent"},
        'Youtube Agent': {'mode': True, 'id': "Youtube Agent"},
        'builder Agent': {'mode': True, 'id': "builder Agent"},
    }
    # Union of all model names, deduplicated while preserving order.
    models = list(dict.fromkeys([default_model, *userSelectedModel, *image_models, *list(agentMode.keys()), *list(trendingAgentMode.keys())]))
    # Public alias -> internal model name used by get_model().
    model_aliases = {
        "gemini-1.5-flash": "gemini-1.5-flash",
        "gemini-1.5-pro": "gemini-pro",
        "llama-3.3-70b": "Meta-Llama-3.3-70B-Instruct-Turbo",
        "mixtral-small-28b": "Mistral-Small-24B-Instruct-2501",
        "deepseek-chat": "DeepSeek-LLM-Chat-(67B)",
        "qwq-32b": "Qwen-QwQ-32B-Preview",
        "hermes-2-dpo": "Nous-Hermes-2-Mixtral-8x7B-DPO",
        "claude-3.7-sonnet": "claude-sonnet-3.7",
        "flux": "ImageGeneration",
    }
    # Base64-encoded JSON blobs sent in the request payload to emulate a
    # logged-in premium session (decoded by decrypt_data / decrypt_bool).
    ENCRYPTED_SESSION = "eyJ1c2VyIjogeyJuYW1lIjogIkJMQUNLQk9YIEFJIiwgImVtYWlsIjogImdpc2VsZUBibGFja2JveC5haSIsICJpbWFnZSI6ICJodHRwczovL3l0My5nb29nbGV1c2VyY29udGVudC5jb20vQjd6RVlVSzUxWnNQYmFSUFVhMF9ZbnQ1WV9URFZoTE4tVjAzdndRSHM0eF96a2g4a1psLXkxcXFxb3hoeFFzcS1wUVBHS0R0WFE9czE2MC1jLWstYzB4MDBmZmZmZmYtbm8tcmoifSwgImV4cGlyZXMiOiBudWxsfQ=="
    ENCRYPTED_SUBSCRIPTION_CACHE = "eyJzdGF0dXMiOiAiUFJFTUlVTSIsICJleHBpcnlUaW1lc3RhbXAiOiBudWxsLCAibGFzdENoZWNrZWQiOiBudWxsLCAiaXNUcmlhbFN1YnNjcmlwdGlvbiI6IHRydWV9"
    ENCRYPTED_IS_PREMIUM = "dHJ1ZQ=="  # base64 of "true"
  100. @classmethod
  101. async def fetch_validated(cls, url: str = "https://www.blackbox.ai", force_refresh: bool = False) -> Optional[str]:
  102. cache_file = Path(get_cookies_dir()) / 'blackbox.json'
  103. if not force_refresh and cache_file.exists():
  104. try:
  105. with open(cache_file, 'r') as f:
  106. data = json.load(f)
  107. if data.get('validated_value'):
  108. return data['validated_value']
  109. except Exception as e:
  110. print(f"Error reading cache: {e}")
  111. js_file_pattern = r'static/chunks/\d{4}-[a-fA-F0-9]+\.js'
  112. uuid_pattern = r'["\']([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})["\']'
  113. def is_valid_context(text: str) -> bool:
  114. return any(char + '=' in text for char in 'abcdefghijklmnopqrstuvwxyz')
  115. async with ClientSession() as session:
  116. try:
  117. async with session.get(url) as response:
  118. if response.status != 200:
  119. return None
  120. page_content = await response.text()
  121. js_files = re.findall(js_file_pattern, page_content)
  122. for js_file in js_files:
  123. js_url = f"{url}/_next/{js_file}"
  124. async with session.get(js_url) as js_response:
  125. if js_response.status == 200:
  126. js_content = await js_response.text()
  127. for match in re.finditer(uuid_pattern, js_content):
  128. start = max(0, match.start() - 10)
  129. end = min(len(js_content), match.end() + 10)
  130. context = js_content[start:end]
  131. if is_valid_context(context):
  132. validated_value = match.group(1)
  133. cache_file.parent.mkdir(exist_ok=True)
  134. try:
  135. with open(cache_file, 'w') as f:
  136. json.dump({'validated_value': validated_value}, f)
  137. except Exception as e:
  138. print(f"Error writing cache: {e}")
  139. return validated_value
  140. except Exception as e:
  141. print(f"Error retrieving validated_value: {e}")
  142. return None
  143. @classmethod
  144. def generate_id(cls, length: int = 7) -> str:
  145. chars = string.ascii_letters + string.digits
  146. return ''.join(random.choice(chars) for _ in range(length))
  147. @staticmethod
  148. def decrypt_data(encrypted_data):
  149. try:
  150. return json.loads(base64.b64decode(encrypted_data).decode('utf-8'))
  151. except:
  152. return None
  153. @staticmethod
  154. def decrypt_bool(encrypted_data):
  155. try:
  156. return base64.b64decode(encrypted_data).decode('utf-8').lower() == 'true'
  157. except:
  158. return False
  159. @classmethod
  160. async def create_async_generator(
  161. cls,
  162. model: str,
  163. messages: Messages,
  164. prompt: str = None,
  165. proxy: str = None,
  166. images: ImagesType = None,
  167. top_p: float = None,
  168. temperature: float = None,
  169. max_tokens: int = None,
  170. conversation: Conversation = None,
  171. return_conversation: bool = False,
  172. **kwargs
  173. ) -> AsyncResult:
  174. model = cls.get_model(model)
  175. headers = {
  176. 'accept': '*/*',
  177. 'accept-language': 'en-US,en;q=0.9',
  178. 'content-type': 'application/json',
  179. 'origin': 'https://www.blackbox.ai',
  180. 'referer': 'https://www.blackbox.ai/',
  181. 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36'
  182. }
  183. async with ClientSession(headers=headers) as session:
  184. if model in "ImageGeneration":
  185. prompt = format_image_prompt(messages, prompt)
  186. data = {
  187. "query": format_image_prompt(messages, prompt),
  188. "agentMode": True
  189. }
  190. headers['content-type'] = 'text/plain;charset=UTF-8'
  191. async with session.post(
  192. "https://www.blackbox.ai/api/image-generator",
  193. json=data,
  194. proxy=proxy,
  195. headers=headers
  196. ) as response:
  197. await raise_for_status(response)
  198. response_json = await response.json()
  199. if "markdown" in response_json:
  200. image_url_match = re.search(r'!\[.*?\]\((.*?)\)', response_json["markdown"])
  201. if image_url_match:
  202. image_url = image_url_match.group(1)
  203. yield ImageResponse(images=[image_url], alt=format_image_prompt(messages, prompt))
  204. return
  205. if conversation is None or not hasattr(conversation, "chat_id"):
  206. conversation = Conversation(model)
  207. conversation.validated_value = await cls.fetch_validated()
  208. conversation.chat_id = cls.generate_id()
  209. conversation.message_history = []
  210. current_messages = []
  211. for i, msg in enumerate(messages):
  212. msg_id = conversation.chat_id if i == 0 and msg["role"] == "user" else cls.generate_id()
  213. current_msg = {
  214. "id": msg_id,
  215. "content": msg["content"],
  216. "role": msg["role"]
  217. }
  218. current_messages.append(current_msg)
  219. if images is not None:
  220. current_messages[-1]['data'] = {
  221. "imagesData": [
  222. {
  223. "filePath": f"/{image_name}",
  224. "contents": to_data_uri(image)
  225. }
  226. for image, image_name in images
  227. ],
  228. "fileText": "",
  229. "title": ""
  230. }
  231. data = {
  232. "messages": current_messages,
  233. "agentMode": cls.agentMode.get(model, {}) if model in cls.agentMode else {},
  234. "id": conversation.chat_id,
  235. "previewToken": None,
  236. "userId": None,
  237. "codeModelMode": True,
  238. "trendingAgentMode": cls.trendingAgentMode.get(model, {}) if model in cls.trendingAgentMode else {},
  239. "isMicMode": False,
  240. "userSystemPrompt": None,
  241. "maxTokens": max_tokens,
  242. "playgroundTopP": top_p,
  243. "playgroundTemperature": temperature,
  244. "isChromeExt": False,
  245. "githubToken": "",
  246. "clickedAnswer2": False,
  247. "clickedAnswer3": False,
  248. "clickedForceWebSearch": False,
  249. "visitFromDelta": False,
  250. "isMemoryEnabled": False,
  251. "mobileClient": False,
  252. "userSelectedModel": model if model in cls.userSelectedModel else None,
  253. "validated": conversation.validated_value,
  254. "imageGenerationMode": False,
  255. "webSearchModePrompt": False,
  256. "deepSearchMode": False,
  257. "domains": None,
  258. "vscodeClient": False,
  259. "codeInterpreterMode": False,
  260. "customProfile": {
  261. "name": "",
  262. "occupation": "",
  263. "traits": [],
  264. "additionalInfo": "",
  265. "enableNewChats": False
  266. },
  267. "session": cls.decrypt_data(cls.ENCRYPTED_SESSION),
  268. "isPremium": cls.decrypt_bool(cls.ENCRYPTED_IS_PREMIUM),
  269. "subscriptionCache": cls.decrypt_data(cls.ENCRYPTED_SUBSCRIPTION_CACHE),
  270. "beastMode": False,
  271. "webSearchMode": False
  272. }
  273. async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
  274. await raise_for_status(response)
  275. full_response = []
  276. async for chunk in response.content.iter_any():
  277. if chunk:
  278. chunk_text = chunk.decode()
  279. full_response.append(chunk_text)
  280. yield chunk_text
  281. if return_conversation:
  282. full_response_text = ''.join(full_response)
  283. conversation.message_history.append({"role": "assistant", "content": full_response_text})
  284. yield conversation