# Blackbox.py
  1. from __future__ import annotations
  2. from aiohttp import ClientSession, TCPConnector, ClientTimeout
  3. from pathlib import Path
  4. import re
  5. import json
  6. import random
  7. import string
  8. from ..typing import AsyncResult, Messages, ImagesType
  9. from ..requests.raise_for_status import raise_for_status
  10. from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
  11. from ..image import ImageResponse, to_data_uri
  12. from ..cookies import get_cookies_dir
  13. from .helper import format_prompt
  14. from ..providers.response import FinishReason, JsonConversation
  15. class Conversation(JsonConversation):
  16. validated_value: str = None
  17. chat_id: str = None
  18. message_history: Messages = []
  19. def __init__(self, model: str):
  20. self.model = model
  21. class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
  22. label = "Blackbox AI"
  23. url = "https://www.blackbox.ai"
  24. api_endpoint = "https://www.blackbox.ai/api/chat"
  25. working = True
  26. needs_auth = True
  27. supports_stream = False
  28. supports_system_message = False
  29. supports_message_history = True
  30. default_model = "blackboxai"
  31. default_vision_model = default_model
  32. default_image_model = 'ImageGeneration'
  33. image_models = [default_image_model]
  34. vision_models = [default_vision_model, 'gpt-4o', 'gemini-pro', 'gemini-1.5-flash', 'llama-3.1-8b', 'llama-3.1-70b', 'llama-3.1-405b']
  35. userSelectedModel = ['gpt-4o', 'gemini-pro', 'claude-sonnet-3.5', 'blackboxai-pro']
  36. agentMode = {
  37. 'ImageGeneration': {'mode': True, 'id': "ImageGenerationLV45LJp", 'name': "Image Generation"},
  38. #
  39. 'meta-llama/Llama-3.3-70B-Instruct-Turbo': {'mode': True, 'id': "meta-llama/Llama-3.3-70B-Instruct-Turbo", 'name': "Meta-Llama-3.3-70B-Instruct-Turbo"},
  40. 'mistralai/Mistral-7B-Instruct-v0.2': {'mode': True, 'id': "mistralai/Mistral-7B-Instruct-v0.2", 'name': "Mistral-(7B)-Instruct-v0.2"},
  41. 'deepseek-ai/deepseek-llm-67b-chat': {'mode': True, 'id': "deepseek-ai/deepseek-llm-67b-chat", 'name': "DeepSeek-LLM-Chat-(67B)"},
  42. 'databricks/dbrx-instruct': {'mode': True, 'id': "databricks/dbrx-instruct", 'name': "DBRX-Instruct"},
  43. 'Qwen/QwQ-32B-Preview': {'mode': True, 'id': "Qwen/QwQ-32B-Preview", 'name': "Qwen-QwQ-32B-Preview"},
  44. 'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO': {'mode': True, 'id': "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO", 'name': "Nous-Hermes-2-Mixtral-8x7B-DPO"}
  45. }
  46. trendingAgentMode = {
  47. "gemini-1.5-flash": {'mode': True, 'id': 'Gemini'},
  48. "llama-3.1-8b": {'mode': True, 'id': "llama-3.1-8b"},
  49. 'llama-3.1-70b': {'mode': True, 'id': "llama-3.1-70b"},
  50. 'llama-3.1-405b': {'mode': True, 'id': "llama-3.1-405"},
  51. #
  52. 'Python Agent': {'mode': True, 'id': "Python Agent"},
  53. 'Java Agent': {'mode': True, 'id': "Java Agent"},
  54. 'JavaScript Agent': {'mode': True, 'id': "JavaScript Agent"},
  55. 'HTML Agent': {'mode': True, 'id': "HTML Agent"},
  56. 'Google Cloud Agent': {'mode': True, 'id': "Google Cloud Agent"},
  57. 'Android Developer': {'mode': True, 'id': "Android Developer"},
  58. 'Swift Developer': {'mode': True, 'id': "Swift Developer"},
  59. 'Next.js Agent': {'mode': True, 'id': "Next.js Agent"},
  60. 'MongoDB Agent': {'mode': True, 'id': "MongoDB Agent"},
  61. 'PyTorch Agent': {'mode': True, 'id': "PyTorch Agent"},
  62. 'React Agent': {'mode': True, 'id': "React Agent"},
  63. 'Xcode Agent': {'mode': True, 'id': "Xcode Agent"},
  64. 'AngularJS Agent': {'mode': True, 'id': "AngularJS Agent"},
  65. #
  66. 'blackboxai-pro': {'mode': True, 'id': "BLACKBOXAI-PRO"},
  67. #
  68. 'repomap': {'mode': True, 'id': "repomap"},
  69. #
  70. 'Heroku Agent': {'mode': True, 'id': "Heroku Agent"},
  71. 'Godot Agent': {'mode': True, 'id': "Godot Agent"},
  72. 'Go Agent': {'mode': True, 'id': "Go Agent"},
  73. 'Gitlab Agent': {'mode': True, 'id': "Gitlab Agent"},
  74. 'Git Agent': {'mode': True, 'id': "Git Agent"},
  75. 'Flask Agent': {'mode': True, 'id': "Flask Agent"},
  76. 'Firebase Agent': {'mode': True, 'id': "Firebase Agent"},
  77. 'FastAPI Agent': {'mode': True, 'id': "FastAPI Agent"},
  78. 'Erlang Agent': {'mode': True, 'id': "Erlang Agent"},
  79. 'Electron Agent': {'mode': True, 'id': "Electron Agent"},
  80. 'Docker Agent': {'mode': True, 'id': "Docker Agent"},
  81. 'DigitalOcean Agent': {'mode': True, 'id': "DigitalOcean Agent"},
  82. 'Bitbucket Agent': {'mode': True, 'id': "Bitbucket Agent"},
  83. 'Azure Agent': {'mode': True, 'id': "Azure Agent"},
  84. 'Flutter Agent': {'mode': True, 'id': "Flutter Agent"},
  85. 'Youtube Agent': {'mode': True, 'id': "Youtube Agent"},
  86. 'builder Agent': {'mode': True, 'id': "builder Agent"},
  87. }
  88. models = list(dict.fromkeys([default_model, *userSelectedModel, *list(agentMode.keys()), *list(trendingAgentMode.keys())]))
  89. model_aliases = {
  90. ### chat ###
  91. "gpt-4": "gpt-4o",
  92. "gemini-1.5-flash": "gemini-1.5-flash",
  93. "gemini-1.5-pro": "gemini-pro",
  94. "claude-3.5-sonnet": "claude-sonnet-3.5",
  95. "llama-3.3-70b": "meta-llama/Llama-3.3-70B-Instruct-Turbo",
  96. "mixtral-7b": "mistralai/Mistral-7B-Instruct-v0.2",
  97. "deepseek-chat": "deepseek-ai/deepseek-llm-67b-chat",
  98. "dbrx-instruct": "databricks/dbrx-instruct",
  99. "qwq-32b": "Qwen/QwQ-32B-Preview",
  100. "hermes-2-dpo": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
  101. ### image ###
  102. "flux": "ImageGeneration",
  103. }
  104. @classmethod
  105. async def fetch_validated(
  106. cls,
  107. url: str = "https://www.blackbox.ai",
  108. force_refresh: bool = False
  109. ) -> Optional[str]:
  110. """
  111. Asynchronously retrieves the validated_value from the specified URL.
  112. """
  113. cache_file = Path(get_cookies_dir()) / 'blackbox.json'
  114. if not force_refresh and cache_file.exists():
  115. try:
  116. with open(cache_file, 'r') as f:
  117. data = json.load(f)
  118. if data.get('validated_value'):
  119. return data['validated_value']
  120. except Exception as e:
  121. print(f"Error reading cache: {e}")
  122. js_file_pattern = r'static/chunks/\d{4}-[a-fA-F0-9]+\.js'
  123. uuid_pattern = r'["\']([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})["\']'
  124. def is_valid_context(text: str) -> bool:
  125. """Checks if the context is valid."""
  126. return any(char + '=' in text for char in 'abcdefghijklmnopqrstuvwxyz')
  127. async with ClientSession() as session:
  128. try:
  129. async with session.get(url) as response:
  130. if response.status != 200:
  131. print("Failed to load the page.")
  132. return None
  133. page_content = await response.text()
  134. js_files = re.findall(js_file_pattern, page_content)
  135. for js_file in js_files:
  136. js_url = f"{url}/_next/{js_file}"
  137. async with session.get(js_url) as js_response:
  138. if js_response.status == 200:
  139. js_content = await js_response.text()
  140. for match in re.finditer(uuid_pattern, js_content):
  141. start = max(0, match.start() - 10)
  142. end = min(len(js_content), match.end() + 10)
  143. context = js_content[start:end]
  144. if is_valid_context(context):
  145. validated_value = match.group(1)
  146. # Save to cache
  147. cache_file.parent.mkdir(exist_ok=True)
  148. try:
  149. with open(cache_file, 'w') as f:
  150. json.dump({'validated_value': validated_value}, f)
  151. except Exception as e:
  152. print(f"Error writing cache: {e}")
  153. return validated_value
  154. except Exception as e:
  155. print(f"Error retrieving validated_value: {e}")
  156. return None
  157. @classmethod
  158. def generate_chat_id(cls) -> str:
  159. """Generate a random chat ID"""
  160. chars = string.ascii_letters + string.digits
  161. return ''.join(random.choice(chars) for _ in range(7))
  162. @classmethod
  163. async def create_async_generator(
  164. cls,
  165. model: str,
  166. messages: Messages,
  167. prompt: str = None,
  168. proxy: str = None,
  169. web_search: bool = False,
  170. images: ImagesType = None,
  171. top_p: float = None,
  172. temperature: float = None,
  173. max_tokens: int = None,
  174. conversation: Conversation = None,
  175. return_conversation: bool = False,
  176. **kwargs
  177. ) -> AsyncResult:
  178. model = cls.get_model(model)
  179. headers = {
  180. 'accept': '*/*',
  181. 'accept-language': 'en-US,en;q=0.9',
  182. 'content-type': 'application/json',
  183. 'origin': 'https://www.blackbox.ai',
  184. 'referer': 'https://www.blackbox.ai/',
  185. 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36'
  186. }
  187. connector = TCPConnector(limit=10, ttl_dns_cache=300)
  188. timeout = ClientTimeout(total=30)
  189. async with ClientSession(headers=headers, connector=connector, timeout=timeout) as session:
  190. if conversation is None:
  191. conversation = Conversation(model)
  192. conversation.validated_value = await cls.fetch_validated()
  193. conversation.chat_id = cls.generate_chat_id()
  194. conversation.message_history = []
  195. current_messages = [{"id": conversation.chat_id, "content": format_prompt(messages), "role": "user"}]
  196. conversation.message_history.extend(messages)
  197. if images is not None:
  198. current_messages[-1]['data'] = {
  199. "imagesData": [
  200. {
  201. "filePath": f"/{image_name}",
  202. "contents": to_data_uri(image)
  203. }
  204. for image, image_name in images
  205. ],
  206. "fileText": "",
  207. "title": ""
  208. }
  209. data = {
  210. "messages": current_messages,
  211. "agentMode": cls.agentMode.get(model, {}) if model in cls.agentMode else {},
  212. "id": conversation.chat_id,
  213. "previewToken": None,
  214. "userId": None,
  215. "codeModelMode": True,
  216. "trendingAgentMode": cls.trendingAgentMode.get(model, {}) if model in cls.trendingAgentMode else {},
  217. "isMicMode": False,
  218. "userSystemPrompt": None,
  219. "maxTokens": max_tokens,
  220. "playgroundTopP": top_p,
  221. "playgroundTemperature": temperature,
  222. "isChromeExt": False,
  223. "githubToken": "",
  224. "clickedAnswer2": False,
  225. "clickedAnswer3": False,
  226. "clickedForceWebSearch": False,
  227. "visitFromDelta": False,
  228. "mobileClient": False,
  229. "userSelectedModel": model if model in cls.userSelectedModel else None,
  230. "validated": conversation.validated_value,
  231. "imageGenerationMode": False,
  232. "webSearchModePrompt": False,
  233. "deepSearchMode": False,
  234. "domains": None,
  235. "vscodeClient": False,
  236. "codeInterpreterMode": False,
  237. "webSearchMode": web_search
  238. }
  239. async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
  240. await raise_for_status(response)
  241. response_text = await response.text()
  242. parts = response_text.split('$~~~$')
  243. text_to_yield = parts[2] if len(parts) >= 3 else response_text
  244. if not text_to_yield or text_to_yield.isspace():
  245. return
  246. full_response = ""
  247. if model in cls.image_models:
  248. image_url_match = re.search(r'!\[.*?\]\((.*?)\)', text_to_yield)
  249. if image_url_match:
  250. image_url = image_url_match.group(1)
  251. prompt = messages[-1]["content"]
  252. yield ImageResponse(images=[image_url], alt=prompt)
  253. else:
  254. if "Generated by BLACKBOX.AI" in text_to_yield:
  255. conversation.validated_value = await cls.fetch_validated(force_refresh=True)
  256. if conversation.validated_value:
  257. data["validated"] = conversation.validated_value
  258. async with session.post(cls.api_endpoint, json=data, proxy=proxy) as new_response:
  259. await raise_for_status(new_response)
  260. new_response_text = await new_response.text()
  261. new_parts = new_response_text.split('$~~~$')
  262. new_text = new_parts[2] if len(new_parts) >= 3 else new_response_text
  263. if new_text and not new_text.isspace():
  264. yield new_text
  265. full_response = new_text
  266. else:
  267. if text_to_yield and not text_to_yield.isspace():
  268. yield text_to_yield
  269. full_response = text_to_yield
  270. else:
  271. if text_to_yield and not text_to_yield.isspace():
  272. yield text_to_yield
  273. full_response = text_to_yield
  274. if return_conversation:
  275. conversation.message_history.append({"role": "assistant", "content": full_response})
  276. yield conversation
  277. yield FinishReason("stop")