Ошибка запуска LLM + AutoGen в Google Colab на Python
Я запускаю LLM на Python; вот код из файла .ipynb (ссылка номер один):
import torch
from pathlib import Path
if Path.cwd().name != 'text-generation-webui':
print("Installing the webui...")
!git clone https://github.com/oobabooga/text-generation-webui
%cd text-generation-webui
torver = torch.version
print(f"TORCH: {torver}")
is_cuda118 = '+cu118' in torver # 2.1.0+cu118
is_cuda117 = '+cu117' in torver # 2.0.1+cu117
textgen_requirements = open('requirements.txt').read().splitlines()
if is_cuda117:
textgen_requirements = [req.replace('+cu121', '+cu117').replace('+cu122', '+cu117').replace('torch2.1', 'torch2.0') for req in textgen_requirements]
elif is_cuda118:
textgen_requirements = [req.replace('+cu121', '+cu118').replace('+cu122', '+cu118') for req in textgen_requirements]
with open('temp_requirements.txt', 'w') as file:
file.write('\n'.join(textgen_requirements))
!pip install -r extensions/api/requirements.txt --upgrade
!pip install -r temp_requirements.txt --upgrade
print("\033[1;32;1m\n --> If you see a warning about \"previously imported packages\", just ignore it.\033[0;37;0m")
print("\033[1;32;1m\n --> There is no need to restart the runtime.\n\033[0;37;0m")
try:
import flash_attn
except:
!pip uninstall -y flash_attn
# Parameters
model_url = "https://huggingface.co/turboderp/Mistral-7B-instruct-exl2"
branch = "4.0bpw"
command_line_flags = "--n-gpu-layers 128 --load-in-4bit --use_double_quant"
api = True
if api:
for param in ['--api', '--public-api']:
if param not in command_line_flags:
command_line_flags += f" {param}"
model_url = model_url.strip()
if model_url != "":
if not model_url.startswith('http'):
model_url = 'https://huggingface.co/' + model_url
# Download the model
url_parts = model_url.strip('/').strip().split('/')
output_folder = f"{url_parts[-2]}_{url_parts[-1]}"
branch = branch.strip('"\' ')
if branch.strip() != '':
output_folder += f"_{branch}"
!python download-model.py {model_url} --branch {branch}
else:
!python download-model.py {model_url}
else:
output_folder = ""
# Start the web UI
cmd = f"python server.py --share"
if output_folder != "":
cmd += f" --model {output_folder}"
cmd += f" {command_line_flags}"
print(cmd)
!$cmd
Он выдаёт localhost, public URL, а также нужный мне API URL. Но если я перейду на локальный сервер по адресу http://127.0.0.1:7860/ или http://127.0.0.1:8000/, то браузер скажет, что не может открыть эту страницу. Этот API URL нужен мне в AutoGen:
config_list = [
    {
        "model": "mistral-7b",
        # FIX: the OpenAI-compatible extension of text-generation-webui serves
        # its routes under /v1 (POST /v1/chat/completions). With api_base
        # ending in /api, the client POSTs to /api/chat/completions, which the
        # server does not route — exactly the 404 shown in the traceback.
        "api_base": "https://wide-centuries-please-ours.trycloudflare.com/v1",
        "api_key": "NULL",  # placeholder: the local server does not validate keys
        "api_type": "open_ai",
    },
]
Но после запуска (вторая ссылка) он выдаёт мне ошибку:
--------------------------------------------------------------------------------
[autogen.oai.completion: 11-04 14:11:16] {238} INFO - retrying in 10 seconds...
Traceback (most recent call last):
File "/usr/local/lib/python3.10/dist-packages/openai/api_requestor.py", line 765, in _interpret_response_line
data = json.loads(rbody)
File "/usr/lib/python3.10/json/__init__.py", line 346, in loads
return _default_decoder.decode(s)
File "/usr/lib/python3.10/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/lib/python3.10/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.10/dist-packages/autogen/oai/completion.py", line 224, in _get_response
response = openai_completion.create(request_timeout=request_timeout, **config)
File "/usr/local/lib/python3.10/dist-packages/openai/api_resources/chat_completion.py", line 25, in create
return super().create(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/openai/api_resources/abstract/engine_api_resource.py", line 155, in create
response, _, api_key = requestor.request(
File "/usr/local/lib/python3.10/dist-packages/openai/api_requestor.py", line 299, in request
resp, got_stream = self._interpret_response(result, stream)
File "/usr/local/lib/python3.10/dist-packages/openai/api_requestor.py", line 710, in _interpret_response
self._interpret_response_line(
File "/usr/local/lib/python3.10/dist-packages/openai/api_requestor.py", line 767, in _interpret_response_line
raise error.APIError(
openai.error.APIError: HTTP code 404 from API (<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
"http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html;charset=utf-8">
<title>Error response</title>
</head>
<body>
<h1>Error response</h1>
<p>Error code: 404</p>
<p>Message: Not Found.</p>
<p>Error code explanation: 404 - Nothing matches the given URI.</p>
</body>
</html>
)
INFO:autogen.oai.completion:retrying in 10 seconds...
Traceback (most recent call last):
File "/usr/local/lib/python3.10/dist-packages/openai/api_requestor.py", line 765, in _interpret_response_line
data = json.loads(rbody)
File "/usr/lib/python3.10/json/__init__.py", line 346, in loads
return _default_decoder.decode(s)
File "/usr/lib/python3.10/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/lib/python3.10/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.10/dist-packages/autogen/oai/completion.py", line 224, in _get_response
response = openai_completion.create(request_timeout=request_timeout, **config)
File "/usr/local/lib/python3.10/dist-packages/openai/api_resources/chat_completion.py", line 25, in create
return super().create(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/openai/api_resources/abstract/engine_api_resource.py", line 155, in create
response, _, api_key = requestor.request(
File "/usr/local/lib/python3.10/dist-packages/openai/api_requestor.py", line 299, in request
resp, got_stream = self._interpret_response(result, stream)
File "/usr/local/lib/python3.10/dist-packages/openai/api_requestor.py", line 710, in _interpret_response
self._interpret_response_line(
File "/usr/local/lib/python3.10/dist-packages/openai/api_requestor.py", line 767, in _interpret_response_line
raise error.APIError(
openai.error.APIError: HTTP code 404 from API (<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
"http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html;charset=utf-8">
<title>Error response</title>
</head>
<body>
<h1>Error response</h1>
<p>Error code: 404</p>
<p>Message: Not Found.</p>
<p>Error code explanation: 404 - Nothing matches the given URI.</p>
</body>
</html>
)
[autogen.oai.completion: 11-04 14:11:26] {238} INFO - retrying in 10 seconds...
Traceback (most recent call last):
File "/usr/local/lib/python3.10/dist-packages/openai/api_requestor.py", line 765, in _interpret_response_line
data = json.loads(rbody)
File "/usr/lib/python3.10/json/__init__.py", line 346, in loads
return _default_decoder.decode(s)
File "/usr/lib/python3.10/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/lib/python3.10/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.10/dist-packages/autogen/oai/completion.py", line 224, in _get_response
response = openai_completion.create(request_timeout=request_timeout, **config)
File "/usr/local/lib/python3.10/dist-packages/openai/api_resources/chat_completion.py", line 25, in create
return super().create(*args, **kwargs)
File "/usr/local/lib/python3.10/dist-packages/openai/api_resources/abstract/engine_api_resource.py", line 155, in create
response, _, api_key = requestor.request(
File "/usr/local/lib/python3.10/dist-packages/openai/api_requestor.py", line 299, in request
resp, got_stream = self._interpret_response(result, stream)
File "/usr/local/lib/python3.10/dist-packages/openai/api_requestor.py", line 710, in _interpret_response
self._interpret_response_line(
File "/usr/local/lib/python3.10/dist-packages/openai/api_requestor.py", line 767, in _interpret_response_line
raise error.APIError(
openai.error.APIError: HTTP code 530 from API (<!DOCTYPE html>
<!--[if lt IE 7]> <html class="no-js ie6 oldie" lang="en-US"> <![endif]-->
<!--[if IE 7]> <html class="no-js ie7 oldie" lang="en-US"> <![endif]-->
<!--[if IE 8]> <html class="no-js ie8 oldie" lang="en-US"> <![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js" lang="en-US"> <!--<![endif]-->
<head>
<title>Argo Tunnel error | wide-centuries-please-ours.trycloudflare.com | Cloudflare</title>
<meta charset="UTF-8" />
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<meta http-equiv="X-UA-Compatible" content="IE=Edge" />
<meta name="robots" content="noindex, nofollow" />
<meta name="viewport" content="width=device-width,initial-scale=1" />
<link rel="stylesheet" id="cf_styles-css" href="/cdn-cgi/styles/main.css" />
а в терминале TextGen-Mistral появляется:
127.0.0.1 - - [04/Nov/2023 16:09:56] code 404, message Not Found
127.0.0.1 - - [04/Nov/2023 16:09:56] "POST /api/chat/completions HTTP/1.1" 404 -
127.0.0.1 - - [04/Nov/2023 16:10:06] code 404, message Not Found
127.0.0.1 - - [04/Nov/2023 16:10:06] "POST /api/chat/completions HTTP/1.1" 404 -
TextGen-Mistral: https://colab.research.google.com/drive/1rdxGEiXf1S5EH55TuNMWS752R595ZmsQ?usp=sharing#scrollTo=qHQHTKY0kX5m
memGPT-autogen: https://colab.research.google.com/drive/1tG45c64ZjmPHEkyUuJP9CgJTgybc0Jn1?usp=sharing&authuser=1#scrollTo=tSudz_et207l
Примечание: public URL работает, и бот там функционирует, но эту ссылку нельзя использовать в AutoGen (либо я не знаю, как).