Alina Lozovskaya committed · Commit 5289522
1 Parent(s): a54ba55

Config handling

Browse files:
- yourbench_space/app.py  +3 -126
- yourbench_space/config.py  +2 -2
- yourbench_space/utils.py  +83 -0
yourbench_space/app.py
CHANGED
@@ -1,143 +1,22 @@
-import os
 import sys
 from huggingface_hub import HfApi, whoami
 import requests
-import pathlib
-import subprocess
-import shutil
-import io
 import yaml
 import gradio as gr
 from loguru import logger
 from yourbench.pipeline import run_pipeline
 
-from yourbench_space.config import
-from yourbench_space.utils import CONFIG_PATH, UPLOAD_DIRECTORY
+from yourbench_space.config import generate_base_config, save_config
+from yourbench_space.utils import CONFIG_PATH, UPLOAD_DIRECTORY, SubprocessManager, save_files
 
 UPLOAD_DIRECTORY.mkdir(parents=True, exist_ok=True)
 
-
 logger.remove()
 logger.add(sys.stderr, level="INFO")
 
-class SubprocessManager:
-    def __init__(self, command):
-        self.command = command
-        self.process = None
-        self.output_stream = io.StringIO()
-
-    def start_process(self):
-        """Start the subprocess."""
-        if self.is_running():
-            logger.info("Process is already running")
-            return
-
-        self.output_stream = io.StringIO()
-
-        self.process = subprocess.Popen(
-            self.command,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.STDOUT, # Combine stderr with stdout
-            text=True,
-            bufsize=1, # Line-buffered
-            start_new_session=True # Start the process in a new session
-        )
-        os.set_blocking(self.process.stdout.fileno(), False)
-        logger.info("Started the process")
-
-    def read_and_get_output(self):
-        """Read available subprocess output and return the captured output."""
-        if self.process and self.process.stdout:
-            try:
-                while True:
-                    line = self.process.stdout.readline()
-                    if line:
-                        self.output_stream.write(line) # Capture in StringIO
-                    else:
-                        break
-            except BlockingIOError:
-                pass
-        return self.output_stream.getvalue()
-
-    def stop_process(self):
-        """Terminate the subprocess."""
-        if not self.is_running():
-            logger.info("Process is not running")
-            return
-        logger.info("Sending SIGTERM to the Process")
-        self.process.terminate()
-        exit_code = self.process.wait() # Wait for process to terminate
-        logger.info(f"Process stopped exit code {exit_code}")
-        #return exit_code
-
-    def kill_process(self):
-        """Forcefully kill the subprocess."""
-        if not self.is_running():
-            logger.info("Process is not running")
-            return
-        logger.info("Sending SIGKILL to the Process")
-        self.process.kill()
-        exit_code = self.process.wait() # Wait for process to be killed
-        logger.info(f"Process killed exit code {exit_code}")
-        #return exit_code
-
-    def is_running(self):
-        """Check if the subprocess is still running."""
-        return self.process and self.process.poll() is None
-
-
 command = ["uv", "run", "yourbench", f"--config={CONFIG_PATH}"]
 manager = SubprocessManager(command)
 
-
-def save_files(files: list[str]):
-    saved_paths = [shutil.move(str(pathlib.Path(file)), str(UPLOAD_DIRECTORY / pathlib.Path(file).name)) for file in files]
-    return f"Files saved to: {', '.join(saved_paths)}"
-
-def populate_user_info(oauth_profile: gr.OAuthProfile = None, oauth_token: gr.OAuthToken = None):
-    if oauth_profile is None or oauth_token is None:
-        return (
-            gr.Dropdown.update(choices=["(Please log in to load tokens)"], value=None),
-            gr.Dropdown.update(choices=["(Please log in)"], value=None),
-            "🔒 Not logged in"
-        )
-
-    username = oauth_profile.username
-    org_names = []
-    token_names = []
-
-    try:
-        headers = {"Authorization": f"Bearer {oauth_token.token}"}
-        hf_api = HfApi(token=oauth_token.token)
-
-        # Fetch all user tokens
-        token_data = hf_api.list_tokens()
-        for t in token_data:
-            name = t.get("name") or f"{t['token'][:4]}...{t['token'][-4:]}"
-            token_names.append(name)
-
-        # Fetch user organizations
-        orgs = hf_api.get_user_organizations()
-        org_names = [org.organization for org in orgs]
-
-    except Exception as e:
-        print("Error fetching user/org info:", e)
-        token_names = [f"{oauth_token.token[:4]}...{oauth_token.token[-4:]}"]
-
-    org_options = [username] + org_names
-    default_org = username
-
-    return (
-        gr.Dropdown.update(choices=token_names, value=token_names[0] if token_names else None),
-        gr.Dropdown.update(choices=org_options, value=default_org),
-        f"✅ Logged in as {username}"
-    )
-
-def hello(profile: gr.OAuthProfile | None) -> str:
-    if profile is None:
-        return "Please, login"
-    return f"Hi {profile.name}"
-
 def update_hf_org_dropdown(oauth_token: gr.OAuthToken | None) -> str:
     if oauth_token is None:
         print("Please deploy this on Spaces and log in to list organizations.")
@@ -155,8 +34,6 @@ with gr.Blocks() as app:
     gr.Markdown("## YourBench Configuration")
     with gr.Row():
         login_btn = gr.LoginButton()
-        hello_text = gr.Markdown()
-        app.load(hello, inputs=None, outputs=hello_text)
 
     with gr.Tab("Configuration"):
         model_name = gr.Textbox(label="Model Name")
@@ -170,7 +47,7 @@ with gr.Blocks() as app:
             max_concurrent_requests = gr.Dropdown([8, 16, 32], value=16, label="Max Concurrent Requests")
         preview_button = gr.Button("Generate Config")
         preview_button.click(
-
+            generate_base_config,
            inputs=[hf_org_dropdown, model_name, provider, base_url, api_key, max_concurrent_requests],
            outputs=config_output
        )
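The last hunk wires the "Generate Config" button to `generate_base_config`: Gradio passes the current values of the listed input components positionally to the callback and writes the return value into `config_output`. A minimal, self-contained sketch of that wiring pattern is below; the component names (`demo`, `org`, `model`, `cfg_box`, `gen_btn`) and the stand-in callback are illustrative, not taken from the commit.

```python
import gradio as gr

def make_config(org: str, model: str) -> str:
    # Stand-in for generate_base_config: return a YAML-ish preview string.
    return f"hf_configuration:\n  hf_organization: {org}\nmodel: {model}"

with gr.Blocks() as demo:
    org = gr.Textbox(label="HF Organization")
    model = gr.Textbox(label="Model Name")
    cfg_box = gr.Textbox(label="Generated Config", lines=8)
    gen_btn = gr.Button("Generate Config")
    # Same pattern as preview_button.click(generate_base_config, ...):
    # component values are passed to the callback, its return fills the output.
    gen_btn.click(make_config, inputs=[org, model], outputs=cfg_box)

if __name__ == "__main__":
    demo.launch()
```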
yourbench_space/config.py
CHANGED
@@ -4,10 +4,10 @@ import gradio as gr
 from yourbench_space.utils import CONFIG_PATH
 
 
-def
+def generate_base_config(hf_org, model_name, provider, base_url, api_key, max_concurrent_requests):
     config = {
         "hf_configuration": {
-            "token":
+            "token": "$HF_TOKEN",
             "private": True,
             "hf_organization": hf_org
         },
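Only the `hf_configuration` block of the generated config is visible in this hunk; the token is written as the literal placeholder `"$HF_TOKEN"` rather than a real secret. A hedged sketch of the shape this function starts building, with the rest of the dictionary elided because it is not shown in the diff:

```python
def generate_base_config_sketch(hf_org, model_name, provider, base_url, api_key, max_concurrent_requests):
    # Sketch only: mirrors the visible hf_configuration block of the real generate_base_config.
    config = {
        "hf_configuration": {
            "token": "$HF_TOKEN",  # literal placeholder, resolved from the environment at run time
            "private": True,
            "hf_organization": hf_org,
        },
        # ... model/provider settings built from the remaining arguments (not shown in this diff)
    }
    return config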
yourbench_space/utils.py
CHANGED
@@ -1,4 +1,87 @@
+import io
+import os
 import pathlib
+import shutil
+import gradio as gr
+from loguru import logger
+import subprocess
 
 UPLOAD_DIRECTORY = pathlib.Path("/app/uploaded_files")
 CONFIG_PATH = pathlib.Path("/app/yourbench_config.yml")
+
+
+def save_files(files: list[str]):
+    saved_paths = [shutil.move(str(pathlib.Path(file)), str(UPLOAD_DIRECTORY / pathlib.Path(file).name)) for file in files]
+    return f"Files saved to: {', '.join(saved_paths)}"
+
+
+class SubprocessManager:
+    def __init__(self, command):
+        self.command = command
+        self.process = None
+        self.output_stream = io.StringIO()
+
+    def start_process(self, oauth_token: gr.OAuthToken | None):
+        """Start the subprocess."""
+        if self.is_running():
+            logger.info("Process is already running")
+            return
+
+        self.output_stream = io.StringIO()
+
+        new_env = os.environ.copy()
+        # Override env token, when running in gradio space
+        if oauth_token:
+            new_env["HF_TOKEN"] = oauth_token
+
+        self.process = subprocess.Popen(
+            self.command,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT, # Combine stderr with stdout
+            text=True,
+            bufsize=1,
+            start_new_session=True,
+            env=new_env
+        )
+        os.set_blocking(self.process.stdout.fileno(), False)
+        logger.info("Started the process")
+
+    def read_and_get_output(self):
+        """Read available subprocess output and return the captured output."""
+        if self.process and self.process.stdout:
+            try:
+                while True:
+                    line = self.process.stdout.readline()
+                    if line:
+                        self.output_stream.write(line) # Capture in StringIO
+                    else:
+                        break
+            except BlockingIOError:
+                pass
+        return self.output_stream.getvalue()
+
+    def stop_process(self):
+        """Terminate the subprocess."""
+        if not self.is_running():
+            logger.info("Process is not running")
+            return
+        logger.info("Sending SIGTERM to the Process")
+        self.process.terminate()
+        exit_code = self.process.wait() # Wait for process to terminate
+        logger.info(f"Process stopped exit code {exit_code}")
+        #return exit_code
+
+    def kill_process(self):
+        """Forcefully kill the subprocess."""
+        if not self.is_running():
+            logger.info("Process is not running")
+            return
+        logger.info("Sending SIGKILL to the Process")
+        self.process.kill()
+        exit_code = self.process.wait() # Wait for process to be killed
+        logger.info(f"Process killed exit code {exit_code}")
+        #return exit_code
+
+    def is_running(self):
+        """Check if the subprocess is still running."""
+        return self.process and self.process.poll() is None