# Hugging Face Spaces page artifacts (kept as a comment so the file stays valid Python):
# Ajedrel's picture | Update app.py | commit 47013eb (verified)
import os
import tempfile
import subprocess
import streamlit as st
from huggingface_hub import InferenceClient
# Page-wide configuration must run before any other Streamlit call.
st.set_page_config(page_title="Python to C++17 Translator", layout="wide")

# Chat history lives in session_state so it survives Streamlit reruns;
# seed it with an empty list on the first load only.
st.session_state.setdefault("messages", [])

# Hosted model used for the Python -> C++ translation.
MODEL_ID = "Qwen/Qwen2.5-Coder-32B-Instruct"

# Sidebar control: wipe the stored conversation and redraw the page.
with st.sidebar:
    if st.button("Limpiar Historial"):
        st.session_state.messages = []
        st.rerun()
def get_system_prompt() -> str:
    """Return the fixed system prompt that steers the model toward raw C++17 output."""
    fragments = (
        "You are an expert compiler and AI engineer specializing in migrating Python code to high-performance C++17. ",
        "Your task is to provide direct, efficient, and thread-safe C++17 code. ",
        "Include all necessary headers (<iostream>, <vector>, <memory>, etc.). ",
        "Ensure strict type safety and prevent integer overflows. ",
        "Respond ONLY with the raw C++ code enclosed in ```cpp block. Do not add explanations.",
    )
    # Fragments already carry their trailing spaces, so a plain join
    # reproduces the exact prompt string.
    return "".join(fragments)
def extract_cpp_code(response_text: str) -> str:
    """Strip markdown code fences from a model response and return the bare C++ code.

    Looks for a ```cpp fence first, then ```c++, then any generic ``` fence;
    when no fence is present the whole response is treated as code.
    """
    for fence in ("```cpp", "```c++", "```"):
        if fence in response_text:
            # Everything after the opening fence, up to the next closing fence
            # (or to the end of the text if the model forgot to close it).
            extracted = response_text.partition(fence)[2].partition("```")[0]
            break
    else:
        extracted = response_text
    return extracted.strip()
def compile_and_run_cpp(cpp_code: str) -> str:
    """Compile the given C++17 source with g++ -O3 and run the resulting binary.

    Runs inside the Linux environment of HF Spaces. The temporary directory
    (source file + binary) is removed automatically when the context exits.

    Args:
        cpp_code: Raw C++ translation-unit text to compile.

    Returns:
        The program's stdout (with stderr appended when present), or a
        human-readable error message when compilation fails, execution
        times out, or g++ is not available.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        cpp_file = os.path.join(tmpdir, "optimized.cpp")
        exe_file = os.path.join(tmpdir, "optimized_bin")
        # Write explicitly as UTF-8 so non-ASCII comments/strings in the
        # generated code don't depend on the container's locale settings.
        with open(cpp_file, "w", encoding="utf-8") as f:
            f.write(cpp_code)
        compile_cmd = ["g++", "-O3", "-std=c++17", cpp_file, "-o", exe_file]
        try:
            # check=True turns a compiler error into CalledProcessError; the
            # timeout guards against a pathological/hanging compilation.
            subprocess.run(compile_cmd, capture_output=True, text=True, check=True, timeout=60)
            run_process = subprocess.run([exe_file], capture_output=True, text=True, timeout=10)
            output = run_process.stdout
            if run_process.stderr:
                output += f"\nWarnings/Errors:\n{run_process.stderr}"
            return output if output else "Ejecución completada sin salida (stdout vacío)."
        except subprocess.CalledProcessError as e:
            return f" Error de Compilación:\n{e.stderr}"
        except subprocess.TimeoutExpired:
            return " Timeout: La ejecución excedió los 10 segundos."
        except FileNotFoundError:
            # g++ is missing from the environment; report instead of crashing the app.
            return " Error: g++ no está disponible en este entorno."
# --- Page body: heading plus a replay of the stored conversation. ---
st.title("Python to C++ Auto-Translator")
st.markdown("Pega tu código de Python en el chat. El sistema generara la traducción a C++ y la ejecutara automaticamente")

# User turns store raw Python source; assistant turns store a dict with the
# generated C++ ("code") and the captured run output ("output").
for msg in st.session_state.messages:
    with st.chat_message(msg["role"]):
        if msg["role"] == "user":
            st.code(msg["content"], language="python")
        else:
            st.code(msg["content"]["code"], language="cpp")
            with st.expander("Ver salida de ejecución"):
                st.text(msg["content"]["output"])
# --- Chat handler: translate the submitted Python snippet, compile, run. ---
if prompt := st.chat_input("Escribe o pega tu código Python aqui:"):
    # Persist and echo the user's turn immediately.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.code(prompt, language="python")

    # The HF token must be provisioned as a Space secret; abort loudly if not.
    hf_token = os.getenv("HF_TOKEN")
    if not hf_token:
        st.error("ERROR CRÍTICO: No se encontró el HF_TOKEN. Ve a Settings -> Variables and secrets y configuralo.")
        st.stop()

    client = InferenceClient(model=MODEL_ID, token=hf_token)
    messages_for_hf = [
        {"role": "system", "content": get_system_prompt()},
        {"role": "user", "content": f"Translate this Python code to C++:\n\n{prompt}"}
    ]

    with st.chat_message("assistant"):
        with st.spinner("Conectando con la API y traduciendo a C++"):
            try:
                # Low temperature keeps the translation deterministic-ish.
                response = client.chat_completion(
                    messages=messages_for_hf,
                    max_tokens=3000,
                    temperature=0.1
                )
                generated_text = response.choices[0].message.content
                clean_cpp = extract_cpp_code(generated_text)
                st.code(clean_cpp, language="cpp")
                st.markdown("---")
                st.markdown(" **Ejecutando binario...**")
                with st.spinner("Compilando y ejecutando..."):
                    execution_output = compile_and_run_cpp(clean_cpp)
                with st.expander("Salida de ejecución (stdout)", expanded=True):
                    st.text(execution_output)
                # Store code and output together so the history replay can
                # render both (see the rendering loop above the chat input).
                st.session_state.messages.append({
                    "role": "assistant",
                    "content": {
                        "code": clean_cpp,
                        "output": execution_output
                    }
                })
            except Exception as e:
                # Top-level UI boundary: surface any API/translation failure
                # to the user instead of crashing the Streamlit script run.
                st.error(f"Error detallado de la API: {type(e).__name__} - {str(e)}")