Commit bc202f9 by akhaliq (HF Staff)
Parent: 1866adf

add deepseek v3.2

backend_api.py CHANGED
@@ -93,11 +93,11 @@ def get_cached_client(model_id: str, provider: str = "auto"):
 
 # Define models and languages here to avoid importing Gradio UI
 AVAILABLE_MODELS = [
+    {"name": "DeepSeek V3.2", "id": "deepseek-ai/DeepSeek-V3.2-Exp", "description": "DeepSeek V3.2 Experimental - Default model for code generation via HuggingFace Router with Novita provider"},
+    {"name": "DeepSeek R1", "id": "deepseek-ai/DeepSeek-R1-0528", "description": "DeepSeek R1 model for code generation"},
     {"name": "Gemini 3.0 Pro", "id": "gemini-3.0-pro", "description": "Google Gemini 3.0 Pro via Poe with advanced reasoning"},
     {"name": "Grok 4.1 Fast", "id": "x-ai/grok-4.1-fast", "description": "Grok 4.1 Fast model via OpenRouter (20 req/min on free tier)"},
     {"name": "MiniMax M2", "id": "MiniMaxAI/MiniMax-M2", "description": "MiniMax M2 model via HuggingFace InferenceClient with Novita provider"},
-    {"name": "DeepSeek V3.2-Exp", "id": "deepseek-ai/DeepSeek-V3.2-Exp", "description": "DeepSeek V3.2 Experimental via HuggingFace"},
-    {"name": "DeepSeek R1", "id": "deepseek-ai/DeepSeek-R1-0528", "description": "DeepSeek R1 model for code generation"},
     {"name": "GPT-5.1", "id": "gpt-5.1", "description": "OpenAI GPT-5.1 model via Poe for advanced code generation and general tasks"},
     {"name": "GPT-5.1 Instant", "id": "gpt-5.1-instant", "description": "OpenAI GPT-5.1 Instant model via Poe for fast responses"},
     {"name": "GPT-5.1 Codex", "id": "gpt-5.1-codex", "description": "OpenAI GPT-5.1 Codex model via Poe optimized for code generation"},
backend_models.py CHANGED
@@ -269,8 +269,8 @@ def get_real_model_id(model_id: str) -> str:
         # Kimi K2 Instruct needs Groq provider
         return "moonshotai/Kimi-K2-Instruct:groq"
 
-    elif model_id.startswith("deepseek-ai/DeepSeek-V3"):
-        # DeepSeek V3 models need Novita provider
+    elif model_id.startswith("deepseek-ai/DeepSeek-V3") or model_id.startswith("deepseek-ai/DeepSeek-R1"):
+        # DeepSeek V3 and R1 models need Novita provider
         return f"{model_id}:novita"
 
     elif model_id == "zai-org/GLM-4.5":
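
For reference, a standalone sketch of the branch added above, mirroring just the DeepSeek routing of get_real_model_id (the full function in backend_models.py handles many more models). The model:provider strings it returns are the form the Hugging Face Inference Providers router accepts for pinning Novita as the serving provider.

# Minimal sketch of the DeepSeek routing added in this commit; the real
# get_real_model_id() in backend_models.py covers many other model families.
def get_real_model_id(model_id: str) -> str:
    if model_id.startswith("deepseek-ai/DeepSeek-V3") or model_id.startswith("deepseek-ai/DeepSeek-R1"):
        # DeepSeek V3.x and R1 models are served through the Novita provider
        return f"{model_id}:novita"
    return model_id

print(get_real_model_id("deepseek-ai/DeepSeek-V3.2-Exp"))  # deepseek-ai/DeepSeek-V3.2-Exp:novita
print(get_real_model_id("deepseek-ai/DeepSeek-R1-0528"))   # deepseek-ai/DeepSeek-R1-0528:novita
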
frontend/src/app/page.tsx CHANGED
@@ -17,7 +17,7 @@ export default function Home() {
 
   const [generatedCode, setGeneratedCode] = useState('');
   const [selectedLanguage, setSelectedLanguage] = useState<Language>('html');
-  const [selectedModel, setSelectedModel] = useState('claude-opus-4.5');
+  const [selectedModel, setSelectedModel] = useState('deepseek-ai/DeepSeek-V3.2-Exp');
   const [isGenerating, setIsGenerating] = useState(false);
   const [isAuthenticated, setIsAuthenticated] = useState(false);
   const [currentRepoId, setCurrentRepoId] = useState<string | null>(null); // Track imported/deployed space
frontend/src/components/LandingPage.tsx CHANGED
@@ -28,7 +28,7 @@ export default function LandingPage({
   onImport,
   isAuthenticated,
   initialLanguage = 'html',
-  initialModel = 'claude-opus-4.5',
+  initialModel = 'deepseek-ai/DeepSeek-V3.2-Exp',
   onAuthChange
 }: LandingPageProps) {
   const [prompt, setPrompt] = useState('');