diff --git a/apps/routers/auth.py b/apps/routers/auth.py
index 252462a8121a21a83bfb33a1c1a6373b9e5492cc..561f8065193b326767366f211833e7b04095352c 100644
--- a/apps/routers/auth.py
+++ b/apps/routers/auth.py
@@ -70,23 +70,15 @@ async def oidc_login(request: Request, code: str, redirect_index: Optional[str]
     await UserManager.update_userinfo_by_user_sub(user_sub)
 
-    current_session = request.cookies["ECSESSION"]
     try:
+        current_session = request.cookies["ECSESSION"]
         await SessionManager.delete_session(current_session)
-        current_session = await SessionManager.create_session(user_host, extra_keys={
-            "user_sub": user_sub,
-        })
     except Exception as e:
         LOGGER.error(f"Change session failed: {e}")
-        data = Audit(
-            user_sub=user_sub,
-            http_method="get",
-            module="auth",
-            client_ip=user_host,
-            message="/api/auth/login: Change session failed.",
-        )
-        await AuditLogManager.add_audit_log(data)
-        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User login failed.") from e
+
+    current_session = await SessionManager.create_session(user_host, extra_keys={
+        "user_sub": user_sub,
+    })
 
 
     new_csrf_token = await SessionManager.create_csrf_token(current_session)
     if config["COOKIE_MODE"] == "DEBUG":
diff --git a/deploy/chart/euler_copilot/configs/rag/.env b/deploy/chart/euler_copilot/configs/rag/.env
index 79f7b3b774016de532d3a2a764cb8ac3a4993c62..ca0a9c33cf92b1b5bf320fee10990fbf5109e74f 100644
--- a/deploy/chart/euler_copilot/configs/rag/.env
+++ b/deploy/chart/euler_copilot/configs/rag/.env
@@ -43,8 +43,8 @@ HALF_KEY2=${halfKey2}
 HALF_KEY3=${halfKey3}
 
 #LLM config
-MODEL_NAME={{ .Values.models.answer.name }}
-OPENAI_API_BASE={{ .Values.models.answer.url }}/v1
-OPENAI_API_KEY={{ default "" .Values.models.answer.key }}
-REQUEST_TIMEOUT=60
-MAX_TOKENS={{ default 2048 .Values.models.answer.max_tokens }}
+MODEL_1_MODEL_NAME={{ .Values.models.answer.name }}
+MODEL_1_MODEL_TYPE=deepseek
+MODEL_1_OPENAI_API_BASE={{ .Values.models.answer.url }}/v1
+MODEL_1_OPENAI_API_KEY={{ default "" .Values.models.answer.key }}
+MODEL_1_MAX_TOKENS={{ default 2048 .Values.models.answer.max_tokens }}