diff --git a/app.py b/app.py
index 675be7d..351aade 100644
--- a/app.py
+++ b/app.py
@@ -102,4 +102,4 @@ if __name__ == "__main__":
     host = os.getenv("FLASK_HOST", "0.0.0.0")
     port = int(os.getenv("FLASK_PORT", 5000))
     debug = os.getenv("FLASK_DEBUG", "true").lower() == "true"
-    socketio.run(app, host=host, port=port, debug=debug)
+    socketio.run(app, host=host, port=port, debug=debug, allow_unsafe_werkzeug=True)
diff --git a/backend/routes/__init__.py b/backend/routes/__init__.py
index 79ccd39..e878e66 100644
--- a/backend/routes/__init__.py
+++ b/backend/routes/__init__.py
@@ -7,7 +7,7 @@
 from .main import register_main_routes
 from .xml import register_xml_routes
 from .utilities import register_util_routes
 from .file_view import register_file_view
-
+from .jobs import register_jobs_routes
 def register_routes(app: Flask, socketio=None) -> None:
     """Register all blueprints. socketio is used only by the main routes."""
@@ -17,4 +17,5 @@ def register_routes(app: Flask, socketio=None) -> None:
     register_main_routes(app, socketio)
     register_xml_routes(app)
     register_util_routes(app)
-    register_file_view(app)
\ No newline at end of file
+    register_file_view(app)
+    register_jobs_routes(app)
\ No newline at end of file
diff --git a/backend/routes/jobs.py b/backend/routes/jobs.py
new file mode 100644
index 0000000..32bcae3
--- /dev/null
+++ b/backend/routes/jobs.py
@@ -0,0 +1,279 @@
+"""
+Flask blueprint for iDRAC job monitoring (Redfish version).
+Replace the existing routes/jobs.py or backend/routes/jobs.py with this file.
+"""
+import os
+import time
+import logging
+
+from flask import Blueprint, render_template, jsonify, request
+from flask_login import login_required
+
+from backend.services.idrac_jobs import (
+    scan_all,
+    parse_ip_list,
+    load_ip_list,
+    LRUJobCache,
+    is_active_status,
+    is_done_status,
+    parse_iso_datetime,
+    iso_now,
+)
+
+logger = logging.getLogger(__name__)
+
+# Create the blueprint
+jobs_bp = Blueprint("jobs", __name__, url_prefix="/jobs")
+
+# Job cache settings (global, overridable via environment variables)
+MAX_CACHE_SIZE = int(os.getenv("MAX_CACHE_SIZE", "10000"))
+CACHE_GC_INTERVAL = int(os.getenv("CACHE_GC_INTERVAL", "3600"))
+JOB_GRACE_MINUTES = int(os.getenv("JOB_GRACE_MINUTES", "60"))
+JOB_RECENCY_HOURS = int(os.getenv("JOB_RECENCY_HOURS", "24"))
+
+JOB_CACHE = LRUJobCache(max_size=MAX_CACHE_SIZE)
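+
+# Example environment overrides (illustrative; the values shown are the defaults):
+#   export MAX_CACHE_SIZE=10000      # maximum tracked job entries
+#   export CACHE_GC_INTERVAL=3600    # seconds between cache GC passes
+#   export JOB_GRACE_MINUTES=60      # how long completed jobs stay visible
+#   export JOB_RECENCY_HOURS=24      # how far back inactive jobs are shown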
"error": str (if not ok), + "jobs": List[Dict] + } + ] + } + """ + data = request.get_json(silent=True) or {} + + # IP 목록 + ip_input = data.get("ips") + if ip_input: + ips = parse_ip_list("\n".join(ip_input) if isinstance(ip_input, list) else str(ip_input)) + else: + ips = load_ip_list() + + if not ips: + return jsonify({ + "ok": False, + "error": "No IPs provided", + "items": [] + }), 400 + + # 파라미터 + method = data.get("method", "redfish") # redfish가 기본값 + recency_hours = int(data.get("recency_hours", JOB_RECENCY_HOURS)) + grace_minutes = int(data.get("grace_minutes", JOB_GRACE_MINUTES)) + include_tracked_done = bool(data.get("include_tracked_done", True)) + + grace_sec = grace_minutes * 60 + cutoff = time.time() - recency_hours * 3600 + + # 현재 IP 목록과 다른 캐시 항목 제거 + JOB_CACHE.clear_for_ips(set(ips)) + + # 스캔 실행 + try: + items = scan_all(ips, method=method) + except Exception as e: + logger.exception("Scan failed") + return jsonify({ + "ok": False, + "error": str(e), + "items": [] + }), 500 + + now = time.time() + + # 캐시 업데이트 + for item in items: + ip = item.get("ip", "") + if not item.get("ok") or not isinstance(item.get("jobs"), list): + continue + + for job in item["jobs"]: + status = job.get("Status") + message = job.get("Message") + active_now = is_active_status(status, message) + done_now = is_done_status(status) + + # 시작 시간 파싱 + start_ts = parse_iso_datetime(job.get("StartTime")) + + # 리센시 판정 + if not active_now: + if start_ts is None or start_ts < cutoff: + continue + + # 캐시 키 생성 + key = _make_cache_key(ip, job) + entry = JOB_CACHE.get(key) + + if entry is None: + JOB_CACHE.set(key, { + "record": dict(job), + "first_seen_active": (now if active_now else None), + "became_done_at": (now if done_now else None), + "first_seen": now, + "last_seen": now, + "start_ts": start_ts, + }) + else: + entry["record"] = dict(job) + entry["last_seen"] = now + + if active_now and not entry.get("first_seen_active"): + entry["first_seen_active"] = now + + if done_now and not entry.get("became_done_at"): + entry["became_done_at"] = now + elif not done_now: + entry["became_done_at"] = None + + if start_ts: + entry["start_ts"] = start_ts + + JOB_CACHE.set(key, entry) + + # 응답 생성 + out_items = [] + for item in items: + ip = item.get("ip", "") + shown_jobs = [] + + # 현재 Active Job + current_active = [] + if item.get("ok") and isinstance(item.get("jobs"), list): + for job in item["jobs"]: + if is_active_status(job.get("Status"), job.get("Message")): + key = _make_cache_key(ip, job) + if key in JOB_CACHE.keys(): + current_active.append(JOB_CACHE.get(key)["record"]) + + if current_active: + shown_jobs = current_active + else: + # Active가 없을 때: 추적된 최근 완료 Job 표시 + if include_tracked_done: + for key in JOB_CACHE.keys(): + if key[0] != ip: + continue + + entry = JOB_CACHE.get(key) + if not entry: + continue + + start_ok = (entry.get("start_ts") or 0) >= cutoff + done_at = entry.get("became_done_at") + done_ok = bool(done_at and now - done_at <= grace_sec) + still_active = entry.get("became_done_at") is None + + if still_active and start_ok: + shown_jobs.append(entry["record"]) + elif done_ok and start_ok: + rec = dict(entry["record"]) + rec["RecentlyCompleted"] = True + rec["CompletedAt"] = iso_now() + shown_jobs.append(rec) + + out_items.append({ + "ip": ip, + "ok": item.get("ok"), + "error": item.get("error"), + "jobs": sorted(shown_jobs, key=lambda r: r.get("JID", "")) + }) + + # 캐시 GC (조건부) + if now - JOB_CACHE.last_gc >= CACHE_GC_INTERVAL: + JOB_CACHE.gc(max_age_seconds=24 * 3600) + + return jsonify({ + "ok": 
+
+
+def _make_cache_key(ip: str, job: dict):
+    """Build a cache key: (ip, JID), or (ip, "NOJID::<name>") when the job has no JID."""
+    jid = (job.get("JID") or "").strip()
+    if jid:
+        return (ip, jid)
+    name = (job.get("Name") or "").strip()
+    return (ip, f"NOJID::{name}")
+
+
+# ────────────────────────────────────────────────────────────
+# Register function matching the existing project pattern
+# ────────────────────────────────────────────────────────────
+
+def register_jobs_routes(app):
+    """Register the iDRAC job monitoring routes, following the project's register-function pattern."""
+    app.register_blueprint(jobs_bp)
+    logger.info("Jobs routes registered at /jobs")
\ No newline at end of file
diff --git a/backend/routes/utilities.py b/backend/routes/utilities.py
index e443d7b..34cf208 100644
--- a/backend/routes/utilities.py
+++ b/backend/routes/utilities.py
@@ -299,4 +299,4 @@ def download_excel():
         return redirect(url_for("main.index"))
 
     logging.info(f"Excel file download: {path}")
-    return send_file(str(path), as_attachment=True, download_name="mac_info.xlsx")
+    return send_file(str(path), as_attachment=True, download_name="mac_info.xlsx")
\ No newline at end of file
diff --git a/backend/services/idrac_jobs.py b/backend/services/idrac_jobs.py
new file mode 100644
index 0000000..c29428e
--- /dev/null
+++ b/backend/services/idrac_jobs.py
@@ -0,0 +1,315 @@
+"""
+iDRAC job monitoring service (Redfish version).
+Add this file to the Flask app's backend/services/ directory,
+replacing the existing idrac_jobs.py (or save it as redfish_jobs.py).
+"""
+import ipaddress
+import logging
+import os
+import time
+from collections import OrderedDict
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from datetime import datetime, timezone
+from pathlib import Path
+from typing import List, Dict, Any, Optional, Tuple
+
+import requests
+
+from .redfish_client import RedfishClient, AuthenticationError, NotSupportedError
+
+logger = logging.getLogger(__name__)
+
+
+# ────────────────────────────────────────────────────────────
+# Settings (environment variables with defaults)
+# ────────────────────────────────────────────────────────────
+IDRAC_USER = os.getenv("IDRAC_USER", "root")
+IDRAC_PASS = os.getenv("IDRAC_PASS", "calvin")
+MAX_WORKERS = int(os.getenv("MAX_WORKERS", "32"))
+REDFISH_TIMEOUT = int(os.getenv("REDFISH_TIMEOUT", "15"))
+VERIFY_SSL = os.getenv("VERIFY_SSL", "False").lower() == "true"
+IP_LIST_PATH = os.getenv("IDRAC_IP_LIST", "data/server_list/idrac_ip_list.txt")
+
+
+# ────────────────────────────────────────────────────────────
+# IP validation
+# ────────────────────────────────────────────────────────────
+def validate_ip(ip: str) -> bool:
+    """Validate an IP address."""
+    try:
+        ipaddress.ip_address(ip.strip())
+        return True
+    except ValueError:
+        return False
+
+
+def parse_ip_list(text: str) -> List[str]:
+    """Parse a deduplicated IP list from free-form text (commas, semicolons, whitespace, # comments)."""
+    if not text:
+        return []
+
+    raw = text.replace(",", "\n").replace(";", "\n")
+    ips = []
+    seen = set()
+
+    for line in raw.splitlines():
+        line = line.strip()
+        if not line or line.startswith("#"):
+            continue
+
+        for part in line.split():
+            part = part.strip()
+            if not part or part.startswith("#"):
+                continue
+
+            if validate_ip(part) and part not in seen:
+                seen.add(part)
+                ips.append(part)
+            elif not validate_ip(part):
+                logger.warning(f"Invalid IP address: {part}")
+
+    return ips
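+
+# Example (illustrative):
+#   parse_ip_list("10.0.0.1, 10.0.0.2; 10.0.0.1\n# lab rack\n10.0.0.3 bad-ip")
+#   -> ["10.0.0.1", "10.0.0.2", "10.0.0.3"]   # duplicates and invalid tokens are dropped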
+
+
+def load_ip_list(path: str = IP_LIST_PATH) -> List[str]:
+    """Load the IP list from a file."""
+    try:
+        file_path = Path(path)
+        if not file_path.exists():
+            logger.warning(f"IP list file not found: {path}")
+            return []
+
+        with open(file_path, "r", encoding="utf-8") as f:
+            content = f.read()
+
+        ips = parse_ip_list(content)
+        logger.info(f"Loaded {len(ips)} IPs from {path}")
+        return ips
+    except Exception as e:
+        logger.error(f"Failed to load IP list from {path}: {e}")
+        return []
+
+
+# ────────────────────────────────────────────────────────────
+# Job state classification
+# ────────────────────────────────────────────────────────────
+ACTIVE_KEYWORDS = (
+    "running", "scheduled", "progress", "starting",
+    "queued", "pending", "preparing", "applying"
+)
+
+DONE_KEYWORDS = (
+    "completed", "success", "succeeded",
+    "failed", "error", "aborted",
+    "canceled", "cancelled"
+)
+
+
+def is_active_status(status: Optional[str], message: Optional[str] = None) -> bool:
+    """Return True if the job status or message indicates an active state."""
+    s = (status or "").strip().lower()
+    m = (message or "").strip().lower()
+    return any(k in s for k in ACTIVE_KEYWORDS) or any(k in m for k in ACTIVE_KEYWORDS)
+
+
+def is_done_status(status: Optional[str]) -> bool:
+    """Return True if the job status indicates a terminal state."""
+    s = (status or "").strip().lower()
+    return any(k in s for k in DONE_KEYWORDS)
+
+
+# ────────────────────────────────────────────────────────────
+# Date/time parsing
+# ────────────────────────────────────────────────────────────
+def parse_iso_datetime(dt_str: Optional[str]) -> Optional[float]:
+    """Convert an ISO 8601 string to a Unix timestamp."""
+    if not dt_str:
+        return None
+
+    try:
+        dt = datetime.fromisoformat(dt_str.replace("Z", "+00:00"))
+        return dt.timestamp()
+    except Exception as e:
+        logger.debug(f"Failed to parse datetime '{dt_str}': {e}")
+        return None
+
+
+def iso_now() -> str:
+    """Return the current UTC time in ISO 8601 format."""
+    return datetime.now(timezone.utc).isoformat()
+
+
+# ────────────────────────────────────────────────────────────
+# LRU cache
+# ────────────────────────────────────────────────────────────
+class LRUJobCache:
+    """LRU cache for job records, keyed by (ip, job id)."""
+
+    def __init__(self, max_size: int = 10000):
+        self.cache: OrderedDict[Tuple[str, str], Dict[str, Any]] = OrderedDict()
+        self.max_size = max_size
+        self.last_gc = time.time()
+
+    def _make_key(self, ip: str, job: Dict[str, Any]) -> Tuple[str, str]:
+        """Build a cache key for a job."""
+        jid = (job.get("JID") or "").strip()
+        if jid:
+            return (ip, jid)
+        name = (job.get("Name") or "").strip()
+        return (ip, f"NOJID::{name}")
+
+    def get(self, key: Tuple[str, str]) -> Optional[Dict[str, Any]]:
+        """Look up an entry, marking it most recently used."""
+        if key in self.cache:
+            self.cache.move_to_end(key)
+            return self.cache[key]
+        return None
+
+    def set(self, key: Tuple[str, str], value: Dict[str, Any]):
+        """Store an entry, evicting the least recently used one when full."""
+        if key in self.cache:
+            self.cache.move_to_end(key)
+
+        self.cache[key] = value
+
+        if len(self.cache) > self.max_size:
+            self.cache.popitem(last=False)
+
+    def keys(self) -> List[Tuple[str, str]]:
+        """Return all keys."""
+        return list(self.cache.keys())
+
+    def pop(self, key: Tuple[str, str], default=None):
+        """Remove an entry."""
+        return self.cache.pop(key, default)
+
+    def clear_for_ips(self, current_ips: set):
+        """Remove entries whose IP is not in the current list."""
+        removed = 0
+        for key in list(self.cache.keys()):
+            if key[0] not in current_ips:
+                self.cache.pop(key)
+                removed += 1
+
+        if removed > 0:
+            logger.info(f"Cleared {removed} cache entries for removed IPs")
+
+    def gc(self, max_age_seconds: float):
+        """Evict entries not seen within max_age_seconds."""
+        now = time.time()
+        cutoff = now - max_age_seconds
+        removed = 0
+
+        for key in list(self.cache.keys()):
+            entry = self.cache[key]
+            if entry.get("last_seen", 0) < cutoff:
+                self.cache.pop(key)
+                removed += 1
+
+        if removed > 0:
+            logger.info(f"Cache GC: removed {removed} entries")
+
+        self.last_gc = now
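+
+# Minimal usage sketch (illustrative):
+#   cache = LRUJobCache(max_size=2)
+#   cache.set(("10.0.0.1", "JID_001"), {"last_seen": time.time()})
+#   cache.set(("10.0.0.1", "JID_002"), {"last_seen": time.time()})
+#   cache.get(("10.0.0.1", "JID_001"))  # marks JID_001 most recently used
+#   cache.set(("10.0.0.2", "JID_003"), {"last_seen": time.time()})  # evicts JID_002 (the LRU entry)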
+
+
+# ────────────────────────────────────────────────────────────
+# Job scanner
+# ────────────────────────────────────────────────────────────
+def scan_single_ip(ip: str) -> Dict[str, Any]:
+    """
+    Query jobs from a single IP.
+
+    Returns:
+        {
+            "ip": str,
+            "ok": bool,
+            "error": str (on failure),
+            "jobs": List[Dict]
+        }
+    """
+    if not validate_ip(ip):
+        return {
+            "ip": ip,
+            "ok": False,
+            "error": "Invalid IP address",
+            "jobs": []
+        }
+
+    try:
+        with RedfishClient(ip, IDRAC_USER, IDRAC_PASS, REDFISH_TIMEOUT, VERIFY_SSL) as client:
+            jobs = client.get_jobs()
+            return {
+                "ip": ip,
+                "ok": True,
+                "jobs": jobs
+            }
+    except AuthenticationError:
+        return {
+            "ip": ip,
+            "ok": False,
+            "error": "Authentication failed",
+            "jobs": []
+        }
+    except NotSupportedError:
+        return {
+            "ip": ip,
+            "ok": False,
+            "error": "Redfish API not supported (old iDRAC?)",
+            "jobs": []
+        }
+    except requests.Timeout as e:
+        # requests.Timeout is not a subclass of the builtin TimeoutError,
+        # so it must be caught explicitly or it falls through to Exception.
+        return {
+            "ip": ip,
+            "ok": False,
+            "error": f"Timeout: {str(e)}",
+            "jobs": []
+        }
+    except requests.ConnectionError as e:
+        # Likewise, requests.ConnectionError does not inherit from the builtin ConnectionError.
+        return {
+            "ip": ip,
+            "ok": False,
+            "error": f"Connection failed: {str(e)}",
+            "jobs": []
+        }
+    except Exception as e:
+        logger.exception(f"Unexpected error for {ip}")
+        return {
+            "ip": ip,
+            "ok": False,
+            "error": f"Unexpected error: {str(e)[:100]}",
+            "jobs": []
+        }
+
+
+def scan_all(ips: List[str], method: str = "redfish", max_workers: int = MAX_WORKERS) -> List[Dict[str, Any]]:
+    """
+    Scan multiple IPs in parallel.
+
+    Args:
+        ips: list of IPs
+        method: "redfish" ("racadm" is accepted only for backward compatibility)
+        max_workers: number of parallel workers
+
+    Returns:
+        per-IP result list, sorted by IP
+    """
+    if not ips:
+        return []
+
+    logger.info(f"Scanning {len(ips)} IPs with {max_workers} workers (method: {method})")
+    start_time = time.time()
+
+    results = []
+    with ThreadPoolExecutor(max_workers=max_workers) as executor:
+        futures = {
+            executor.submit(scan_single_ip, ip): ip
+            for ip in ips
+        }
+
+        for future in as_completed(futures):
+            results.append(future.result())
+
+    elapsed = time.time() - start_time
+    logger.info(f"Scan completed in {elapsed:.2f}s")
+
+    return sorted(results, key=lambda x: x["ip"])
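+
+# Example (illustrative; credentials are taken from IDRAC_USER / IDRAC_PASS):
+#   results = scan_all(["192.168.0.10", "192.168.0.11"])
+#   for r in results:
+#       print(r["ip"], r["ok"], len(r["jobs"]), r.get("error"))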
diff --git a/backend/services/redfish_client.py b/backend/services/redfish_client.py
new file mode 100644
index 0000000..36d18af
--- /dev/null
+++ b/backend/services/redfish_client.py
@@ -0,0 +1,241 @@
+"""
+Dell iDRAC Redfish API client (revised version).
+Handles both absolute and relative endpoint paths.
+"""
+import logging
+import time
+from functools import wraps
+from typing import Dict, Any, List
+
+import requests
+import urllib3
+
+# Disable SSL warnings (iDRACs commonly use self-signed certificates)
+urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
+logger = logging.getLogger(__name__)
+
+
+def retry_on_failure(max_attempts: int = 2, delay: float = 2.0):
+    """Retry decorator for transient network failures."""
+    def decorator(func):
+        @wraps(func)
+        def wrapper(*args, **kwargs):
+            last_exception = None
+            for attempt in range(max_attempts):
+                try:
+                    return func(*args, **kwargs)
+                except (requests.Timeout, requests.ConnectionError) as e:
+                    last_exception = e
+                    if attempt < max_attempts - 1:
+                        logger.warning(f"Attempt {attempt + 1} failed, retrying in {delay}s: {e}")
+                        time.sleep(delay * (attempt + 1))
+            raise last_exception
+        return wrapper
+    return decorator
+
+
+class RedfishClient:
+    """Dell iDRAC Redfish API client."""
+
+    def __init__(
+        self,
+        ip: str,
+        username: str,
+        password: str,
+        timeout: int = 15,
+        verify_ssl: bool = False
+    ):
+        self.ip = ip
+        self.base_url = f"https://{ip}/redfish/v1"
+        self.host_url = f"https://{ip}"  # host URL, used for absolute @odata.id paths
+        self.timeout = timeout
+        self.verify_ssl = verify_ssl
+
+        self.session = requests.Session()
+        self.session.auth = (username, password)
+        self.session.verify = verify_ssl
+        self.session.headers.update({
+            "Content-Type": "application/json",
+            "Accept": "application/json"
+        })
+
+    @retry_on_failure(max_attempts=2, delay=2.0)
+    def get(self, endpoint: str) -> Dict[str, Any]:
+        """
+        Issue a GET request.
+        Handles both absolute and relative endpoint paths.
+        """
+        if endpoint.startswith('/redfish/v1'):
+            # Absolute path (already rooted at /redfish/v1, e.g. an @odata.id)
+            url = f"{self.host_url}{endpoint}"
+        else:
+            # Relative path (appended to the service root)
+            url = f"{self.base_url}{endpoint}"
+
+        logger.debug(f"GET {url}")
+        response = self.session.get(url, timeout=self.timeout)
+        if response.status_code in (401, 403):
+            # Surface auth failures as the custom exception scan_single_ip expects
+            raise AuthenticationError(f"Authentication failed for {self.ip}")
+        response.raise_for_status()
+        return response.json()
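+
+    # Both calls below resolve to the same URL (illustrative):
+    #   client.get("/Managers/iDRAC.Embedded.1/Jobs")             # relative to /redfish/v1
+    #   client.get("/redfish/v1/Managers/iDRAC.Embedded.1/Jobs")  # absolute @odata.id path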
job_data.get("Messages", []) + message_text = "" + if messages and isinstance(messages, list): + if messages[0] and isinstance(messages[0], dict): + message_text = messages[0].get("Message", "") + + if not message_text: + message_text = job_data.get("Message", "") + + return { + "JID": job_data.get("Id", ""), + "Name": job_data.get("Name", ""), + "Status": status, + "PercentComplete": str(percent), + "Message": message_text, + "ScheduledStartTime": job_data.get("ScheduledStartTime", ""), + "StartTime": job_data.get("StartTime", ""), + "EndTime": job_data.get("EndTime", ""), + "LastUpdateTime": job_data.get("EndTime") or job_data.get("StartTime", ""), + } + + def close(self): + """세션 종료""" + self.session.close() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + +# 커스텀 예외 +class AuthenticationError(Exception): + """인증 실패""" + pass + + +class NotSupportedError(Exception): + """지원하지 않는 기능""" + pass \ No newline at end of file diff --git a/backend/templates/base.html b/backend/templates/base.html index d171aac..eadd2f0 100644 --- a/backend/templates/base.html +++ b/backend/templates/base.html @@ -63,6 +63,11 @@ XML Management + {% if current_user.is_admin %}