feat: 初始化HEFA-L2 PDI管理系统项目

添加前端Vue2项目结构,包括ElementUI集成、路由配置和API模块
实现后端FastAPI服务,包含Oracle数据库连接和PDI CRUD接口
添加OPC-UA轮询服务,支持跟踪图数据同步到Oracle
提供SQLite镜像数据库用于本地开发和快速查询
包含完整的部署脚本和文档说明
This commit is contained in:
2026-04-09 16:05:20 +08:00
commit d8b142bb4a
24 changed files with 18820 additions and 0 deletions

9
backend/.env.example Normal file
View File

@@ -0,0 +1,9 @@
# Oracle Database
ORACLE_DSN=localhost:1521/orcl
ORACLE_USER=pltm
ORACLE_PASSWORD=pltm123
# OPC-UA Server
OPC_URL=opc.tcp://192.168.1.100:4840
OPC_COUNTER_NODE=ns=2;s=PL.TRACKMAP.COUNTER
OPC_POLL_INTERVAL=2

19
backend/database.py Normal file
View File

@@ -0,0 +1,19 @@
import os
import cx_Oracle
from dotenv import load_dotenv
load_dotenv()
# Connection settings come from the environment (.env); the literals below
# are development fallbacks only — override them in production deployments.
ORACLE_DSN = os.getenv("ORACLE_DSN", "localhost:1521/ORCL")
ORACLE_USER = os.getenv("ORACLE_USER", "pltm")
ORACLE_PASSWORD = os.getenv("ORACLE_PASSWORD", "password")
def get_connection() -> cx_Oracle.Connection:
    """Open and return a fresh Oracle connection.

    The caller owns the connection and is responsible for closing it.
    """
    connect_kwargs = {
        "user": ORACLE_USER,
        "password": ORACLE_PASSWORD,
        "dsn": ORACLE_DSN,
        "encoding": "UTF-8",
    }
    return cx_Oracle.connect(**connect_kwargs)

330
backend/main.py Normal file
View File

@@ -0,0 +1,330 @@
import asyncio
import logging
import os
from contextlib import asynccontextmanager
from typing import Optional
from dotenv import load_dotenv
from fastapi import FastAPI, HTTPException, Query
from fastapi.middleware.cors import CORSMiddleware
load_dotenv()
from database import get_connection
from models import PDIPLTMCreate, PDIPLTMUpdate, OpcConfig
from opc_service import opc_service
from sqlite_sync import (
init_db, sync_all_from_oracle,
sqlite_upsert_pdi, sqlite_delete_pdi
)
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: prepare the local mirror and run OPC polling.

    Startup: creates the SQLite schema, then attempts a full Oracle ->
    SQLite sync (a failure — e.g. Oracle unreachable — is tolerated and
    only logged). Finally the OPC polling task is started.
    Shutdown: the OPC polling task is stopped.
    """
    # Init SQLite schema
    init_db()
    # Sync Oracle -> SQLite on startup (in thread pool to avoid blocking).
    # get_running_loop() is the non-deprecated way to obtain the loop from
    # inside a coroutine (asyncio.get_event_loop() is deprecated here).
    loop = asyncio.get_running_loop()
    try:
        result = await loop.run_in_executor(None, sync_all_from_oracle)
        logger.info("Startup sync: %s", result)
    except Exception as e:
        logger.warning("Startup sync failed (Oracle may be unreachable): %s", e)
    # Start OPC polling in the background
    asyncio.create_task(opc_service.start_polling())
    logger.info("OPC polling task started")
    yield
    await opc_service.stop_polling()
    logger.info("OPC polling task stopped")
app = FastAPI(title="HEFA-L2 PDI管理系统", version="1.0.0", lifespan=lifespan)
# Open CORS for the Vue dev server.
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# rejected by browsers for credentialed requests — consider listing the
# frontend origin explicitly for production; confirm with the deployment.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# ─────────────────────────────────────────────
# Sync endpoint
# ─────────────────────────────────────────────
@app.post("/api/sync")
async def trigger_sync():
    """Manually trigger a full Oracle -> SQLite sync.

    The sync is blocking database work, so it runs in the default executor.
    Returns per-table row counts on success; raises HTTP 500 on failure.
    """
    loop = asyncio.get_running_loop()  # non-deprecated inside a coroutine
    try:
        result = await loop.run_in_executor(None, sync_all_from_oracle)
        return {"message": "同步成功", "rows": result}
    except Exception as e:
        # Chain the cause so the original traceback is preserved in logs.
        raise HTTPException(status_code=500, detail=f"同步失败: {e}") from e
# ─────────────────────────────────────────────
# PDI_PLTM CRUD
# ─────────────────────────────────────────────
@app.get("/api/pdi")
def list_pdi(
    page: int = Query(1, ge=1),
    page_size: int = Query(20, ge=1, le=200),
    coilid: Optional[str] = None,
    status: Optional[int] = None,
    steel_grade: Optional[str] = None,
):
    """Paginated PDI_PLTM listing with optional filters.

    coilid / steel_grade filter with substring (LIKE) match, status with an
    exact match; all supplied filters are combined with AND. Pagination uses
    the classic Oracle ROWNUM sub-query pattern. Returns a dict with
    "total", "page", "page_size" and "data" (list of row dicts, lower-case
    keys, dates as ISO strings).
    """
    conn = get_connection()
    cursor = conn.cursor()
    try:
        # Build the WHERE clause from whichever filters were supplied;
        # values always go through bind parameters (no SQL injection).
        conditions = []
        params = {}
        if coilid:
            conditions.append("COILID LIKE :coilid")
            params["coilid"] = f"%{coilid}%"
        if status is not None:
            conditions.append("STATUS = :status")
            params["status"] = status
        if steel_grade:
            conditions.append("STEEL_GRADE LIKE :steel_grade")
            params["steel_grade"] = f"%{steel_grade}%"
        where = ("WHERE " + " AND ".join(conditions)) if conditions else ""
        # Total row count for the pager (same filters, no pagination).
        count_sql = f"SELECT COUNT(*) FROM PDI_PLTM {where}"
        cursor.execute(count_sql, params)
        total = cursor.fetchone()[0]
        offset = (page - 1) * page_size
        # Oracle ROWNUM pagination: the inner query applies filter + order,
        # the middle query caps at end_row, the outer query drops rows up
        # to start_row.
        sql = f"""
            SELECT * FROM (
                SELECT a.*, ROWNUM rn FROM (
                    SELECT SID, ROLLPROGRAMNB, SEQUENCENB, STATUS, SCHEDULE_CODE,
                        COILID, ENTRY_COIL_THICKNESS, ENTRY_COIL_WIDTH,
                        ENTRY_COIL_WEIGHT, ENTRY_OF_COIL_LENGTH,
                        EXIT_COIL_NO, EXIT_COIL_THICKNESS, EXIT_COIL_WIDTH,
                        EXIT_COIL_WEIGHT, WORK_ORDER_NO, ORDER_QUALITY,
                        STEEL_GRADE, SG_SIGN, ORDER_THICKNESS, ORDER_WIDTH,
                        COILER_DIAMETER, L2_GRADE, WEIGHT_MODE,
                        CREATED_DT, UPDATED_DT, SEND_FLAG,
                        ENTRY_COIL_THICKNESS_MAX, ENTRY_COIL_THICKNESS_MIN,
                        ENTRY_COIL_WIDTH_MAX, ENTRY_COIL_WIDTH_MIN,
                        EXIT_COIL_THICKNESS_MAX, EXIT_COIL_THICKNESS_MIN,
                        EXIT_COIL_WIDTH_MAX, EXIT_COIL_WIDTH_MIN,
                        CROSS_SECTION_AREA, UNCOILER_TENSION,
                        LOOPER_TENSION_1, PL_TENSION,
                        LOOPER_TENSION_2, LOOPER_TENSION_3,
                        DUMMY_COIL_MRK, CUT_MODE, TRIMMING, TRIMMING_WIDTH
                    FROM PDI_PLTM {where}
                    ORDER BY COILID DESC
                ) a WHERE ROWNUM <= :end_row
            ) WHERE rn > :start_row
        """
        params["end_row"] = offset + page_size
        params["start_row"] = offset
        cursor.execute(sql, params)
        columns = [col[0].lower() for col in cursor.description]
        rows = [dict(zip(columns, row)) for row in cursor.fetchall()]
        # Convert Oracle DATE/TIMESTAMP values into JSON-safe ISO strings.
        for row in rows:
            for k, v in row.items():
                if hasattr(v, 'isoformat'):
                    row[k] = v.isoformat()
        return {"total": total, "page": page, "page_size": page_size, "data": rows}
    finally:
        cursor.close()
        conn.close()
@app.get("/api/pdi/{coilid}")
def get_pdi(coilid: str):
    """Fetch a single PDI_PLTM record by its COILID (404 if absent)."""
    conn = get_connection()
    cursor = conn.cursor()
    try:
        cursor.execute("SELECT * FROM PDI_PLTM WHERE COILID = :coilid", {"coilid": coilid})
        col_names = [d[0].lower() for d in cursor.description]
        record = cursor.fetchone()
        if not record:
            raise HTTPException(status_code=404, detail="记录不存在")
        payload = dict(zip(col_names, record))
        # Oracle DATE/TIMESTAMP values become ISO strings for JSON output
        for key, value in payload.items():
            if hasattr(value, 'isoformat'):
                payload[key] = value.isoformat()
        return payload
    finally:
        cursor.close()
        conn.close()
@app.post("/api/pdi", status_code=201)
def create_pdi(data: PDIPLTMCreate):
    """Insert a new PDI_PLTM row into Oracle, then mirror it to SQLite."""
    conn = get_connection()
    cursor = conn.cursor()
    try:
        fields = data.model_dump(exclude_none=True)
        column_sql = ", ".join(name.upper() for name in fields)
        bind_sql = ", ".join(f":{name}" for name in fields)
        insert_sql = f"INSERT INTO PDI_PLTM ({column_sql}) VALUES ({bind_sql})"
        cursor.execute(insert_sql, fields)
        conn.commit()
        # Best-effort write-through to the SQLite mirror
        try:
            sqlite_upsert_pdi(fields)
        except Exception as mirror_err:
            logger.warning("SQLite mirror failed on create: %s", mirror_err)
        return {"message": "创建成功", "coilid": data.coilid}
    except HTTPException:
        raise
    except Exception as err:
        conn.rollback()
        raise HTTPException(status_code=400, detail=str(err))
    finally:
        cursor.close()
        conn.close()
@app.put("/api/pdi/{coilid}")
def update_pdi(coilid: str, data: PDIPLTMUpdate):
    """Partially update a PDI_PLTM row; only non-None fields are written.

    Raises HTTP 400 when no fields are supplied or on any Oracle error,
    HTTP 404 when the COILID does not exist.
    """
    conn = get_connection()
    cursor = conn.cursor()
    try:
        fields = {k: v for k, v in data.model_dump(exclude_none=True).items()}
        if not fields:
            raise HTTPException(status_code=400, detail="无更新字段")
        set_clause = ", ".join([f"{k.upper()} = :{k}" for k in fields.keys()])
        # Bind name "coilid_" avoids clashing with a possible "coilid"
        # bind already used in the SET clause.
        fields["coilid_"] = coilid
        sql = f"UPDATE PDI_PLTM SET {set_clause} WHERE COILID = :coilid_"
        cursor.execute(sql, fields)
        if cursor.rowcount == 0:
            raise HTTPException(status_code=404, detail="记录不存在")
        conn.commit()
        # Mirror to SQLite: re-fetch the updated row so the mirror receives
        # the full record (including columns not touched by this request).
        try:
            cursor2 = conn.cursor()
            cursor2.execute("SELECT * FROM PDI_PLTM WHERE COILID = :c", {"c": coilid})
            cols = [d[0].lower() for d in cursor2.description]
            row = cursor2.fetchone()
            cursor2.close()
            if row:
                row_dict = dict(zip(cols, row))
                for k, v in row_dict.items():
                    if hasattr(v, 'isoformat'):
                        row_dict[k] = v.isoformat()
                sqlite_upsert_pdi(row_dict)
        except Exception as e:
            # Mirror failure is deliberately non-fatal: Oracle committed.
            logger.warning("SQLite mirror failed on update: %s", e)
        return {"message": "更新成功"}
    except HTTPException:
        raise
    except Exception as e:
        conn.rollback()
        raise HTTPException(status_code=400, detail=str(e))
    finally:
        cursor.close()
        conn.close()
@app.delete("/api/pdi/{coilid}")
def delete_pdi(coilid: str):
    """Delete one PDI_PLTM row by COILID from Oracle and the SQLite mirror."""
    conn = get_connection()
    cursor = conn.cursor()
    try:
        cursor.execute("DELETE FROM PDI_PLTM WHERE COILID = :coilid", {"coilid": coilid})
        if cursor.rowcount == 0:
            raise HTTPException(status_code=404, detail="记录不存在")
        conn.commit()
        # Best-effort removal from the SQLite mirror
        try:
            sqlite_delete_pdi(coilid)
        except Exception as mirror_err:
            logger.warning("SQLite mirror failed on delete: %s", mirror_err)
        return {"message": "删除成功"}
    except HTTPException:
        raise
    except Exception as err:
        conn.rollback()
        raise HTTPException(status_code=400, detail=str(err))
    finally:
        cursor.close()
        conn.close()
# ─────────────────────────────────────────────
# CMPT_PL_TRACKMAP
# ─────────────────────────────────────────────
@app.get("/api/trackmap")
def list_trackmap():
    """Return every CMPT_PL_TRACKMAP row ordered by POSITION."""
    conn = get_connection()
    cursor = conn.cursor()
    try:
        cursor.execute(
            "SELECT POSITION, COILID, BEF_ES, ES, ENT_LOO, PL, INT_LOO, "
            "ST, EXI_LOO, RUN_SPEED_MIN, RUN_SPEED_MAX, "
            "WELD_SPEED_MIN, WELD_SPEED_MAX, TOC, TOM, MOP "
            "FROM PLTM.CMPT_PL_TRACKMAP ORDER BY POSITION"
        )
        names = [d[0].lower() for d in cursor.description]
        results = []
        for raw in cursor.fetchall():
            entry = dict(zip(names, raw))
            # Dates become ISO strings for JSON output
            for key, value in entry.items():
                if hasattr(value, 'isoformat'):
                    entry[key] = value.isoformat()
            results.append(entry)
        return results
    finally:
        cursor.close()
        conn.close()
# ─────────────────────────────────────────────
# OPC Configuration & Status
# ─────────────────────────────────────────────
@app.get("/api/opc/config")
def get_opc_config():
    """Expose the current OPC polling configuration and runtime state."""
    svc = opc_service
    return {
        "opc_url": svc.opc_url,
        "counter_node": svc.counter_node,
        "trackmap_nodes": svc.trackmap_nodes,
        "poll_interval": svc.poll_interval,
        "running": svc.running,
        "last_counter": svc.last_counter,
        "last_update": svc.last_update,
    }
@app.post("/api/opc/config")
async def save_opc_config(config: OpcConfig):
    """Apply a new OPC configuration, persist it, and restart polling.

    Bug fix: polling is now restarted in a ``finally`` block, so a failure
    while persisting the config to disk no longer leaves the OPC service
    permanently stopped — the new settings are still applied in memory and
    the HTTP 500 is raised after polling resumes.
    """
    await opc_service.stop_polling()
    opc_service.opc_url = config.opc_url
    opc_service.counter_node = config.counter_node
    opc_service.trackmap_nodes = config.trackmap_nodes
    opc_service.poll_interval = config.poll_interval
    try:
        opc_service.save_config()
    except Exception as e:
        logger.warning("Persist OPC config failed: %s", e)
        raise HTTPException(status_code=500, detail=f"配置保存失败: {e}") from e
    finally:
        # Always resume polling, even when persisting to disk failed.
        asyncio.create_task(opc_service.start_polling())
    return {"message": "OPC配置已保存并重启轮询"}
@app.get("/api/opc/status")
def opc_status():
    """Lightweight polling status plus the 50 most recent log entries."""
    recent_events = opc_service.event_log[-50:]
    return {
        "running": opc_service.running,
        "last_counter": opc_service.last_counter,
        "last_update": opc_service.last_update,
        "log": recent_events,
    }
@app.post("/api/opc/restart")
async def restart_opc():
    """Stop the current OPC polling task, then schedule a fresh one."""
    await opc_service.stop_polling()
    asyncio.create_task(opc_service.start_polling())
    return {"message": "OPC服务已重启"}

184
backend/models.py Normal file
View File

@@ -0,0 +1,184 @@
from typing import Optional, List, Dict
from pydantic import BaseModel
class PDIPLTMCreate(BaseModel):
    """Request body for POST /api/pdi.

    Field names are the lower-case counterparts of the PDI_PLTM Oracle
    columns; only ``coilid`` is required. ``None`` fields are skipped when
    the INSERT statement is built (see create_pdi).

    Bug fix: the duplicate ``work_order_no`` declaration (it appeared
    twice in the original field list) was removed — in a class body the
    second annotation silently shadowed the first.
    """

    coilid: str
    rollprogramnb: Optional[int] = None
    sequencenb: Optional[int] = None
    schedule_code: Optional[str] = None
    entry_coil_thickness: Optional[float] = None
    entry_coil_thickness_max: Optional[float] = None
    entry_coil_thickness_min: Optional[float] = None
    entry_coil_width: Optional[float] = None
    entry_coil_width_max: Optional[float] = None
    entry_coil_width_min: Optional[float] = None
    entry_coil_weight: Optional[float] = None
    entry_of_coil_length: Optional[float] = None
    entry_of_coil_inner_diameter: Optional[float] = None
    entry_of_coil_outer_diameter: Optional[float] = None
    trimming: Optional[int] = None
    trimming_width: Optional[float] = None
    smp_length: Optional[float] = None
    smp_num: Optional[float] = None
    smp_frq: Optional[str] = None
    preceding_process_code: Optional[str] = None
    next_process_code: Optional[str] = None
    hot_mill_delivery_temp: Optional[float] = None
    finished_coil_temp: Optional[float] = None
    crown_average: Optional[float] = None
    coil_flatness_average: Optional[float] = None
    coil_flatness_max_value: Optional[float] = None
    coil_flatness_min_value: Optional[float] = None
    material_yield_point: Optional[float] = None
    material_tensile: Optional[float] = None
    hotactfmwedgeavg: Optional[float] = None
    weight_mode: Optional[str] = None
    dummy_coil_mrk: Optional[str] = None
    cut_mode: Optional[str] = None
    off_gauge_head_length: Optional[float] = None
    off_gauge_tail_length: Optional[float] = None
    exit_coil_no: Optional[str] = None
    exit_coil_weight: Optional[float] = None
    exit_coil_weight_max: Optional[float] = None
    exit_coil_weight_min: Optional[float] = None
    exit_coil_thickness: Optional[float] = None
    exit_coil_thickness_max: Optional[float] = None
    exit_coil_thickness_min: Optional[float] = None
    exit_coil_width: Optional[float] = None
    exit_coil_width_max: Optional[float] = None
    exit_coil_width_min: Optional[float] = None
    work_order_no: Optional[str] = None
    order_quality: Optional[str] = None
    steel_grade: Optional[str] = None
    sg_sign: Optional[str] = None
    order_thickness: Optional[float] = None
    order_thickness_max: Optional[float] = None
    order_thickness_min: Optional[float] = None
    order_width: Optional[float] = None
    order_width_max: Optional[float] = None
    order_width_min: Optional[float] = None
    sleeve_code_of_cold_coil: Optional[str] = None
    packing_type_code: Optional[str] = None
    thk_ds: Optional[str] = None
    ext_num_01: Optional[str] = None
    # chemical elements
    c: Optional[float] = None
    si: Optional[float] = None
    mn: Optional[float] = None
    p: Optional[float] = None
    s: Optional[float] = None
    cu: Optional[float] = None
    ni: Optional[float] = None
    cr: Optional[float] = None
    mo: Optional[float] = None
    v: Optional[float] = None
    ti: Optional[float] = None
    sol_al: Optional[float] = None
    fe: Optional[float] = None
    nb: Optional[float] = None
    n: Optional[float] = None
    b: Optional[float] = None
    send_flag: Optional[str] = None
    coiler_diameter: Optional[int] = None
    l2_grade: Optional[str] = None
    scrap_cut_head_len: Optional[float] = None
    scrap_cut_tail_len: Optional[float] = None
    meterweight: Optional[float] = None
    meter_d_outside: Optional[float] = None
    meter_width: Optional[float] = None
    uncoiler_tension: Optional[float] = None
    looper_tension_1: Optional[float] = None
    pl_tension: Optional[float] = None
    looper_tension_2: Optional[float] = None
    looper_tension_3: Optional[float] = None
class PDIPLTMUpdate(BaseModel):
    """Request body for PUT /api/pdi/{coilid}.

    Every field is optional; only fields that are not ``None`` are included
    in the generated UPDATE statement (see update_pdi). The COILID itself
    comes from the URL path and cannot be changed here.
    """

    # schedule / program identification
    rollprogramnb: Optional[int] = None
    sequencenb: Optional[int] = None
    schedule_code: Optional[str] = None
    # entry coil geometry and weight
    entry_coil_thickness: Optional[float] = None
    entry_coil_thickness_max: Optional[float] = None
    entry_coil_thickness_min: Optional[float] = None
    entry_coil_width: Optional[float] = None
    entry_coil_width_max: Optional[float] = None
    entry_coil_width_min: Optional[float] = None
    entry_coil_weight: Optional[float] = None
    entry_of_coil_length: Optional[float] = None
    entry_of_coil_inner_diameter: Optional[float] = None
    entry_of_coil_outer_diameter: Optional[float] = None
    # trimming and sampling
    trimming: Optional[int] = None
    trimming_width: Optional[float] = None
    smp_length: Optional[float] = None
    smp_num: Optional[float] = None
    smp_frq: Optional[str] = None
    # process routing and upstream measurements
    preceding_process_code: Optional[str] = None
    next_process_code: Optional[str] = None
    hot_mill_delivery_temp: Optional[float] = None
    finished_coil_temp: Optional[float] = None
    crown_average: Optional[float] = None
    coil_flatness_average: Optional[float] = None
    material_yield_point: Optional[float] = None
    material_tensile: Optional[float] = None
    weight_mode: Optional[str] = None
    dummy_coil_mrk: Optional[str] = None
    cut_mode: Optional[str] = None
    off_gauge_head_length: Optional[float] = None
    off_gauge_tail_length: Optional[float] = None
    # exit coil geometry and weight
    exit_coil_no: Optional[str] = None
    exit_coil_weight: Optional[float] = None
    exit_coil_weight_max: Optional[float] = None
    exit_coil_weight_min: Optional[float] = None
    exit_coil_thickness: Optional[float] = None
    exit_coil_thickness_max: Optional[float] = None
    exit_coil_thickness_min: Optional[float] = None
    exit_coil_width: Optional[float] = None
    exit_coil_width_max: Optional[float] = None
    exit_coil_width_min: Optional[float] = None
    # order information
    work_order_no: Optional[str] = None
    order_quality: Optional[str] = None
    steel_grade: Optional[str] = None
    sg_sign: Optional[str] = None
    order_thickness: Optional[float] = None
    order_thickness_max: Optional[float] = None
    order_thickness_min: Optional[float] = None
    order_width: Optional[float] = None
    order_width_max: Optional[float] = None
    order_width_min: Optional[float] = None
    packing_type_code: Optional[str] = None
    thk_ds: Optional[str] = None
    # chemical composition
    c: Optional[float] = None
    si: Optional[float] = None
    mn: Optional[float] = None
    p: Optional[float] = None
    s: Optional[float] = None
    cu: Optional[float] = None
    ni: Optional[float] = None
    cr: Optional[float] = None
    mo: Optional[float] = None
    v: Optional[float] = None
    ti: Optional[float] = None
    sol_al: Optional[float] = None
    nb: Optional[float] = None
    n: Optional[float] = None
    b: Optional[float] = None
    # dispatch / line setup
    send_flag: Optional[str] = None
    coiler_diameter: Optional[int] = None
    l2_grade: Optional[str] = None
    uncoiler_tension: Optional[float] = None
    looper_tension_1: Optional[float] = None
    pl_tension: Optional[float] = None
    looper_tension_2: Optional[float] = None
    looper_tension_3: Optional[float] = None
    scrap_cut_head_len: Optional[float] = None
    scrap_cut_tail_len: Optional[float] = None
    meterweight: Optional[float] = None
class OpcConfig(BaseModel):
    """OPC-UA polling configuration payload (POST /api/opc/config)."""

    # OPC-UA endpoint, e.g. "opc.tcp://host:4840"
    opc_url: str
    # Node id of the change counter, e.g. "ns=2;s=PL.TRACKMAP.COUNTER"
    counter_node: str
    # Seconds between counter reads
    poll_interval: int = 2
    # Mapping: Oracle column name -> OPC node id
    trackmap_nodes: Dict[str, str] = {}

244
backend/opc_service.py Normal file
View File

@@ -0,0 +1,244 @@
"""
OPC-UA polling service.
Logic:
1. Connect to the OPC-UA server at `opc_url`.
2. Read the counter node (`counter_node`) every `poll_interval` seconds.
3. When the counter value changes, read the trackmap nodes listed in
`trackmap_nodes` (a dict: {oracle_column -> node_id}).
4. For each row returned by the trackmap nodes build an UPDATE statement
and apply it to PLTM.CMPT_PL_TRACKMAP.
`trackmap_nodes` example (stored in .env or configured via UI):
{
"COILID": "ns=2;s=PL.TRACKMAP.P01.COILID",
"BEF_ES": "ns=2;s=PL.TRACKMAP.P01.BEF_ES",
...
}
For multi-position setups, define one entry per position column and handle
positional logic accordingly. The current implementation reads a flat set of
nodes and stores them against the POSITION value also read from OPC.
"""
import asyncio
import json
import logging
import os
from datetime import datetime
from typing import Any, Dict, List, Optional
from dotenv import load_dotenv
load_dotenv()
logger = logging.getLogger(__name__)
class OpcService:
    """Background OPC-UA poller that mirrors trackmap values into Oracle.

    Reads the configured counter node every ``poll_interval`` seconds; when
    the counter value changes, every node in ``trackmap_nodes`` is read and
    the matching PLTM.CMPT_PL_TRACKMAP row is updated. When the ``opcua``
    package is not installed the service runs a simulation loop instead.

    Fixes applied: removed an unused ``import threading`` in
    ``_update_oracle`` and switched it to ``asyncio.get_running_loop()``
    (``get_event_loop()`` is deprecated inside coroutines).
    """

    def __init__(self):
        # Persisted JSON config lives next to this module.
        self.config_path = os.path.join(os.path.dirname(__file__), "opc_config.json")
        self.opc_url: str = os.getenv("OPC_URL", "opc.tcp://192.168.1.100:4840")
        self.counter_node: str = os.getenv(
            "OPC_COUNTER_NODE", "ns=2;s=PL.TRACKMAP.COUNTER"
        )
        self.poll_interval: int = int(os.getenv("OPC_POLL_INTERVAL", "2"))
        # Mapping: oracle_column_name -> OPC node id
        # Populated from .env or via API
        self.trackmap_nodes: Dict[str, str] = self._load_trackmap_nodes()
        # Load persisted config if present (overrides the env defaults)
        self._load_persisted_config()
        self.running: bool = False
        self.last_counter: Optional[Any] = None
        self.last_update: Optional[str] = None
        self.event_log: List[str] = []
        self._stop_event = asyncio.Event()
        self._task: Optional[asyncio.Task] = None

    # ------------------------------------------------------------------
    def _load_trackmap_nodes(self) -> Dict[str, str]:
        """Load trackmap node mapping from env vars prefixed OPC_NODE_."""
        nodes = {}
        for key, val in os.environ.items():
            if key.startswith("OPC_NODE_"):
                # e.g. OPC_NODE_COILID=ns=2;s=... -> {"coilid": "ns=2;s=..."}
                col = key[len("OPC_NODE_"):].lower()
                nodes[col] = val
        return nodes

    def _load_persisted_config(self):
        """Load OPC config from local json file if exists."""
        if not os.path.exists(self.config_path):
            return
        try:
            with open(self.config_path, "r", encoding="utf-8") as f:
                cfg = json.load(f)
            self.opc_url = cfg.get("opc_url", self.opc_url)
            self.counter_node = cfg.get("counter_node", self.counter_node)
            self.poll_interval = int(cfg.get("poll_interval", self.poll_interval))
            self.trackmap_nodes = cfg.get("trackmap_nodes", self.trackmap_nodes) or {}
            self._log(f"Loaded OPC config from {self.config_path}")
        except Exception as exc:
            # A corrupt config file must not prevent service startup.
            logger.warning("Failed to load OPC config %s: %s", self.config_path, exc)

    def save_config(self):
        """Persist current OPC config to local json file."""
        cfg = {
            "opc_url": self.opc_url,
            "counter_node": self.counter_node,
            "poll_interval": self.poll_interval,
            "trackmap_nodes": self.trackmap_nodes,
        }
        os.makedirs(os.path.dirname(self.config_path), exist_ok=True)
        with open(self.config_path, "w", encoding="utf-8") as f:
            json.dump(cfg, f, ensure_ascii=False, indent=2)

    def _log(self, msg: str):
        """Log *msg* and append it to the bounded in-memory event log."""
        ts = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        entry = f"[{ts}] {msg}"
        logger.info(entry)
        self.event_log.append(entry)
        if len(self.event_log) > 500:
            # Keep only the most recent 500 entries
            self.event_log = self.event_log[-500:]

    # ------------------------------------------------------------------
    async def start_polling(self):
        """Spawn the polling loop as a background task (no-op if running)."""
        if self.running:
            return
        self._stop_event.clear()
        self._task = asyncio.create_task(self._poll_loop())

    async def stop_polling(self):
        """Signal the loop to stop, cancel its task and wait for it."""
        self._stop_event.set()
        if self._task:
            self._task.cancel()
            try:
                await self._task
            except asyncio.CancelledError:
                pass
        self.running = False

    # ------------------------------------------------------------------
    async def _poll_loop(self):
        """Outer loop: (re)connect to the OPC server and poll until stopped."""
        self.running = True
        self._log(f"OPC polling started: {self.opc_url}")
        try:
            from opcua import Client  # type: ignore
        except ImportError:
            self._log("opcua package not installed running in SIMULATION mode")
            await self._simulate_loop()
            return
        while not self._stop_event.is_set():
            try:
                client = Client(self.opc_url)
                client.connect()
                self._log(f"Connected to OPC server: {self.opc_url}")
                try:
                    while not self._stop_event.is_set():
                        await self._tick(client)
                        await asyncio.sleep(self.poll_interval)
                finally:
                    client.disconnect()
                    self._log("Disconnected from OPC server")
            except Exception as exc:
                # Connection-level failure: back off briefly, then reconnect.
                self._log(f"OPC connection error: {exc}. Retrying in 5s...")
                await asyncio.sleep(5)
        self.running = False
        self._log("OPC polling stopped")

    # ------------------------------------------------------------------
    async def _tick(self, client):
        """Read counter; if changed, fetch trackmap nodes and update Oracle."""
        try:
            counter_node = client.get_node(self.counter_node)
            current_counter = counter_node.get_value()
        except Exception as exc:
            self._log(f"Failed to read counter node: {exc}")
            return
        if current_counter == self.last_counter:
            return  # nothing changed
        self._log(
            f"Counter changed: {self.last_counter} -> {current_counter}. "
            "Fetching trackmap data..."
        )
        self.last_counter = current_counter
        self.last_update = datetime.now().isoformat()
        if not self.trackmap_nodes:
            self._log("No trackmap nodes configured skipping DB update")
            return
        # Read all configured nodes
        data: Dict[str, Any] = {}
        for col, node_id in self.trackmap_nodes.items():
            try:
                node = client.get_node(node_id)
                data[col] = node.get_value()
            except Exception as exc:
                self._log(f"Failed to read node {node_id}: {exc}")
        if not data:
            return
        # Determine POSITION from data (must be one of the mapped columns)
        position = data.get("position")
        if position is None:
            self._log("'position' not in trackmap_nodes data cannot update row")
            return
        await self._update_oracle(position, data)

    # ------------------------------------------------------------------
    async def _update_oracle(self, position: Any, data: Dict[str, Any]):
        """Write fetched OPC values into PLTM.CMPT_PL_TRACKMAP."""

        def _do_update():
            # Runs in the default executor because cx_Oracle calls block.
            try:
                from database import get_connection
                conn = get_connection()
                cursor = conn.cursor()
                try:
                    updatable = {k: v for k, v in data.items() if k != "position"}
                    if not updatable:
                        return
                    set_clause = ", ".join(
                        [f"{k.upper()} = :{k}" for k in updatable.keys()]
                    )
                    updatable["position_"] = position
                    sql = (
                        f"UPDATE PLTM.CMPT_PL_TRACKMAP "
                        f"SET {set_clause} WHERE POSITION = :position_"
                    )
                    cursor.execute(sql, updatable)
                    conn.commit()
                    self._log(
                        f"Updated CMPT_PL_TRACKMAP POSITION={position}: "
                        + ", ".join(f"{k}={v}" for k, v in updatable.items() if k != "position_")
                    )
                finally:
                    cursor.close()
                    conn.close()
            except Exception as exc:
                self._log(f"Oracle update failed: {exc}")

        # Fix: dropped an unused `import threading`; use the running loop
        # (asyncio.get_event_loop() is deprecated inside coroutines).
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, _do_update)

    # ------------------------------------------------------------------
    async def _simulate_loop(self):
        """Simulation mode when opcua is not installed."""
        counter = 0
        while not self._stop_event.is_set():
            await asyncio.sleep(self.poll_interval)
            counter += 1
            self.last_counter = counter
            self.last_update = datetime.now().isoformat()
            self._log(f"[SIM] Counter tick: {counter}")
        self.running = False
# Module-level singleton; imported by main.py and shared by all endpoints.
opc_service = OpcService()

9
backend/requirements.txt Normal file
View File

@@ -0,0 +1,9 @@
fastapi==0.111.0
uvicorn[standard]==0.29.0
cx_Oracle==8.3.0
pydantic==2.7.1
python-dotenv==1.0.1
opcua==0.98.13
# NOTE: the PyPI "asyncio" backport (asyncio==3.4.3) was removed from this
# list — asyncio ships with the Python 3 standard library, and installing
# the obsolete backport can shadow it and break modern interpreters.
httpx==0.27.0
python-multipart==0.0.9

303
backend/sqlite_sync.py Normal file
View File

@@ -0,0 +1,303 @@
"""
SQLite mirror for PDI_PLTM and CMPT_PL_TRACKMAP.
On startup (and on demand) the service pulls all rows from Oracle and
upserts them into a local SQLite file (hefa_l2.db).
Whenever the FastAPI endpoints write to Oracle they also call the
corresponding sqlite_* helper here so the two databases stay in sync.
"""
import sqlite3
import logging
import os
from typing import Any, Dict, List, Optional
logger = logging.getLogger(__name__)
# Local SQLite mirror file, created next to this module.
DB_PATH = os.path.join(os.path.dirname(__file__), "hefa_l2.db")
# ─────────────────────────────────────────────────────────────
# Connection helper
# ─────────────────────────────────────────────────────────────
def get_sqlite() -> sqlite3.Connection:
    """Open the mirror DB with Row access and WAL journaling enabled."""
    connection = sqlite3.connect(DB_PATH)
    connection.row_factory = sqlite3.Row
    connection.execute("PRAGMA journal_mode=WAL")
    return connection
# ─────────────────────────────────────────────────────────────
# Schema bootstrap
# ─────────────────────────────────────────────────────────────
PDI_DDL = """
CREATE TABLE IF NOT EXISTS PDI_PLTM (
SID INTEGER,
ROLLPROGRAMNB INTEGER,
SEQUENCENB INTEGER,
STATUS INTEGER DEFAULT 0,
SCHEDULE_CODE TEXT,
COILID TEXT NOT NULL PRIMARY KEY,
ENTRY_COIL_THICKNESS REAL,
ENTRY_COIL_THICKNESS_MAX REAL,
ENTRY_COIL_THICKNESS_MIN REAL,
ENTRY_COIL_WIDTH REAL,
ENTRY_COIL_WIDTH_MAX REAL,
ENTRY_COIL_WIDTH_MIN REAL,
ENTRY_COIL_WEIGHT REAL,
ENTRY_OF_COIL_LENGTH REAL,
ENTRY_OF_COIL_INNER_DIAMETER REAL,
ENTRY_OF_COIL_OUTER_DIAMETER REAL,
TRIMMING INTEGER,
TRIMMING_WIDTH REAL,
SMP_LENGTH REAL,
SMP_NUM REAL,
SMP_FRQ TEXT,
SMP_NUM_HEAD REAL,
SMP_NUM_MID REAL,
SMP_NUM_TAIL REAL,
PRECEDING_PROCESS_CODE TEXT,
NEXT_PROCESS_CODE TEXT,
HOT_MILL_DELIVERY_TEMP REAL,
FINISHED_COIL_TEMP REAL,
CROWN_AVERAGE REAL,
COIL_FLATNESS_AVERAGE REAL,
COIL_FLATNESS_MAX_VALUE REAL,
COIL_FLATNESS_MIN_VALUE REAL,
MATERIAL_YIELD_POINT REAL,
MATERIAL_TENSILE REAL,
HOTACTFMWEDGEAVG REAL,
WEIGHT_MODE TEXT,
DUMMY_COIL_MRK TEXT,
CUT_MODE TEXT,
OFF_GAUGE_HEAD_LENGTH REAL,
OFF_GAUGE_TAIL_LENGTH REAL,
EXIT_COIL_NO TEXT,
EXIT_COIL_WEIGHT REAL,
EXIT_COIL_WEIGHT_MAX REAL,
EXIT_COIL_WEIGHT_MIN REAL,
EXIT_COIL_THICKNESS REAL,
EXIT_COIL_THICKNESS_MAX REAL,
EXIT_COIL_THICKNESS_MIN REAL,
EXIT_COIL_WIDTH REAL,
EXIT_COIL_WIDTH_MAX REAL,
EXIT_COIL_WIDTH_MIN REAL,
WORK_ORDER_NO TEXT,
ORDER_QUALITY TEXT,
STEEL_GRADE TEXT,
SG_SIGN TEXT,
ORDER_THICKNESS REAL,
ORDER_THICKNESS_MAX REAL,
ORDER_THICKNESS_MIN REAL,
ORDER_WIDTH REAL,
ORDER_WIDTH_MAX REAL,
ORDER_WIDTH_MIN REAL,
SLEEVE_CODE_OF_COLD_COIL TEXT,
PACKING_TYPE_CODE TEXT,
THK_DS TEXT,
EXT_NUM_01 TEXT,
C REAL, SI REAL, MN REAL, P REAL, S REAL,
CU REAL, NI REAL, CR REAL, MO REAL, V REAL,
TI REAL, SOL_AL REAL, FE REAL, NB REAL, N REAL, B REAL,
SEND_FLAG TEXT,
SEND_DATE TEXT,
TRANSACTION_ID TEXT,
VERSION INTEGER,
TEXT1 TEXT, TEXT2 TEXT, TEXT3 TEXT, TEXT4 TEXT, TEXT5 TEXT,
TOC TEXT, TOM TEXT, MOP TEXT,
POSITION INTEGER DEFAULT 0,
CROSS_SECTION_AREA REAL,
UNCOILER_TENSION REAL,
LOOPER_TENSION_1 REAL,
PL_TENSION REAL,
LOOPER_TENSION_2 REAL,
LOOPER_TENSION_3 REAL,
METERWEIGHT REAL,
METER_D_OUTSIDE REAL,
METER_WIDTH REAL,
SCRAP_CUT_HEAD_LEN REAL,
SCRAP_CUT_TAIL_LEN REAL,
COILER_DIAMETER INTEGER,
L2_GRADE TEXT,
CREATED_BY TEXT,
CREATED_DT TEXT,
CREATED_BY_NAME TEXT,
UPDATED_BY TEXT,
UPDATED_DT TEXT,
UPDATED_BY_NAME TEXT
)
"""
TRACKMAP_DDL = """
CREATE TABLE IF NOT EXISTS CMPT_PL_TRACKMAP (
POSITION INTEGER PRIMARY KEY,
COILID TEXT,
BEF_ES INTEGER,
ES INTEGER,
ENT_LOO INTEGER,
PL INTEGER,
INT_LOO INTEGER,
ST INTEGER,
EXI_LOO INTEGER,
RUN_SPEED_MIN REAL,
RUN_SPEED_MAX REAL,
WELD_SPEED_MIN REAL,
WELD_SPEED_MAX REAL,
TOC TEXT,
TOM TEXT,
MOP TEXT
)
"""
def init_db():
    """Create the mirror tables if they don't exist yet."""
    conn = get_sqlite()
    try:
        for ddl in (PDI_DDL, TRACKMAP_DDL):
            conn.execute(ddl)
        conn.commit()
        logger.info("SQLite schema ready: %s", DB_PATH)
    finally:
        conn.close()
# ─────────────────────────────────────────────────────────────
# Full sync from Oracle → SQLite
# ─────────────────────────────────────────────────────────────
def _oracle_rows_to_dicts(cursor) -> List[Dict[str, Any]]:
columns = [col[0].upper() for col in cursor.description]
rows = []
for raw in cursor.fetchall():
row = {}
for col, val in zip(columns, raw):
if hasattr(val, 'isoformat'):
val = val.isoformat()
row[col] = val
rows.append(row)
return rows
def sync_pdi_from_oracle() -> int:
    """Pull all PDI_PLTM rows from Oracle and UPSERT them into SQLite.

    Returns the number of rows mirrored (0 when Oracle has none).
    """
    from database import get_connection

    oracle_conn = get_connection()
    oracle_cur = oracle_conn.cursor()
    try:
        oracle_cur.execute("SELECT * FROM PDI_PLTM")
        rows = _oracle_rows_to_dicts(oracle_cur)
    finally:
        oracle_cur.close()
        oracle_conn.close()
    if not rows:
        return 0
    mirror = get_sqlite()
    try:
        columns = list(rows[0].keys())
        stmt = "INSERT OR REPLACE INTO PDI_PLTM ({}) VALUES ({})".format(
            ", ".join(columns),
            ", ".join(f":{c}" for c in columns),
        )
        mirror.executemany(stmt, rows)
        mirror.commit()
        logger.info("Synced %d PDI_PLTM rows to SQLite", len(rows))
        return len(rows)
    finally:
        mirror.close()
def sync_trackmap_from_oracle() -> int:
    """Pull all CMPT_PL_TRACKMAP rows from Oracle and UPSERT into SQLite.

    Returns the number of rows mirrored (0 when Oracle has none).
    """
    from database import get_connection

    oracle_conn = get_connection()
    oracle_cur = oracle_conn.cursor()
    try:
        oracle_cur.execute(
            "SELECT POSITION, COILID, BEF_ES, ES, ENT_LOO, PL, INT_LOO, "
            "ST, EXI_LOO, RUN_SPEED_MIN, RUN_SPEED_MAX, "
            "WELD_SPEED_MIN, WELD_SPEED_MAX, TOC, TOM, MOP "
            "FROM PLTM.CMPT_PL_TRACKMAP ORDER BY POSITION"
        )
        rows = _oracle_rows_to_dicts(oracle_cur)
    finally:
        oracle_cur.close()
        oracle_conn.close()
    if not rows:
        return 0
    mirror = get_sqlite()
    try:
        columns = list(rows[0].keys())
        stmt = "INSERT OR REPLACE INTO CMPT_PL_TRACKMAP ({}) VALUES ({})".format(
            ", ".join(columns),
            ", ".join(f":{c}" for c in columns),
        )
        mirror.executemany(stmt, rows)
        mirror.commit()
        logger.info("Synced %d CMPT_PL_TRACKMAP rows to SQLite", len(rows))
        return len(rows)
    finally:
        mirror.close()
def sync_all_from_oracle() -> Dict[str, int]:
    """Run both table syncs (PDI first) and report per-table row counts."""
    counts = {
        "pdi_pltm": sync_pdi_from_oracle(),
        "cmpt_pl_trackmap": sync_trackmap_from_oracle(),
    }
    return counts
# ─────────────────────────────────────────────────────────────
# Incremental write-through helpers (called after Oracle commits)
# ─────────────────────────────────────────────────────────────
def sqlite_upsert_pdi(row: Dict[str, Any]):
    """Insert or replace one PDI_PLTM row in SQLite (keys case-insensitive)."""
    payload = {key.upper(): value for key, value in row.items()}
    names = list(payload)
    stmt = "INSERT OR REPLACE INTO PDI_PLTM ({}) VALUES ({})".format(
        ", ".join(names),
        ", ".join(f":{n}" for n in names),
    )
    mirror = get_sqlite()
    try:
        mirror.execute(stmt, payload)
        mirror.commit()
    finally:
        mirror.close()
def sqlite_delete_pdi(coilid: str):
    """Remove the PDI_PLTM row with the given COILID from the mirror."""
    mirror = get_sqlite()
    try:
        mirror.execute("DELETE FROM PDI_PLTM WHERE COILID = ?", (coilid,))
        mirror.commit()
    finally:
        mirror.close()
def sqlite_upsert_trackmap(row: Dict[str, Any]):
    """Insert or replace one CMPT_PL_TRACKMAP row in the SQLite mirror."""
    payload = {key.upper(): value for key, value in row.items()}
    names = list(payload)
    stmt = "INSERT OR REPLACE INTO CMPT_PL_TRACKMAP ({}) VALUES ({})".format(
        ", ".join(names),
        ", ".join(f":{n}" for n in names),
    )
    mirror = get_sqlite()
    try:
        mirror.execute(stmt, payload)
        mirror.commit()
    finally:
        mirror.close()