feat: CONAI Phase 1 MVP 초기 구현

소형 건설업체(100억 미만)를 위한 AI 기반 토목공사 통합관리 플랫폼

Backend (FastAPI):
- SQLAlchemy 모델 13개 (users, projects, wbs, tasks, daily_reports, reports, inspections, quality, weather, permits, rag, settings)
- API 라우터 11개 (auth, projects, tasks, daily_reports, reports, inspections, weather, rag, kakao, permits, settings)
- Services: Claude AI 래퍼, CPM Gantt 계산, 기상청 API, RAG(pgvector), 카카오 Skill API
- Alembic 마이그레이션 (pgvector 포함)
- pytest 테스트 (CPM, 날씨 경보)

Frontend (Next.js 15):
- 11개 페이지 (대시보드, 프로젝트, Gantt, 일보, 검측, 품질, 날씨, 인허가, RAG, 설정)
- TanStack Query + Zustand + Tailwind CSS

인프라:
- Docker Compose (PostgreSQL pgvector + backend + frontend)
- 한국어 README 및 설치 가이드

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
sinmb79
2026-03-24 20:06:36 +09:00
commit 2a4950d8a0
99 changed files with 7447 additions and 0 deletions
+77
View File
@@ -0,0 +1,77 @@
# =====================================================
# CONAI 환경변수 설정 파일
# =====================================================
# 이 파일을 .env 로 복사한 후 실제 값을 입력하세요:
# cp .env.example .env
#
# ⚠️ .env 파일은 절대 GitHub에 올리지 마세요!
# .gitignore에 이미 포함되어 있습니다.
# =====================================================
# -----------------------------------------------
# [필수] 앱 보안 키
# 아래 명령으로 랜덤 키를 생성하세요:
# python -c "import secrets; print(secrets.token_hex(32))"
# -----------------------------------------------
SECRET_KEY=여기에-랜덤-키-입력-필수
# 개발 중에는 true, 운영에서는 반드시 false
DEBUG=true
# -----------------------------------------------
# [필수] Supabase 데이터베이스
# https://supabase.com 에서 프로젝트 생성 후 복사
# Settings → Database → Connection string
# -----------------------------------------------
DATABASE_URL=postgresql+asyncpg://postgres:비밀번호@db.프로젝트ID.supabase.co:5432/postgres
DATABASE_URL_SYNC=postgresql://postgres:비밀번호@db.프로젝트ID.supabase.co:5432/postgres
SUPABASE_URL=https://프로젝트ID.supabase.co
SUPABASE_ANON_KEY=여기에-anon-key-입력
SUPABASE_SERVICE_KEY=여기에-service-role-key-입력
SUPABASE_STORAGE_BUCKET=conai-documents
# -----------------------------------------------
# [필수] Claude AI (Anthropic)
# https://console.anthropic.com 에서 발급
# -----------------------------------------------
ANTHROPIC_API_KEY=sk-ant-여기에입력
CLAUDE_MODEL=claude-sonnet-4-5
# -----------------------------------------------
# [필수] 기상청 Open API (무료)
# https://www.data.go.kr 에서 회원가입 후 발급
# "기상청_단기예보 조회서비스" 검색하여 활용신청
# -----------------------------------------------
KMA_API_KEY=여기에입력
# -----------------------------------------------
# [필수] 법규 Q&A 임베딩 - 둘 중 하나 선택
# Voyage AI (추천): https://www.voyageai.com
# OpenAI: https://platform.openai.com
# -----------------------------------------------
VOYAGE_API_KEY=pa-여기에입력
# OPENAI_API_KEY=sk-여기에입력
# -----------------------------------------------
# [선택] 카카오 챗봇
# https://i.kakao.com 에서 챗봇 생성 후 발급
# 없으면 웹 대시보드만 사용 가능 (기능 제한 없음)
# -----------------------------------------------
KAKAO_APP_KEY=
KAKAO_ADMIN_KEY=
KAKAO_CHANNEL_ID=
# -----------------------------------------------
# [선택] AWS S3 (파일 저장)
# Supabase Storage 사용 시 불필요
# -----------------------------------------------
# AWS_ACCESS_KEY_ID=
# AWS_SECRET_ACCESS_KEY=
# AWS_REGION=ap-northeast-2
# S3_BUCKET=conai-files
# -----------------------------------------------
# CORS 허용 주소 (프론트엔드 URL)
# -----------------------------------------------
CORS_ORIGINS=["http://localhost:3000"]
+17
View File
@@ -0,0 +1,17 @@
FROM python:3.11-slim

WORKDIR /app

# System dependencies required by WeasyPrint (PDF rendering).
# --no-install-recommends keeps the image slim.
RUN apt-get update && apt-get install -y --no-install-recommends \
    libpango-1.0-0 libpangoft2-1.0-0 libcairo2 libgdk-pixbuf2.0-0 \
    libffi-dev libssl-dev \
    && rm -rf /var/lib/apt/lists/*

# Python dependencies. Copy only the manifest first so this layer is
# cached unless dependencies change.
COPY pyproject.toml .
# --upgrade is required: pip and setuptools already exist in the base
# image, so without it the first install is a silent no-op.
RUN pip install --no-cache-dir --upgrade pip setuptools \
    && pip install --no-cache-dir -e .

# Application code last, to maximise layer-cache hits during development.
COPY . .

# Documents the port uvicorn binds below (informational for operators/compose).
EXPOSE 8000

CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
+41
View File
@@ -0,0 +1,41 @@
# Alembic configuration for the CONAI backend.
[alembic]
# Location of the migration environment (env.py and versions/).
script_location = alembic
# Prepend the project root to sys.path so the "app" package is importable.
prepend_sys_path = .
version_path_separator = os
# Placeholder DSN — overridden at runtime by alembic/env.py, which injects
# settings.DATABASE_URL via config.set_main_option().
sqlalchemy.url = driver://user:pass@localhost/dbname

# No post-write hooks (e.g. code formatters) configured.
[post_write_hooks]

# ---- Python logging configuration (logging.config.fileConfig schema) ----
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

# Keep SQLAlchemy engine logging quiet unless explicitly raised.
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
+62
View File
@@ -0,0 +1,62 @@
"""Alembic migration environment (async SQLAlchemy engine)."""
import asyncio
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
import sys, os

# Make the backend package (app/) importable when alembic runs from the
# project root.
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))

from app.config import settings
from app.core.database import Base
# Import all models to ensure they're registered on Base.metadata
# (required so autogenerate can see every table).
import app.models  # noqa: F401

config = context.config
# Override the placeholder URL from alembic.ini with the real async DSN.
config.set_main_option("sqlalchemy.url", settings.DATABASE_URL)

if config.config_file_name is not None:
    fileConfig(config.config_file_name)

target_metadata = Base.metadata


def run_migrations_offline() -> None:
    """Emit migration SQL without connecting to a database ("offline" mode)."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    """Run migrations on an established connection (called via run_sync)."""
    context.configure(connection=connection, target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Build an async engine from the ini section and apply migrations."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # one-shot migration run: no pooling needed
    )
    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)
    await connectable.dispose()


def run_migrations_online() -> None:
    """Entry point for online mode: drive the async migration to completion."""
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
@@ -0,0 +1,370 @@
"""Initial schema with all Phase 1 tables
Revision ID: 001
Revises:
Create Date: 2026-03-24
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID, JSONB
revision = '001'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the full Phase 1 schema.

    Creation order matters: referenced tables (users, client_profiles,
    projects, ...) are created before the tables that hold foreign keys
    to them. All tables share a UUID primary key and created_at/updated_at
    timestamps.
    """
    # Enable pgvector extension
    op.execute("CREATE EXTENSION IF NOT EXISTS vector")
    # users
    op.create_table(
        'users',
        # gen_random_uuid() is built in from PostgreSQL 13 (earlier versions
        # need pgcrypto) — assumed available on the target server; confirm.
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('email', sa.String(255), unique=True, nullable=False),
        sa.Column('hashed_password', sa.String(255), nullable=False),
        sa.Column('name', sa.String(100), nullable=False),
        sa.Column('role', sa.Enum('admin', 'site_manager', 'supervisor', 'worker', name='user_role'), nullable=False, server_default='site_manager'),
        sa.Column('phone', sa.String(20), nullable=True),
        # Links a user to their KakaoTalk identity for the chatbot channel.
        sa.Column('kakao_user_key', sa.String(100), unique=True, nullable=True),
        sa.Column('is_active', sa.Boolean, nullable=False, server_default='true'),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    op.create_index('ix_users_email', 'users', ['email'])
    op.create_index('ix_users_kakao_user_key', 'users', ['kakao_user_key'])
    # client_profiles (before projects since projects FK to this)
    op.create_table(
        'client_profiles',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('name', sa.String(100), nullable=False),
        sa.Column('report_frequency', sa.String(20), nullable=False, server_default='weekly'),
        sa.Column('template_config', JSONB, nullable=True),
        sa.Column('contact_info', JSONB, nullable=True),
        sa.Column('is_default', sa.Boolean, nullable=False, server_default='false'),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # projects
    op.create_table(
        'projects',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('name', sa.String(200), nullable=False),
        sa.Column('code', sa.String(50), unique=True, nullable=False),
        sa.Column('client_profile_id', UUID(as_uuid=True), sa.ForeignKey('client_profiles.id'), nullable=True),
        sa.Column('construction_type', sa.Enum('road', 'sewer', 'water', 'bridge', 'site_work', 'other', name='construction_type'), nullable=False, server_default='other'),
        sa.Column('contract_amount', sa.BigInteger, nullable=True),
        sa.Column('start_date', sa.Date, nullable=True),
        sa.Column('end_date', sa.Date, nullable=True),
        sa.Column('location_address', sa.Text, nullable=True),
        sa.Column('location_lat', sa.Float, nullable=True),
        sa.Column('location_lng', sa.Float, nullable=True),
        # KMA forecast grid coordinates (presumably derived from lat/lng —
        # confirm against the weather service).
        sa.Column('weather_grid_x', sa.Integer, nullable=True),
        sa.Column('weather_grid_y', sa.Integer, nullable=True),
        sa.Column('status', sa.Enum('planning', 'active', 'suspended', 'completed', name='project_status'), nullable=False, server_default='planning'),
        sa.Column('owner_id', UUID(as_uuid=True), sa.ForeignKey('users.id'), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    op.create_index('ix_projects_code', 'projects', ['code'])
    # wbs_items — self-referencing tree via parent_id
    op.create_table(
        'wbs_items',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('project_id', UUID(as_uuid=True), sa.ForeignKey('projects.id', ondelete='CASCADE'), nullable=False),
        sa.Column('parent_id', UUID(as_uuid=True), sa.ForeignKey('wbs_items.id'), nullable=True),
        sa.Column('code', sa.String(50), nullable=False),
        sa.Column('name', sa.String(200), nullable=False),
        sa.Column('level', sa.Integer, nullable=False, server_default='1'),
        sa.Column('unit', sa.String(20), nullable=True),
        sa.Column('design_qty', sa.Float, nullable=True),
        sa.Column('unit_price', sa.Float, nullable=True),
        sa.Column('sort_order', sa.Integer, nullable=False, server_default='0'),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # tasks — includes CPM scheduling fields (early/late dates, float)
    op.create_table(
        'tasks',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('project_id', UUID(as_uuid=True), sa.ForeignKey('projects.id', ondelete='CASCADE'), nullable=False),
        sa.Column('wbs_item_id', UUID(as_uuid=True), sa.ForeignKey('wbs_items.id'), nullable=True),
        sa.Column('name', sa.String(200), nullable=False),
        sa.Column('planned_start', sa.Date, nullable=True),
        sa.Column('planned_end', sa.Date, nullable=True),
        sa.Column('actual_start', sa.Date, nullable=True),
        sa.Column('actual_end', sa.Date, nullable=True),
        sa.Column('progress_pct', sa.Float, nullable=False, server_default='0'),
        sa.Column('is_milestone', sa.Boolean, nullable=False, server_default='false'),
        sa.Column('is_critical', sa.Boolean, nullable=False, server_default='false'),
        sa.Column('early_start', sa.Date, nullable=True),
        sa.Column('early_finish', sa.Date, nullable=True),
        sa.Column('late_start', sa.Date, nullable=True),
        sa.Column('late_finish', sa.Date, nullable=True),
        sa.Column('total_float', sa.Integer, nullable=True),
        sa.Column('sort_order', sa.Integer, nullable=False, server_default='0'),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # task_dependencies — FS/SS/FF/SF precedence links with lag
    op.create_table(
        'task_dependencies',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('predecessor_id', UUID(as_uuid=True), sa.ForeignKey('tasks.id', ondelete='CASCADE'), nullable=False),
        sa.Column('successor_id', UUID(as_uuid=True), sa.ForeignKey('tasks.id', ondelete='CASCADE'), nullable=False),
        sa.Column('dependency_type', sa.Enum('FS', 'SS', 'FF', 'SF', name='dependency_type'), nullable=False, server_default='FS'),
        sa.Column('lag_days', sa.Integer, nullable=False, server_default='0'),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # daily_reports
    op.create_table(
        'daily_reports',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('project_id', UUID(as_uuid=True), sa.ForeignKey('projects.id', ondelete='CASCADE'), nullable=False),
        sa.Column('report_date', sa.Date, nullable=False),
        sa.Column('weather_summary', sa.String(100), nullable=True),
        sa.Column('temperature_high', sa.Float, nullable=True),
        sa.Column('temperature_low', sa.Float, nullable=True),
        sa.Column('workers_count', JSONB, nullable=True),
        sa.Column('equipment_list', JSONB, nullable=True),
        sa.Column('work_content', sa.Text, nullable=True),
        sa.Column('issues', sa.Text, nullable=True),
        sa.Column('input_source', sa.Enum('kakao', 'web', 'api', name='input_source'), nullable=False, server_default='web'),
        # Original free-text message when the report came in via KakaoTalk.
        sa.Column('raw_kakao_input', sa.Text, nullable=True),
        sa.Column('ai_generated', sa.Boolean, nullable=False, server_default='false'),
        sa.Column('status', sa.Enum('draft', 'confirmed', 'submitted', name='daily_report_status'), nullable=False, server_default='draft'),
        sa.Column('confirmed_by', UUID(as_uuid=True), sa.ForeignKey('users.id'), nullable=True),
        sa.Column('confirmed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('pdf_s3_key', sa.String(500), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    op.create_index('ix_daily_reports_date', 'daily_reports', ['report_date'])
    # daily_report_photos
    op.create_table(
        'daily_report_photos',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('daily_report_id', UUID(as_uuid=True), sa.ForeignKey('daily_reports.id', ondelete='CASCADE'), nullable=False),
        sa.Column('s3_key', sa.String(500), nullable=False),
        sa.Column('caption', sa.String(200), nullable=True),
        sa.Column('sort_order', sa.Integer, nullable=False, server_default='0'),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # reports (weekly/monthly)
    op.create_table(
        'reports',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('project_id', UUID(as_uuid=True), sa.ForeignKey('projects.id', ondelete='CASCADE'), nullable=False),
        sa.Column('report_type', sa.Enum('weekly', 'monthly', name='report_type'), nullable=False),
        sa.Column('period_start', sa.Date, nullable=False),
        sa.Column('period_end', sa.Date, nullable=False),
        sa.Column('content_json', JSONB, nullable=True),
        sa.Column('ai_draft_text', sa.Text, nullable=True),
        sa.Column('status', sa.Enum('draft', 'reviewed', 'submitted', name='report_status'), nullable=False, server_default='draft'),
        sa.Column('pdf_s3_key', sa.String(500), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # inspection_requests
    op.create_table(
        'inspection_requests',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('project_id', UUID(as_uuid=True), sa.ForeignKey('projects.id', ondelete='CASCADE'), nullable=False),
        sa.Column('wbs_item_id', UUID(as_uuid=True), sa.ForeignKey('wbs_items.id'), nullable=True),
        sa.Column('inspection_type', sa.String(50), nullable=False),
        sa.Column('requested_date', sa.Date, nullable=False),
        sa.Column('location_detail', sa.String(200), nullable=True),
        sa.Column('checklist_items', JSONB, nullable=True),
        # result stays NULL until the inspection has been performed.
        sa.Column('result', sa.Enum('pass', 'fail', 'conditional_pass', name='inspection_result'), nullable=True),
        sa.Column('inspector_name', sa.String(100), nullable=True),
        sa.Column('notes', sa.Text, nullable=True),
        sa.Column('ai_generated', sa.Boolean, nullable=False, server_default='false'),
        sa.Column('status', sa.Enum('draft', 'sent', 'completed', name='inspection_status'), nullable=False, server_default='draft'),
        sa.Column('pdf_s3_key', sa.String(500), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # quality_tests
    op.create_table(
        'quality_tests',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('project_id', UUID(as_uuid=True), sa.ForeignKey('projects.id', ondelete='CASCADE'), nullable=False),
        sa.Column('wbs_item_id', UUID(as_uuid=True), sa.ForeignKey('wbs_items.id'), nullable=True),
        sa.Column('test_type', sa.String(50), nullable=False),
        sa.Column('test_date', sa.Date, nullable=False),
        sa.Column('location_detail', sa.String(200), nullable=True),
        sa.Column('design_value', sa.Float, nullable=True),
        sa.Column('measured_value', sa.Float, nullable=False),
        sa.Column('unit', sa.String(20), nullable=False),
        sa.Column('result', sa.Enum('pass', 'fail', name='quality_result'), nullable=False),
        sa.Column('lab_name', sa.String(100), nullable=True),
        sa.Column('report_number', sa.String(100), nullable=True),
        sa.Column('notes', sa.Text, nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # weather_data
    op.create_table(
        'weather_data',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('project_id', UUID(as_uuid=True), sa.ForeignKey('projects.id', ondelete='CASCADE'), nullable=False),
        sa.Column('forecast_date', sa.Date, nullable=False),
        sa.Column('forecast_type', sa.Enum('short_term', 'medium_term', 'observed', name='forecast_type'), nullable=False),
        sa.Column('temperature_high', sa.Float, nullable=True),
        sa.Column('temperature_low', sa.Float, nullable=True),
        sa.Column('precipitation_mm', sa.Float, nullable=True),
        sa.Column('wind_speed_ms', sa.Float, nullable=True),
        sa.Column('weather_code', sa.String(20), nullable=True),
        # Raw upstream API payload kept for auditing/reprocessing.
        sa.Column('raw_data', JSONB, nullable=True),
        sa.Column('fetched_at', sa.DateTime(timezone=True), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # weather_alerts
    op.create_table(
        'weather_alerts',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('project_id', UUID(as_uuid=True), sa.ForeignKey('projects.id', ondelete='CASCADE'), nullable=False),
        sa.Column('task_id', UUID(as_uuid=True), sa.ForeignKey('tasks.id'), nullable=True),
        sa.Column('alert_date', sa.Date, nullable=False),
        sa.Column('alert_type', sa.String(50), nullable=False),
        sa.Column('severity', sa.Enum('warning', 'critical', name='alert_severity'), nullable=False),
        sa.Column('message', sa.Text, nullable=False),
        sa.Column('is_acknowledged', sa.Boolean, nullable=False, server_default='false'),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # permit_items
    op.create_table(
        'permit_items',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('project_id', UUID(as_uuid=True), sa.ForeignKey('projects.id', ondelete='CASCADE'), nullable=False),
        sa.Column('permit_type', sa.String(100), nullable=False),
        sa.Column('authority', sa.String(100), nullable=True),
        sa.Column('required', sa.Boolean, nullable=False, server_default='true'),
        sa.Column('deadline', sa.Date, nullable=True),
        sa.Column('status', sa.Enum('not_started', 'submitted', 'in_review', 'approved', 'rejected', name='permit_status'), nullable=False, server_default='not_started'),
        sa.Column('submitted_date', sa.Date, nullable=True),
        sa.Column('approved_date', sa.Date, nullable=True),
        sa.Column('document_s3_key', sa.String(500), nullable=True),
        sa.Column('notes', sa.Text, nullable=True),
        sa.Column('sort_order', sa.Integer, nullable=False, server_default='0'),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # rag_sources
    op.create_table(
        'rag_sources',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('title', sa.String(300), nullable=False),
        sa.Column('source_type', sa.Enum('kcs', 'law', 'regulation', 'guideline', name='rag_source_type'), nullable=False),
        sa.Column('source_url', sa.Text, nullable=True),
        sa.Column('file_s3_key', sa.String(500), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # rag_chunks (with pgvector)
    op.create_table(
        'rag_chunks',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('source_id', UUID(as_uuid=True), sa.ForeignKey('rag_sources.id', ondelete='CASCADE'), nullable=False),
        sa.Column('chunk_index', sa.Integer, nullable=False),
        sa.Column('content', sa.Text, nullable=False),
        sa.Column('metadata', JSONB, nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # Add vector column separately (pgvector syntax). Dimension 1024
    # presumably matches the embedding model used by the RAG service —
    # TODO confirm against the embedding provider configuration.
    op.execute("ALTER TABLE rag_chunks ADD COLUMN IF NOT EXISTS embedding vector(1024)")
    # NOTE(review): an ivfflat index built while the table is empty learns
    # unrepresentative centroids; consider REINDEX after bulk-loading chunks.
    op.execute("CREATE INDEX IF NOT EXISTS rag_chunks_embedding_idx ON rag_chunks USING ivfflat (embedding vector_cosine_ops) WITH (lists = 100)")
    # alert_rules
    op.create_table(
        'alert_rules',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        # NULL project_id appears to mean a global rule — confirm with the
        # alert service before relying on it.
        sa.Column('project_id', UUID(as_uuid=True), sa.ForeignKey('projects.id'), nullable=True),
        sa.Column('rule_name', sa.String(100), nullable=False),
        sa.Column('condition', JSONB, nullable=True),
        sa.Column('channels', JSONB, nullable=True),
        sa.Column('recipients', JSONB, nullable=True),
        sa.Column('is_active', sa.Boolean, nullable=False, server_default='true'),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # work_type_library
    op.create_table(
        'work_type_library',
        sa.Column('id', UUID(as_uuid=True), primary_key=True, server_default=sa.text('gen_random_uuid()')),
        sa.Column('code', sa.String(50), unique=True, nullable=False),
        sa.Column('name', sa.String(100), nullable=False),
        sa.Column('category', sa.String(50), nullable=False),
        sa.Column('weather_constraints', JSONB, nullable=True),
        sa.Column('default_checklist', JSONB, nullable=True),
        sa.Column('is_system', sa.Boolean, nullable=False, server_default='true'),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
    )
    # Seed default work types (idempotent via ON CONFLICT DO NOTHING).
    op.execute("""
        INSERT INTO work_type_library (code, name, category, weather_constraints, is_system) VALUES
        ('CONCRETE', '콘크리트 타설', 'concrete', '{"min_temp": 5, "no_rain": true}', true),
        ('REBAR', '철근 공사', 'concrete', '{"no_rain": false}', true),
        ('FORMWORK', '거푸집 공사', 'concrete', '{"max_wind": 14}', true),
        ('HIGH_WORK', '고소 작업', 'safety', '{"max_wind": 10}', true),
        ('CRANE', '크레인 작업', 'safety', '{"max_wind": 10}', true),
        ('EARTHWORK', '토공 (절토/성토)', 'earthwork', '{"no_rain": true}', true),
        ('EXCAVATION', '굴착 공사', 'earthwork', '{"no_rain": true}', true),
        ('PIPE_BURIAL', '관로 매설', 'utilities', '{"no_rain": false}', true),
        ('ASPHALT', '아스팔트 포장', 'road', '{"min_temp": 10, "no_rain": true}', true),
        ('COMPACTION', '다짐 공사', 'earthwork', '{"no_rain": true}', true)
        ON CONFLICT (code) DO NOTHING
    """)
def downgrade() -> None:
    """Tear down the Phase 1 schema: tables in reverse creation order,
    then the enum types that ``drop_table`` does not remove."""
    tables = (
        'work_type_library', 'alert_rules', 'rag_chunks', 'rag_sources',
        'permit_items', 'weather_alerts', 'weather_data', 'quality_tests',
        'inspection_requests', 'reports', 'daily_report_photos',
        'daily_reports', 'task_dependencies', 'tasks', 'wbs_items',
        'projects', 'client_profiles', 'users',
    )
    for table_name in tables:
        op.drop_table(table_name)
    # Drop enums — PostgreSQL enum types live independently of their tables.
    enum_types = (
        'user_role', 'construction_type', 'project_status', 'dependency_type',
        'input_source', 'daily_report_status', 'report_type', 'report_status',
        'inspection_result', 'inspection_status', 'quality_result',
        'forecast_type', 'alert_severity', 'permit_status', 'rag_source_type',
    )
    for type_name in enum_types:
        op.execute(f"DROP TYPE IF EXISTS {type_name}")
View File
View File
+74
View File
@@ -0,0 +1,74 @@
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.security import OAuth2PasswordRequestForm
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from typing import Annotated
from app.core.database import get_db
from app.core.security import verify_password, get_password_hash, create_access_token, create_refresh_token, decode_token
from app.models.user import User
from app.schemas.user import UserCreate, UserResponse, TokenResponse
from app.deps import CurrentUser
router = APIRouter(prefix="/auth", tags=["인증"])
@router.post("/register", response_model=UserResponse, status_code=status.HTTP_201_CREATED)
async def register(data: UserCreate, db: Annotated[AsyncSession, Depends(get_db)]):
    """Register a new user account.

    Raises HTTP 400 when the email is already taken. The SELECT below is
    only an advisory pre-check: two concurrent registrations can both pass
    it, so the unique constraint on users.email is the real authority and
    its IntegrityError is translated into the same 400 response.
    """
    from sqlalchemy.exc import IntegrityError  # local: keep module imports untouched

    result = await db.execute(select(User).where(User.email == data.email))
    if result.scalar_one_or_none():
        raise HTTPException(status_code=400, detail="이미 등록된 이메일입니다")
    user = User(
        email=data.email,
        hashed_password=get_password_hash(data.password),
        name=data.name,
        role=data.role,
        phone=data.phone,
    )
    db.add(user)
    try:
        await db.commit()
    except IntegrityError:
        # Lost a race with a concurrent registration for the same email.
        await db.rollback()
        raise HTTPException(status_code=400, detail="이미 등록된 이메일입니다")
    await db.refresh(user)
    return user
@router.post("/login", response_model=TokenResponse)
async def login(
    form_data: Annotated[OAuth2PasswordRequestForm, Depends()],
    db: Annotated[AsyncSession, Depends(get_db)],
):
    """Authenticate with email/password and issue an access/refresh token pair."""
    stmt = select(User).where(
        User.email == form_data.username,
        User.is_active == True,  # noqa: E712 — SQL column comparison, not Python bool
    )
    account = (await db.execute(stmt)).scalar_one_or_none()
    # Same 401 for unknown email and wrong password — no account enumeration.
    credentials_ok = account is not None and verify_password(
        form_data.password, account.hashed_password
    )
    if not credentials_ok:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="이메일 또는 비밀번호가 올바르지 않습니다",
            headers={"WWW-Authenticate": "Bearer"},
        )
    return TokenResponse(
        access_token=create_access_token(str(account.id)),
        refresh_token=create_refresh_token(str(account.id)),
        user=UserResponse.model_validate(account),
    )
@router.post("/refresh", response_model=TokenResponse)
async def refresh_token(refresh_token_str: str, db: Annotated[AsyncSession, Depends(get_db)]):
    """Exchange a valid refresh token for a fresh access/refresh token pair.

    NOTE(review): a bare ``str`` parameter is treated by FastAPI as a QUERY
    parameter, so the refresh token travels in the URL (and hence in access
    logs/proxies). Confirm whether this should be a request-body field.
    """
    payload = decode_token(refresh_token_str)
    # Reject tokens that fail to decode or are not refresh-type (an access
    # token must not be replayable here).
    if not payload or payload.get("type") != "refresh":
        raise HTTPException(status_code=401, detail="유효하지 않은 리프레시 토큰입니다")
    # Re-check the user still exists and is active before minting new tokens.
    result = await db.execute(select(User).where(User.id == payload["sub"], User.is_active == True))
    user = result.scalar_one_or_none()
    if not user:
        raise HTTPException(status_code=401, detail="사용자를 찾을 수 없습니다")
    return TokenResponse(
        access_token=create_access_token(str(user.id)),
        refresh_token=create_refresh_token(str(user.id)),
        user=UserResponse.model_validate(user),
    )
@router.get("/me", response_model=UserResponse)
async def get_me(current_user: CurrentUser):
    """Return the profile of the currently authenticated user."""
    me = current_user
    return me
+107
View File
@@ -0,0 +1,107 @@
import uuid
from datetime import date
from fastapi import APIRouter, HTTPException, status
from sqlalchemy import select
from app.deps import CurrentUser, DB
from app.models.daily_report import DailyReport, InputSource
from app.models.project import Project
from app.schemas.daily_report import (
DailyReportCreate, DailyReportUpdate, DailyReportGenerateRequest, DailyReportResponse
)
from app.services.daily_report_gen import generate_work_content
router = APIRouter(prefix="/projects/{project_id}/daily-reports", tags=["작업일보"])
async def _get_project_or_404(project_id: uuid.UUID, db: DB) -> Project:
    """Fetch a project by id, raising HTTP 404 when it does not exist."""
    query = select(Project).where(Project.id == project_id)
    project = (await db.execute(query)).scalar_one_or_none()
    if project is None:
        raise HTTPException(status_code=404, detail="프로젝트를 찾을 수 없습니다")
    return project
@router.get("", response_model=list[DailyReportResponse])
async def list_reports(project_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """List all daily reports of the project, newest first."""
    stmt = (
        select(DailyReport)
        .where(DailyReport.project_id == project_id)
        .order_by(DailyReport.report_date.desc())
    )
    rows = await db.execute(stmt)
    return rows.scalars().all()
@router.post("", response_model=DailyReportResponse, status_code=status.HTTP_201_CREATED)
async def create_report(project_id: uuid.UUID, data: DailyReportCreate, db: DB, current_user: CurrentUser):
    """Create a daily report from manually entered (web) data."""
    await _get_project_or_404(project_id, db)
    payload = data.model_dump()
    new_report = DailyReport(project_id=project_id, input_source=InputSource.WEB, **payload)
    db.add(new_report)
    await db.commit()
    await db.refresh(new_report)
    return new_report
@router.post("/generate", response_model=DailyReportResponse, status_code=status.HTTP_201_CREATED)
async def generate_report(project_id: uuid.UUID, data: DailyReportGenerateRequest, db: DB, current_user: CurrentUser):
    """AI-generate daily report content from structured input."""
    project = await _get_project_or_404(project_id, db)
    # Turn the structured fields into prose work content via the AI service.
    work_content = await generate_work_content(
        project_name=project.name,
        report_date=str(data.report_date),
        weather_summary="맑음",  # hard-coded placeholder ("clear") — will be filled from weather data
        temperature_high=None,
        temperature_low=None,
        workers_count=data.workers_count,
        equipment_list=data.equipment_list or [],
        work_items=data.work_items,
        issues=data.issues,
    )
    # Persist the generated report, flagged as AI-generated.
    report = DailyReport(
        project_id=project_id,
        report_date=data.report_date,
        workers_count=data.workers_count,
        equipment_list=data.equipment_list,
        work_content=work_content,
        issues=data.issues,
        input_source=InputSource.WEB,
        ai_generated=True,
    )
    db.add(report)
    await db.commit()
    await db.refresh(report)
    return report
@router.get("/{report_id}", response_model=DailyReportResponse)
async def get_report(project_id: uuid.UUID, report_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """Fetch a single daily report, scoped to the given project."""
    stmt = select(DailyReport).where(
        DailyReport.id == report_id,
        DailyReport.project_id == project_id,
    )
    found = (await db.execute(stmt)).scalar_one_or_none()
    if found is None:
        raise HTTPException(status_code=404, detail="일보를 찾을 수 없습니다")
    return found
@router.put("/{report_id}", response_model=DailyReportResponse)
async def update_report(project_id: uuid.UUID, report_id: uuid.UUID, data: DailyReportUpdate, db: DB, current_user: CurrentUser):
    """Partially update a daily report.

    Only fields the client actually sent are applied; omitted fields keep
    their current values. Raises HTTP 404 when the report is not found in
    this project.
    """
    result = await db.execute(select(DailyReport).where(DailyReport.id == report_id, DailyReport.project_id == project_id))
    report = result.scalar_one_or_none()
    if not report:
        raise HTTPException(status_code=404, detail="일보를 찾을 수 없습니다")
    # exclude_unset (not exclude_none): with exclude_none an explicit null
    # was silently dropped, making it impossible to clear a nullable field.
    for field, value in data.model_dump(exclude_unset=True).items():
        setattr(report, field, value)
    await db.commit()
    await db.refresh(report)
    return report
@router.delete("/{report_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_report(project_id: uuid.UUID, report_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """Delete a daily report scoped to the project; 404 if absent."""
    stmt = select(DailyReport).where(
        DailyReport.id == report_id,
        DailyReport.project_id == project_id,
    )
    target = (await db.execute(stmt)).scalar_one_or_none()
    if target is None:
        raise HTTPException(status_code=404, detail="일보를 찾을 수 없습니다")
    await db.delete(target)
    await db.commit()
+106
View File
@@ -0,0 +1,106 @@
import uuid
from fastapi import APIRouter, HTTPException, status
from sqlalchemy import select
from app.deps import CurrentUser, DB
from app.models.inspection import InspectionRequest
from app.models.project import Project, WBSItem
from app.schemas.inspection import InspectionCreate, InspectionUpdate, InspectionGenerateRequest, InspectionResponse
from app.services.inspection_gen import generate_checklist
router = APIRouter(prefix="/projects/{project_id}/inspections", tags=["검측요청서"])
async def _get_project_or_404(project_id: uuid.UUID, db: DB) -> Project:
    """Load the project row or abort the request with a 404."""
    found = (await db.execute(select(Project).where(Project.id == project_id))).scalar_one_or_none()
    if found is None:
        raise HTTPException(status_code=404, detail="프로젝트를 찾을 수 없습니다")
    return found
@router.get("", response_model=list[InspectionResponse])
async def list_inspections(project_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """List the project's inspection requests, newest requested date first."""
    stmt = (
        select(InspectionRequest)
        .where(InspectionRequest.project_id == project_id)
        .order_by(InspectionRequest.requested_date.desc())
    )
    return (await db.execute(stmt)).scalars().all()
@router.post("", response_model=InspectionResponse, status_code=status.HTTP_201_CREATED)
async def create_inspection(project_id: uuid.UUID, data: InspectionCreate, db: DB, current_user: CurrentUser):
    """Create an inspection request after confirming the project exists."""
    await _get_project_or_404(project_id, db)
    new_request = InspectionRequest(**data.model_dump(), project_id=project_id)
    db.add(new_request)
    await db.commit()
    await db.refresh(new_request)
    return new_request
@router.post("/generate", response_model=InspectionResponse, status_code=status.HTTP_201_CREATED)
async def generate_inspection(project_id: uuid.UUID, data: InspectionGenerateRequest, db: DB, current_user: CurrentUser):
    """AI-generate an inspection checklist and persist the resulting request."""
    project = await _get_project_or_404(project_id, db)
    # The WBS item name is optional context for the checklist generator.
    wbs_name = None
    if data.wbs_item_id:
        wbs_row = (await db.execute(select(WBSItem).where(WBSItem.id == data.wbs_item_id))).scalar_one_or_none()
        if wbs_row is not None:
            wbs_name = wbs_row.name
    checklist = await generate_checklist(
        project_name=project.name,
        inspection_type=data.inspection_type,
        location_detail=data.location_detail,
        requested_date=str(data.requested_date),
        wbs_name=wbs_name,
    )
    new_request = InspectionRequest(
        project_id=project_id,
        wbs_item_id=data.wbs_item_id,
        inspection_type=data.inspection_type,
        requested_date=data.requested_date,
        location_detail=data.location_detail,
        checklist_items=checklist,
        ai_generated=True,
    )
    db.add(new_request)
    await db.commit()
    await db.refresh(new_request)
    return new_request
@router.get("/{inspection_id}", response_model=InspectionResponse)
async def get_inspection(project_id: uuid.UUID, inspection_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """Return one inspection request scoped to the project, or 404."""
    stmt = select(InspectionRequest).where(
        InspectionRequest.id == inspection_id,
        InspectionRequest.project_id == project_id,
    )
    found = (await db.execute(stmt)).scalar_one_or_none()
    if found is None:
        raise HTTPException(status_code=404, detail="검측요청서를 찾을 수 없습니다")
    return found
@router.put("/{inspection_id}", response_model=InspectionResponse)
async def update_inspection(project_id: uuid.UUID, inspection_id: uuid.UUID, data: InspectionUpdate, db: DB, current_user: CurrentUser):
    """Apply the non-None payload fields to an inspection request."""
    stmt = select(InspectionRequest).where(
        InspectionRequest.id == inspection_id,
        InspectionRequest.project_id == project_id,
    )
    target = (await db.execute(stmt)).scalar_one_or_none()
    if target is None:
        raise HTTPException(status_code=404, detail="검측요청서를 찾을 수 없습니다")
    for key, val in data.model_dump(exclude_none=True).items():
        setattr(target, key, val)
    await db.commit()
    await db.refresh(target)
    return target
@router.delete("/{inspection_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_inspection(project_id: uuid.UUID, inspection_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """Delete an inspection request scoped to the project; 404 if absent."""
    stmt = select(InspectionRequest).where(
        InspectionRequest.id == inspection_id,
        InspectionRequest.project_id == project_id,
    )
    target = (await db.execute(stmt)).scalar_one_or_none()
    if target is None:
        raise HTTPException(status_code=404, detail="검측요청서를 찾을 수 없습니다")
    await db.delete(target)
    await db.commit()
+154
View File
@@ -0,0 +1,154 @@
"""
Kakao Chatbot Skill API webhook endpoints.
"""
from fastapi import APIRouter, Request, HTTPException
from sqlalchemy import select
from app.deps import DB
from app.models.user import User
from app.models.project import Project
from app.models.daily_report import DailyReport, InputSource
from app.services.kakao_service import (
detect_intent, parse_daily_report_input, make_help_response,
simple_text, basic_card, KakaoIntent,
)
from app.services.daily_report_gen import generate_work_content
from app.services.rag_service import ask as rag_ask
router = APIRouter(prefix="/kakao", tags=["카카오 챗봇"])
@router.post("/webhook")
async def kakao_webhook(request: Request, db: DB):
    """Kakao Skill entry point: resolve the linked account, then dispatch by intent."""
    body = await request.json()
    payload = body.get("userRequest", {})
    utterance = payload.get("utterance", "")
    user_key = payload.get("user", {}).get("id", "")
    # Look up the active CONAI account linked to this Kakao user key.
    user = None
    if user_key:
        row = await db.execute(select(User).where(User.kakao_user_key == user_key, User.is_active == True))
        user = row.scalar_one_or_none()
    if not user:
        return simple_text(
            "안녕하세요! CONAI 현장 관리 시스템입니다.\n"
            "서비스를 이용하시려면 웹에서 계정을 연결해주세요.\n"
            "📱 conai.app에서 카카오 연동 설정"
        )
    intent = detect_intent(utterance)
    if intent == KakaoIntent.DAILY_REPORT:
        return await _handle_daily_report(utterance, user, db)
    if intent == KakaoIntent.RAG_QUESTION:
        return await _handle_rag_question(utterance, user, db)
    if intent == KakaoIntent.WEATHER:
        return await _handle_weather(user, db)
    # HELP and any unrecognized intent both show usage help.
    return make_help_response()
async def _handle_daily_report(utterance: str, user: User, db: DB) -> dict:
    """Parse a Kakao utterance into a daily report, AI-draft it, and save it.

    Falls back to the raw work-item list when AI generation fails, so the
    user always ends up with a saved report.
    """
    from datetime import date  # local import kept to match the module's style

    # The chatbot operates on the user's single active project.
    project_result = await db.execute(
        select(Project).where(Project.owner_id == user.id, Project.status == "active").limit(1)
    )
    project = project_result.scalar_one_or_none()
    if not project:
        return simple_text("현재 진행 중인 현장이 없습니다. CONAI 웹에서 현장을 등록해주세요.")
    parsed = parse_daily_report_input(utterance)
    if not parsed.get("work_items"):
        return simple_text(
            "작업 내용을 입력해주세요.\n\n"
            "예시:\n"
            "일보: 콘크리트 5명, 철근 3명\n"
            "- 관로매설 50m 완료\n"
            "- 되메우기 작업"
        )
    try:
        work_content = await generate_work_content(
            project_name=project.name,
            report_date=parsed["report_date"],
            weather_summary="맑음",
            temperature_high=None,
            temperature_low=None,
            workers_count=parsed["workers_count"],
            equipment_list=[],
            work_items=parsed["work_items"],
            issues=parsed.get("issues"),
        )
    except Exception:
        # Best-effort fallback: the raw bullet list still makes a usable report.
        work_content = "\n".join(parsed["work_items"])
    report = DailyReport(
        project_id=project.id,
        report_date=date.fromisoformat(parsed["report_date"]),
        workers_count=parsed.get("workers_count"),
        work_content=work_content,
        issues=parsed.get("issues"),
        input_source=InputSource.KAKAO,
        raw_kakao_input=utterance,
        ai_generated=True,
    )
    db.add(report)
    await db.commit()
    workers_text = ", ".join([f"{k} {v}" for k, v in (parsed.get("workers_count") or {}).items()])
    return basic_card(
        title=f"📋 {parsed['report_date']} 작업일보 생성완료",
        description=f"현장: {project.name}\n투입인원: {workers_text or '미기입'}\n\n{work_content[:200]}...",
        buttons=[{"action": "webLink", "label": "일보 확인/수정", "webLinkUrl": f"https://conai.app/projects/{project.id}/reports"}],
    )
async def _handle_rag_question(utterance: str, user: User, db: DB) -> dict:
    """Handle RAG Q&A from Kakao.

    Strips the question prefix, queries the RAG service, and truncates the
    answer so the final message stays under Kakao's size limit.
    """
    question = utterance.replace("질문:", "").replace("질문 ", "").strip()
    if not question:
        return simple_text("질문 내용을 입력해주세요.\n예: 질문: 굴착 5m 흙막이 기준은?")
    try:
        result = await rag_ask(db, question, top_k=3)
        answer = result.get("answer", "답변을 생성할 수 없습니다")
        # Truncate well below Kakao's 2000-char limit to leave room for wrapper text.
        if len(answer) > 900:
            answer = answer[:900] + "...\n\n[전체 답변은 CONAI 웹에서 확인하세요]"
        return simple_text(f"📚 {question}\n\n{answer}\n\n⚠️ 이 답변은 참고용이며 법률 자문이 아닙니다.")
    except Exception:
        # Degrade gracefully: the bot must always return a response payload.
        return simple_text("현재 Q&A 서비스를 이용할 수 없습니다. 잠시 후 다시 시도해주세요.")
async def _handle_weather(user: User, db: DB) -> dict:
    """Summarize unacknowledged upcoming weather alerts for the user's active project."""
    from app.models.weather import WeatherAlert
    from datetime import date

    project = (
        await db.execute(
            select(Project).where(Project.owner_id == user.id, Project.status == "active").limit(1)
        )
    ).scalar_one_or_none()
    if project is None:
        return simple_text("진행 중인 현장이 없습니다.")
    alerts = (
        await db.execute(
            select(WeatherAlert)
            .where(WeatherAlert.project_id == project.id, WeatherAlert.alert_date >= date.today(), WeatherAlert.is_acknowledged == False)
            .limit(5)
        )
    ).scalars().all()
    if not alerts:
        return simple_text(f"🌤 {project.name}\n\n현재 날씨 경보가 없습니다.")
    lines = [f"⚠️ {a.alert_date}: {a.message}" for a in alerts]
    return simple_text(f"🌦 {project.name} 날씨 경보\n\n" + "\n".join(lines))
+94
View File
@@ -0,0 +1,94 @@
import uuid
from fastapi import APIRouter, HTTPException, status
from sqlalchemy import select
from app.deps import CurrentUser, DB
from app.models.permit import PermitItem, PermitStatus
from app.models.project import Project
from pydantic import BaseModel
from datetime import date, datetime
class PermitCreate(BaseModel):
    """Payload for registering a permit checklist item on a project."""
    permit_type: str  # kind of permit/approval being tracked
    authority: str | None = None  # issuing agency, if known
    required: bool = True  # whether this permit is mandatory
    deadline: date | None = None  # target date for obtaining the permit
    notes: str | None = None
    sort_order: int = 0  # display ordering within the checklist
class PermitUpdate(BaseModel):
    """Partial-update payload; None fields are left unchanged by the handler."""
    status: PermitStatus | None = None
    submitted_date: date | None = None
    approved_date: date | None = None
    notes: str | None = None
    deadline: date | None = None
class PermitResponse(BaseModel):
    """Serialized permit checklist item returned by the permits endpoints."""
    id: uuid.UUID
    project_id: uuid.UUID
    permit_type: str
    authority: str | None
    required: bool
    deadline: date | None
    status: PermitStatus
    submitted_date: date | None
    approved_date: date | None
    notes: str | None
    sort_order: int
    created_at: datetime
    model_config = {"from_attributes": True}  # allow validation from ORM objects
router = APIRouter(prefix="/projects/{project_id}/permits", tags=["인허가 체크리스트"])
async def _get_project_or_404(project_id: uuid.UUID, db: DB) -> Project:
    """Load the project row or abort the request with a 404."""
    found = (await db.execute(select(Project).where(Project.id == project_id))).scalar_one_or_none()
    if found is None:
        raise HTTPException(status_code=404, detail="프로젝트를 찾을 수 없습니다")
    return found
@router.get("", response_model=list[PermitResponse])
async def list_permits(project_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """List the project's permit checklist items in display order."""
    stmt = select(PermitItem).where(PermitItem.project_id == project_id).order_by(PermitItem.sort_order)
    return (await db.execute(stmt)).scalars().all()
@router.post("", response_model=PermitResponse, status_code=status.HTTP_201_CREATED)
async def create_permit(project_id: uuid.UUID, data: PermitCreate, db: DB, current_user: CurrentUser):
    """Create a permit checklist item after confirming the project exists."""
    await _get_project_or_404(project_id, db)
    new_item = PermitItem(**data.model_dump(), project_id=project_id)
    db.add(new_item)
    await db.commit()
    await db.refresh(new_item)
    return new_item
@router.put("/{permit_id}", response_model=PermitResponse)
async def update_permit(project_id: uuid.UUID, permit_id: uuid.UUID, data: PermitUpdate, db: DB, current_user: CurrentUser):
    """Apply the non-None payload fields to a permit checklist item."""
    stmt = select(PermitItem).where(PermitItem.id == permit_id, PermitItem.project_id == project_id)
    target = (await db.execute(stmt)).scalar_one_or_none()
    if target is None:
        raise HTTPException(status_code=404, detail="인허가 항목을 찾을 수 없습니다")
    for key, val in data.model_dump(exclude_none=True).items():
        setattr(target, key, val)
    await db.commit()
    await db.refresh(target)
    return target
@router.delete("/{permit_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_permit(project_id: uuid.UUID, permit_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """Delete a permit checklist item scoped to the project; 404 if absent."""
    stmt = select(PermitItem).where(PermitItem.id == permit_id, PermitItem.project_id == project_id)
    target = (await db.execute(stmt)).scalar_one_or_none()
    if target is None:
        raise HTTPException(status_code=404, detail="인허가 항목을 찾을 수 없습니다")
    await db.delete(target)
    await db.commit()
+161
View File
@@ -0,0 +1,161 @@
import uuid
from fastapi import APIRouter, HTTPException, status
from sqlalchemy import select
from sqlalchemy.orm import selectinload
from app.deps import CurrentUser, DB
from app.models.project import Project, WBSItem
from app.schemas.project import ProjectCreate, ProjectUpdate, ProjectResponse, WBSItemCreate, WBSItemResponse
router = APIRouter(prefix="/projects", tags=["프로젝트"])
@router.get("", response_model=list[ProjectResponse])
async def list_projects(db: DB, current_user: CurrentUser):
    """Return every project, newest first.

    NOTE(review): no owner filter is applied — presumably any authenticated
    user may see all projects; confirm that is intended.
    """
    rows = await db.execute(select(Project).order_by(Project.created_at.desc()))
    return rows.scalars().all()
@router.post("", response_model=ProjectResponse, status_code=status.HTTP_201_CREATED)
async def create_project(data: ProjectCreate, db: DB, current_user: CurrentUser):
    """Create a project owned by the current user.

    Rejects duplicate project codes with a 400 and derives the KMA weather
    grid from the site coordinates when both are provided.
    """
    # Check for duplicate code before inserting.
    existing = await db.execute(select(Project).where(Project.code == data.code))
    if existing.scalar_one_or_none():
        raise HTTPException(status_code=400, detail=f"프로젝트 코드 '{data.code}'가 이미 존재합니다")
    project = Project(**data.model_dump(), owner_id=current_user.id)
    # Auto-compute KMA grid from lat/lng. Explicit None checks: a coordinate
    # of 0.0 is valid but falsy, so truthiness tests would silently skip it.
    if data.location_lat is not None and data.location_lng is not None:
        grid_x, grid_y = _latlon_to_kma_grid(data.location_lat, data.location_lng)
        project.weather_grid_x = grid_x
        project.weather_grid_y = grid_y
    db.add(project)
    await db.commit()
    await db.refresh(project)
    return project
@router.get("/{project_id}", response_model=ProjectResponse)
async def get_project(project_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """Return a single project by id, or 404."""
    found = (await db.execute(select(Project).where(Project.id == project_id))).scalar_one_or_none()
    if found is None:
        raise HTTPException(status_code=404, detail="프로젝트를 찾을 수 없습니다")
    return found
@router.put("/{project_id}", response_model=ProjectResponse)
async def update_project(project_id: uuid.UUID, data: ProjectUpdate, db: DB, current_user: CurrentUser):
    """Partially update a project; non-None payload fields are applied.

    Recomputes the KMA weather grid when either coordinate was supplied and
    the project ends up with a complete lat/lng pair. Raises 404 when the
    project does not exist.
    """
    result = await db.execute(select(Project).where(Project.id == project_id))
    project = result.scalar_one_or_none()
    if not project:
        raise HTTPException(status_code=404, detail="프로젝트를 찾을 수 없습니다")
    for field, value in data.model_dump(exclude_none=True).items():
        setattr(project, field, value)
    # Recompute grid if location changed. Explicit None checks: 0.0 is a
    # valid coordinate but falsy, so truthiness tests would skip it.
    location_changed = data.location_lat is not None or data.location_lng is not None
    if location_changed and project.location_lat is not None and project.location_lng is not None:
        grid_x, grid_y = _latlon_to_kma_grid(project.location_lat, project.location_lng)
        project.weather_grid_x = grid_x
        project.weather_grid_y = grid_y
    await db.commit()
    await db.refresh(project)
    return project
@router.delete("/{project_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_project(project_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """Delete a project by id; 404 if absent."""
    found = (await db.execute(select(Project).where(Project.id == project_id))).scalar_one_or_none()
    if found is None:
        raise HTTPException(status_code=404, detail="프로젝트를 찾을 수 없습니다")
    await db.delete(found)
    await db.commit()
# WBS endpoints
@router.get("/{project_id}/wbs", response_model=list[WBSItemResponse])
async def get_wbs(project_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """Return the WBS tree: top-level items with children eagerly loaded.

    NOTE: the chained selectinload eagerly loads exactly two levels of
    children; deeper levels would trigger lazy loads.
    """
    stmt = (
        select(WBSItem)
        .where(WBSItem.project_id == project_id, WBSItem.parent_id == None)
        .options(selectinload(WBSItem.children).selectinload(WBSItem.children))
        .order_by(WBSItem.sort_order)
    )
    return (await db.execute(stmt)).scalars().all()
@router.post("/{project_id}/wbs", response_model=WBSItemResponse, status_code=status.HTTP_201_CREATED)
async def create_wbs_item(project_id: uuid.UUID, data: WBSItemCreate, db: DB, current_user: CurrentUser):
    """Create a WBS item under the given project.

    Verifies the project exists first so a missing project yields a clean
    404 (consistent with the sibling routers) instead of a foreign-key
    error bubbling up as a 500.
    """
    project_check = await db.execute(select(Project).where(Project.id == project_id))
    if project_check.scalar_one_or_none() is None:
        raise HTTPException(status_code=404, detail="프로젝트를 찾을 수 없습니다")
    item = WBSItem(**data.model_dump(), project_id=project_id)
    db.add(item)
    await db.commit()
    await db.refresh(item)
    return item
@router.put("/{project_id}/wbs/{item_id}", response_model=WBSItemResponse)
async def update_wbs_item(project_id: uuid.UUID, item_id: uuid.UUID, data: WBSItemCreate, db: DB, current_user: CurrentUser):
    """Apply the non-None payload fields to a WBS item scoped to the project."""
    stmt = select(WBSItem).where(WBSItem.id == item_id, WBSItem.project_id == project_id)
    target = (await db.execute(stmt)).scalar_one_or_none()
    if target is None:
        raise HTTPException(status_code=404, detail="WBS 항목을 찾을 수 없습니다")
    for key, val in data.model_dump(exclude_none=True).items():
        setattr(target, key, val)
    await db.commit()
    await db.refresh(target)
    return target
@router.delete("/{project_id}/wbs/{item_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_wbs_item(project_id: uuid.UUID, item_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """Delete a WBS item scoped to the project; 404 if absent."""
    stmt = select(WBSItem).where(WBSItem.id == item_id, WBSItem.project_id == project_id)
    target = (await db.execute(stmt)).scalar_one_or_none()
    if target is None:
        raise HTTPException(status_code=404, detail="WBS 항목을 찾을 수 없습니다")
    await db.delete(target)
    await db.commit()
def _latlon_to_kma_grid(lat: float, lng: float) -> tuple[int, int]:
"""Convert latitude/longitude to KMA forecast grid coordinates (Lambert Conformal Conic)."""
import math
RE = 6371.00877 # Earth radius (km)
GRID = 5.0 # Grid spacing (km)
SLAT1 = 30.0 # Standard latitude 1
SLAT2 = 60.0 # Standard latitude 2
OLON = 126.0 # Reference longitude
OLAT = 38.0 # Reference latitude
XO = 43 # Reference X
YO = 136 # Reference Y
DEGRAD = math.pi / 180.0
re = RE / GRID
slat1 = SLAT1 * DEGRAD
slat2 = SLAT2 * DEGRAD
olon = OLON * DEGRAD
olat = OLAT * DEGRAD
sn = math.tan(math.pi * 0.25 + slat2 * 0.5) / math.tan(math.pi * 0.25 + slat1 * 0.5)
sn = math.log(math.cos(slat1) / math.cos(slat2)) / math.log(sn)
sf = math.tan(math.pi * 0.25 + slat1 * 0.5)
sf = (sf ** sn) * math.cos(slat1) / sn
ro = math.tan(math.pi * 0.25 + olat * 0.5)
ro = re * sf / (ro ** sn)
ra = math.tan(math.pi * 0.25 + lat * DEGRAD * 0.5)
ra = re * sf / (ra ** sn)
theta = lng * DEGRAD - olon
if theta > math.pi:
theta -= 2.0 * math.pi
if theta < -math.pi:
theta += 2.0 * math.pi
theta *= sn
x = int(ra * math.sin(theta) + XO + 0.5)
y = int(ro - ra * math.cos(theta) + YO + 0.5)
return x, y
+73
View File
@@ -0,0 +1,73 @@
import uuid
from fastapi import APIRouter, HTTPException, status, UploadFile, File
from sqlalchemy import select, func
from app.deps import CurrentUser, DB
from app.models.rag import RagSource, RagChunk
from app.schemas.rag import RagAskRequest, RagAskResponse, RagSourceCreate, RagSourceResponse, RagSource as RagSourceSchema
from app.services.rag_service import ask
router = APIRouter(prefix="/rag", tags=["법규/시방서 Q&A (RAG)"])
@router.post("/ask", response_model=RagAskResponse)
async def ask_question(data: RagAskRequest, db: DB, current_user: CurrentUser):
    """Ask a question about construction laws and specifications."""
    source_types = None
    if data.source_types:
        source_types = [st.value for st in data.source_types]
    result = await ask(db, data.question, data.top_k, source_types)
    sources = []
    for s in result.get("sources", []):
        sources.append(
            RagSourceSchema(
                id=uuid.UUID(s["id"]),
                title=s["title"],
                source_type=s["source_type"],
                chunk_content=s["content"][:500],  # truncated to keep the payload small
                relevance_score=s["relevance_score"],
            )
        )
    return RagAskResponse(question=result["question"], answer=result["answer"], sources=sources)
@router.get("/sources", response_model=list[RagSourceResponse])
async def list_sources(db: DB, current_user: CurrentUser):
    """List all indexed RAG sources together with their chunk counts."""
    stmt = (
        select(RagSource, func.count(RagChunk.id).label("chunk_count"))
        .outerjoin(RagChunk, RagChunk.source_id == RagSource.id)
        .group_by(RagSource.id)
        .order_by(RagSource.created_at.desc())
    )
    rows = (await db.execute(stmt)).fetchall()
    responses = []
    for row in rows:
        src = row.RagSource
        responses.append(
            RagSourceResponse(
                id=src.id,
                title=src.title,
                source_type=src.source_type,
                source_url=src.source_url,
                chunk_count=row.chunk_count,
                created_at=src.created_at,
            )
        )
    return responses
@router.post("/sources", response_model=RagSourceResponse, status_code=status.HTTP_201_CREATED)
async def create_source(data: RagSourceCreate, db: DB, current_user: CurrentUser):
    """Register a new RAG source (metadata only; content indexed separately)."""
    source = RagSource(**data.model_dump())
    db.add(source)
    await db.commit()
    await db.refresh(source)
    # A brand-new source has no indexed chunks yet.
    return RagSourceResponse(
        id=source.id,
        title=source.title,
        source_type=source.source_type,
        source_url=source.source_url,
        chunk_count=0,
        created_at=source.created_at,
    )
+144
View File
@@ -0,0 +1,144 @@
import uuid
from datetime import date
from fastapi import APIRouter, HTTPException, status
from sqlalchemy import select, func
from app.deps import CurrentUser, DB
from app.models.report import Report, ReportType
from app.models.daily_report import DailyReport
from app.models.weather import WeatherAlert
from app.models.project import Project
from app.schemas.report import ReportGenerateRequest, ReportResponse
from app.services.report_gen import generate_weekly_report, generate_monthly_report
router = APIRouter(prefix="/projects/{project_id}/reports", tags=["공정보고서"])
# Report schemas (inline for simplicity)
from pydantic import BaseModel
from app.models.report import ReportType, ReportStatus
class ReportGenerateRequest(BaseModel):
    """Request body for AI report generation over a date range.

    NOTE(review): this shadows the ReportGenerateRequest imported from
    app.schemas.report above — confirm which definition is canonical.
    """
    report_type: ReportType  # weekly or monthly
    period_start: date
    period_end: date
class ReportResponse(BaseModel):
    """Serialized progress report, including the AI draft text when present.

    NOTE(review): this shadows the ReportResponse imported from
    app.schemas.report above — confirm which definition is canonical.
    """
    id: uuid.UUID
    project_id: uuid.UUID
    report_type: ReportType
    period_start: date
    period_end: date
    ai_draft_text: str | None
    status: ReportStatus
    pdf_s3_key: str | None
    model_config = {"from_attributes": True}  # allow validation from ORM objects
async def _get_project_or_404(project_id: uuid.UUID, db: DB) -> Project:
    """Load the project row or abort the request with a 404."""
    found = (await db.execute(select(Project).where(Project.id == project_id))).scalar_one_or_none()
    if found is None:
        raise HTTPException(status_code=404, detail="프로젝트를 찾을 수 없습니다")
    return found
def _compute_overall_progress(tasks) -> float:
if not tasks:
return 0.0
total = sum(t.progress_pct for t in tasks)
return total / len(tasks)
@router.get("", response_model=list[ReportResponse])
async def list_reports(project_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """List generated reports for the project, most recent period first."""
    stmt = (
        select(Report)
        .where(Report.project_id == project_id)
        .order_by(Report.period_start.desc())
    )
    return (await db.execute(stmt)).scalars().all()
@router.post("/generate", response_model=ReportResponse, status_code=status.HTTP_201_CREATED)
async def generate_report(project_id: uuid.UUID, data: ReportGenerateRequest, db: DB, current_user: CurrentUser):
    """AI-generate weekly or monthly report draft.

    Collects the period's daily reports, computes overall progress from task
    percentages, delegates drafting to the report-generation service (the
    weekly variant also receives the period's weather alerts), and persists
    the resulting draft.
    """
    project = await _get_project_or_404(project_id, db)
    # Get daily reports in period
    daily_result = await db.execute(
        select(DailyReport).where(
            DailyReport.project_id == project_id,
            DailyReport.report_date >= data.period_start,
            DailyReport.report_date <= data.period_end,
        ).order_by(DailyReport.report_date)
    )
    daily_reports = daily_result.scalars().all()
    # Get tasks for progress
    from app.models.task import Task
    tasks_result = await db.execute(select(Task).where(Task.project_id == project_id))
    tasks = tasks_result.scalars().all()
    overall_progress = _compute_overall_progress(tasks)
    if data.report_type == ReportType.WEEKLY:
        # Get weather alerts in period (only the weekly draft uses them)
        alerts_result = await db.execute(
            select(WeatherAlert).where(
                WeatherAlert.project_id == project_id,
                WeatherAlert.alert_date >= data.period_start,
                WeatherAlert.alert_date <= data.period_end,
            )
        )
        weather_alerts = alerts_result.scalars().all()
        ai_text, content_json = await generate_weekly_report(
            project_name=project.name,
            period_start=str(data.period_start),
            period_end=str(data.period_end),
            daily_reports=daily_reports,
            overall_progress_pct=overall_progress,
            weather_alerts=weather_alerts,
        )
    else:
        ai_text, content_json = await generate_monthly_report(
            project_name=project.name,
            period_start=str(data.period_start),
            period_end=str(data.period_end),
            daily_reports=daily_reports,
            overall_progress_pct=overall_progress,
        )
    # Persist the draft; status/pdf fields presumably take model defaults — confirm.
    report = Report(
        project_id=project_id,
        report_type=data.report_type,
        period_start=data.period_start,
        period_end=data.period_end,
        content_json=content_json,
        ai_draft_text=ai_text,
    )
    db.add(report)
    await db.commit()
    await db.refresh(report)
    return report
@router.get("/{report_id}", response_model=ReportResponse)
async def get_report(project_id: uuid.UUID, report_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """Return one generated report scoped to the project, or 404."""
    stmt = select(Report).where(Report.id == report_id, Report.project_id == project_id)
    found = (await db.execute(stmt)).scalar_one_or_none()
    if found is None:
        raise HTTPException(status_code=404, detail="보고서를 찾을 수 없습니다")
    return found
@router.delete("/{report_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_report(project_id: uuid.UUID, report_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """Delete a generated report scoped to the project; 404 if absent."""
    stmt = select(Report).where(Report.id == report_id, Report.project_id == project_id)
    found = (await db.execute(stmt)).scalar_one_or_none()
    if found is None:
        raise HTTPException(status_code=404, detail="보고서를 찾을 수 없습니다")
    await db.delete(found)
    await db.commit()
+145
View File
@@ -0,0 +1,145 @@
import uuid
from datetime import datetime, timezone
from fastapi import APIRouter, HTTPException, status
from sqlalchemy import select
from app.deps import CurrentUser, DB
from app.models.settings import ClientProfile, AlertRule, WorkTypeLibrary
from app.schemas.settings import (
ClientProfileCreate, ClientProfileResponse,
WorkTypeCreate, WorkTypeResponse,
AlertRuleCreate, AlertRuleResponse,
SettingsExport,
)
router = APIRouter(prefix="/settings", tags=["커스텀 설정"])
# Client Profiles
@router.get("/client-profiles", response_model=list[ClientProfileResponse])
async def list_profiles(db: DB, current_user: CurrentUser):
    """List client profiles ordered by name."""
    rows = await db.execute(select(ClientProfile).order_by(ClientProfile.name))
    return rows.scalars().all()
@router.post("/client-profiles", response_model=ClientProfileResponse, status_code=status.HTTP_201_CREATED)
async def create_profile(data: ClientProfileCreate, db: DB, current_user: CurrentUser):
    """Create a client profile from the request payload."""
    new_profile = ClientProfile(**data.model_dump())
    db.add(new_profile)
    await db.commit()
    await db.refresh(new_profile)
    return new_profile
@router.put("/client-profiles/{profile_id}", response_model=ClientProfileResponse)
async def update_profile(profile_id: uuid.UUID, data: ClientProfileCreate, db: DB, current_user: CurrentUser):
    """Apply the non-None payload fields to a client profile."""
    target = (await db.execute(select(ClientProfile).where(ClientProfile.id == profile_id))).scalar_one_or_none()
    if target is None:
        raise HTTPException(status_code=404, detail="발주처 프로파일을 찾을 수 없습니다")
    for key, val in data.model_dump(exclude_none=True).items():
        setattr(target, key, val)
    await db.commit()
    await db.refresh(target)
    return target
@router.delete("/client-profiles/{profile_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_profile(profile_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """Delete a client profile by id; 404 if absent."""
    target = (await db.execute(select(ClientProfile).where(ClientProfile.id == profile_id))).scalar_one_or_none()
    if target is None:
        raise HTTPException(status_code=404, detail="발주처 프로파일을 찾을 수 없습니다")
    await db.delete(target)
    await db.commit()
# Work Types
@router.get("/work-types", response_model=list[WorkTypeResponse])
async def list_work_types(db: DB, current_user: CurrentUser):
    """List work types sorted by category, then name."""
    stmt = select(WorkTypeLibrary).order_by(WorkTypeLibrary.category, WorkTypeLibrary.name)
    return (await db.execute(stmt)).scalars().all()
@router.post("/work-types", response_model=WorkTypeResponse, status_code=status.HTTP_201_CREATED)
async def create_work_type(data: WorkTypeCreate, db: DB, current_user: CurrentUser):
    """Create a user-defined work type (never flagged as a system row)."""
    new_type = WorkTypeLibrary(**data.model_dump(), is_system=False)
    db.add(new_type)
    await db.commit()
    await db.refresh(new_type)
    return new_type
# Alert Rules
@router.get("/alert-rules", response_model=list[AlertRuleResponse])
async def list_alert_rules(db: DB, current_user: CurrentUser):
    """List alert rules, most recently created first."""
    rows = await db.execute(select(AlertRule).order_by(AlertRule.created_at.desc()))
    return rows.scalars().all()
@router.post("/alert-rules", response_model=AlertRuleResponse, status_code=status.HTTP_201_CREATED)
async def create_alert_rule(data: AlertRuleCreate, db: DB, current_user: CurrentUser):
    """Create an alert rule from the request payload."""
    new_rule = AlertRule(**data.model_dump())
    db.add(new_rule)
    await db.commit()
    await db.refresh(new_rule)
    return new_rule
@router.put("/alert-rules/{rule_id}", response_model=AlertRuleResponse)
async def update_alert_rule(rule_id: uuid.UUID, data: AlertRuleCreate, db: DB, current_user: CurrentUser):
    """Apply the non-None payload fields to an alert rule."""
    target = (await db.execute(select(AlertRule).where(AlertRule.id == rule_id))).scalar_one_or_none()
    if target is None:
        raise HTTPException(status_code=404, detail="알림 규칙을 찾을 수 없습니다")
    for key, val in data.model_dump(exclude_none=True).items():
        setattr(target, key, val)
    await db.commit()
    await db.refresh(target)
    return target
# JSON Export / Import
@router.get("/export", response_model=SettingsExport)
async def export_settings(db: DB, current_user: CurrentUser):
    """Export all client profiles, work types, and alert rules as one document."""
    profiles = (await db.execute(select(ClientProfile))).scalars().all()
    work_types = (await db.execute(select(WorkTypeLibrary))).scalars().all()
    rules = (await db.execute(select(AlertRule))).scalars().all()
    return SettingsExport(
        client_profiles=[ClientProfileResponse.model_validate(p) for p in profiles],
        work_types=[WorkTypeResponse.model_validate(wt) for wt in work_types],
        alert_rules=[AlertRuleResponse.model_validate(r) for r in rules],
        exported_at=datetime.now(timezone.utc),
    )
@router.post("/import", status_code=status.HTTP_200_OK)
async def import_settings(data: SettingsExport, db: DB, current_user: CurrentUser):
    """Import settings from JSON. Does NOT overwrite existing records."""
    # Per-category counters reported back to the client.
    imported = {"client_profiles": 0, "work_types": 0, "alert_rules": 0}
    # Client profiles are deduplicated by name.
    for profile in data.client_profiles:
        existing = await db.execute(select(ClientProfile).where(ClientProfile.name == profile.name))
        if not existing.scalar_one_or_none():
            db.add(ClientProfile(
                name=profile.name,
                report_frequency=profile.report_frequency,
                template_config=profile.template_config,
                contact_info=profile.contact_info,
                is_default=profile.is_default,
            ))
            imported["client_profiles"] += 1
    # Work types are deduplicated by code; imported rows are never system rows.
    for wt in data.work_types:
        existing = await db.execute(select(WorkTypeLibrary).where(WorkTypeLibrary.code == wt.code))
        if not existing.scalar_one_or_none():
            db.add(WorkTypeLibrary(
                code=wt.code, name=wt.name, category=wt.category,
                weather_constraints=wt.weather_constraints,
                default_checklist=wt.default_checklist,
                is_system=False,
            ))
            imported["work_types"] += 1
    # NOTE(review): data.alert_rules is accepted and counted in the response
    # dict, but no loop inserts them — alert rules are effectively never
    # imported. Confirm whether this is intentional or a missing loop.
    await db.commit()
    return {"message": "설정을 가져왔습니다", "imported": imported}
+125
View File
@@ -0,0 +1,125 @@
import uuid
from fastapi import APIRouter, HTTPException, status
from sqlalchemy import select
from sqlalchemy.orm import selectinload
from app.deps import CurrentUser, DB
from app.models.task import Task, TaskDependency
from app.models.project import Project
from app.schemas.task import TaskCreate, TaskUpdate, TaskResponse, TaskDependencyCreate, GanttData
from app.services.gantt import compute_cpm
router = APIRouter(prefix="/projects/{project_id}/tasks", tags=["공정관리 (Gantt)"])
async def _get_project_or_404(project_id: uuid.UUID, db: DB):
    """Load the project row or abort the request with a 404."""
    found = (await db.execute(select(Project).where(Project.id == project_id))).scalar_one_or_none()
    if found is None:
        raise HTTPException(status_code=404, detail="프로젝트를 찾을 수 없습니다")
    return found
@router.get("", response_model=list[TaskResponse])
async def list_tasks(project_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """List the project's tasks in display order."""
    stmt = select(Task).where(Task.project_id == project_id).order_by(Task.sort_order)
    return (await db.execute(stmt)).scalars().all()
@router.post("", response_model=TaskResponse, status_code=status.HTTP_201_CREATED)
async def create_task(project_id: uuid.UUID, data: TaskCreate, db: DB, current_user: CurrentUser):
    """Create a task after confirming the project exists."""
    await _get_project_or_404(project_id, db)
    new_task = Task(**data.model_dump(), project_id=project_id)
    db.add(new_task)
    await db.commit()
    await db.refresh(new_task)
    return new_task
@router.get("/gantt", response_model=GanttData)
async def get_gantt(project_id: uuid.UUID, db: DB, current_user: CurrentUser):
    """Returns tasks with CPM computed values.

    Loads all project tasks plus the dependencies whose predecessor belongs
    to the project, runs the CPM service, writes the computed schedule
    fields back onto the tasks, and returns the Gantt payload.
    """
    tasks_result = await db.execute(select(Task).where(Task.project_id == project_id).order_by(Task.sort_order))
    tasks = tasks_result.scalars().all()
    # Dependencies are selected by predecessor membership; since every task
    # of the project is in the list, all intra-project edges are covered.
    deps_result = await db.execute(
        select(TaskDependency).where(
            TaskDependency.predecessor_id.in_([t.id for t in tasks])
        )
    )
    deps = deps_result.scalars().all()
    # Run CPM
    cpm_result = compute_cpm(tasks, deps)
    # NOTE(review): compute_cpm presumably returns (per-task dict, duration);
    # this guard silently falls back to "no CPM data" for any other shape —
    # confirm the service's actual return contract.
    if cpm_result and isinstance(cpm_result, tuple):
        cpm_data, project_duration = cpm_result
    else:
        cpm_data, project_duration = {}, None
    # Update tasks with CPM results
    # NOTE(review): this GET persists the recomputed CPM fields (a write
    # side effect on a read endpoint) — confirm that is intended.
    critical_ids = []
    for task in tasks:
        if task.id in cpm_data:
            data = cpm_data[task.id]
            task.early_start = data["early_start"]
            task.early_finish = data["early_finish"]
            task.late_start = data["late_start"]
            task.late_finish = data["late_finish"]
            task.total_float = data["total_float"]
            task.is_critical = data["is_critical"]
            if data["is_critical"]:
                critical_ids.append(task.id)
    await db.commit()
    return GanttData(
        tasks=[TaskResponse.model_validate(t) for t in tasks],
        critical_path=critical_ids,
        project_duration_days=project_duration,
    )
@router.put("/{task_id}", response_model=TaskResponse)
async def update_task(project_id: uuid.UUID, task_id: uuid.UUID, data: TaskUpdate, db: DB, current_user: CurrentUser):
result = await db.execute(select(Task).where(Task.id == task_id, Task.project_id == project_id))
task = result.scalar_one_or_none()
if not task:
raise HTTPException(status_code=404, detail="태스크를 찾을 수 없습니다")
for field, value in data.model_dump(exclude_none=True).items():
setattr(task, field, value)
await db.commit()
await db.refresh(task)
return task
@router.delete("/{task_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_task(project_id: uuid.UUID, task_id: uuid.UUID, db: DB, current_user: CurrentUser):
result = await db.execute(select(Task).where(Task.id == task_id, Task.project_id == project_id))
task = result.scalar_one_or_none()
if not task:
raise HTTPException(status_code=404, detail="태스크를 찾을 수 없습니다")
await db.delete(task)
await db.commit()
@router.post("/{task_id}/dependencies", response_model=dict, status_code=status.HTTP_201_CREATED)
async def add_dependency(project_id: uuid.UUID, task_id: uuid.UUID, data: TaskDependencyCreate, db: DB, current_user: CurrentUser):
dep = TaskDependency(
predecessor_id=data.predecessor_id,
successor_id=data.successor_id,
dependency_type=data.dependency_type,
lag_days=data.lag_days,
)
db.add(dep)
await db.commit()
return {"message": "의존관계가 추가되었습니다"}
@router.delete("/{task_id}/dependencies/{dep_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_dependency(project_id: uuid.UUID, task_id: uuid.UUID, dep_id: uuid.UUID, db: DB, current_user: CurrentUser):
result = await db.execute(select(TaskDependency).where(TaskDependency.id == dep_id))
dep = result.scalar_one_or_none()
if not dep:
raise HTTPException(status_code=404, detail="의존관계를 찾을 수 없습니다")
await db.delete(dep)
await db.commit()
+136
View File
@@ -0,0 +1,136 @@
import uuid
from datetime import datetime, timezone
from fastapi import APIRouter, HTTPException
from sqlalchemy import select
from app.deps import CurrentUser, DB
from app.models.project import Project
from app.models.weather import WeatherData, WeatherAlert, ForecastType
from app.models.task import Task
from app.schemas.weather import WeatherDataResponse, WeatherAlertResponse, WeatherForecastSummary
from app.services.weather_service import fetch_short_term_forecast, evaluate_weather_alerts
router = APIRouter(prefix="/projects/{project_id}/weather", tags=["날씨 연동"])
async def _get_project_or_404(project_id: uuid.UUID, db: DB) -> Project:
    """Load the project or raise HTTP 404."""
    row = await db.execute(select(Project).where(Project.id == project_id))
    project = row.scalar_one_or_none()
    if project is None:
        raise HTTPException(status_code=404, detail="프로젝트를 찾을 수 없습니다")
    return project
@router.get("", response_model=WeatherForecastSummary)
async def get_weather(project_id: uuid.UUID, db: DB, current_user: CurrentUser):
"""Get weather forecast and active alerts for a project."""
from datetime import date
today = date.today()
forecast_result = await db.execute(
select(WeatherData)
.where(WeatherData.project_id == project_id, WeatherData.forecast_date >= today)
.order_by(WeatherData.forecast_date)
)
forecast = forecast_result.scalars().all()
alerts_result = await db.execute(
select(WeatherAlert)
.where(WeatherAlert.project_id == project_id, WeatherAlert.alert_date >= today, WeatherAlert.is_acknowledged == False)
.order_by(WeatherAlert.alert_date)
)
alerts = alerts_result.scalars().all()
return WeatherForecastSummary(
forecast=[WeatherDataResponse.model_validate(f) for f in forecast],
active_alerts=[WeatherAlertResponse.model_validate(a) for a in alerts],
)
@router.post("/refresh")
async def refresh_weather(project_id: uuid.UUID, db: DB, current_user: CurrentUser):
"""Fetch fresh weather data from KMA and evaluate alerts."""
project = await _get_project_or_404(project_id, db)
if not project.weather_grid_x or not project.weather_grid_y:
raise HTTPException(status_code=400, detail="프로젝트에 위치 정보(위경도)가 설정되지 않았습니다")
forecasts = await fetch_short_term_forecast(project.weather_grid_x, project.weather_grid_y)
# Save/update weather data
for fc in forecasts:
from datetime import date
fc_date = date.fromisoformat(fc["date"])
existing = await db.execute(
select(WeatherData).where(
WeatherData.project_id == project_id,
WeatherData.forecast_date == fc_date,
WeatherData.forecast_type == ForecastType.SHORT_TERM,
)
)
wd = existing.scalar_one_or_none()
if not wd:
wd = WeatherData(project_id=project_id, forecast_type=ForecastType.SHORT_TERM)
db.add(wd)
wd.forecast_date = fc_date
wd.temperature_high = fc.get("temperature_high")
wd.temperature_low = fc.get("temperature_low")
wd.precipitation_mm = fc.get("precipitation_mm")
wd.wind_speed_ms = fc.get("wind_speed_ms")
wd.weather_code = fc.get("weather_code")
wd.raw_data = fc
wd.fetched_at = datetime.now(timezone.utc)
# Get tasks in upcoming forecast period
from datetime import timedelta
start_date = date.today()
end_date = start_date + timedelta(days=len(forecasts))
tasks_result = await db.execute(
select(Task).where(
Task.project_id == project_id,
Task.planned_start >= start_date,
Task.planned_start <= end_date,
)
)
upcoming_tasks = tasks_result.scalars().all()
# Evaluate and save alerts
for fc in forecasts:
from datetime import date as date_type
fc_date_obj = date_type.fromisoformat(fc["date"])
tasks_on_date = [t for t in upcoming_tasks if t.planned_start and t.planned_start <= fc_date_obj <= (t.planned_end or fc_date_obj)]
new_alerts = evaluate_weather_alerts(fc, tasks_on_date)
for alert_data in new_alerts:
existing_alert = await db.execute(
select(WeatherAlert).where(
WeatherAlert.project_id == project_id,
WeatherAlert.alert_date == fc_date_obj,
WeatherAlert.alert_type == alert_data["alert_type"],
)
)
if not existing_alert.scalar_one_or_none():
alert = WeatherAlert(
project_id=project_id,
task_id=uuid.UUID(alert_data["task_id"]) if alert_data.get("task_id") else None,
alert_date=fc_date_obj,
alert_type=alert_data["alert_type"],
severity=alert_data["severity"],
message=alert_data["message"],
)
db.add(alert)
await db.commit()
return {"message": f"날씨 정보가 업데이트되었습니다 ({len(forecasts)}일치)"}
@router.put("/alerts/{alert_id}/acknowledge")
async def acknowledge_alert(project_id: uuid.UUID, alert_id: uuid.UUID, db: DB, current_user: CurrentUser):
result = await db.execute(select(WeatherAlert).where(WeatherAlert.id == alert_id, WeatherAlert.project_id == project_id))
alert = result.scalar_one_or_none()
if not alert:
raise HTTPException(status_code=404, detail="경보를 찾을 수 없습니다")
alert.is_acknowledged = True
await db.commit()
return {"message": "경보가 확인 처리되었습니다"}
+76
View File
@@ -0,0 +1,76 @@
import secrets
import warnings
from pydantic_settings import BaseSettings, SettingsConfigDict
from typing import Optional
class Settings(BaseSettings):
    """Application configuration loaded from environment variables / .env.

    Unknown env vars are ignored (extra="ignore"); every field can be
    overridden by an environment variable of the same name.
    """

    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", extra="ignore")
    # App
    APP_NAME: str = "CONAI"
    APP_VERSION: str = "1.0.0"
    DEBUG: bool = False
    # Must be set via .env (generate: python -c "import secrets; print(secrets.token_hex(32))")
    SECRET_KEY: str = ""
    ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 * 24  # 24 hours
    REFRESH_TOKEN_EXPIRE_DAYS: int = 30
    # Database (DATABASE_URL_SYNC presumably for sync tooling such as Alembic — confirm)
    DATABASE_URL: str = "postgresql+asyncpg://postgres:password@localhost:5432/conai"
    DATABASE_URL_SYNC: str = "postgresql://postgres:password@localhost:5432/conai"
    # Supabase
    SUPABASE_URL: str = ""
    SUPABASE_ANON_KEY: str = ""
    SUPABASE_SERVICE_KEY: str = ""
    SUPABASE_STORAGE_BUCKET: str = "conai-documents"
    # Claude AI
    ANTHROPIC_API_KEY: str = ""
    CLAUDE_MODEL: str = "claude-sonnet-4-5"
    CLAUDE_MAX_TOKENS: int = 4096
    # Weather API (KMA — Korea Meteorological Administration)
    KMA_API_KEY: str = ""
    KMA_BASE_URL: str = "http://apis.data.go.kr/1360000/VilageFcstInfoService_2.0"
    # Kakao
    KAKAO_APP_KEY: str = ""
    KAKAO_ADMIN_KEY: str = ""
    KAKAO_CHANNEL_ID: str = ""
    # S3 / Storage
    AWS_ACCESS_KEY_ID: Optional[str] = None
    AWS_SECRET_ACCESS_KEY: Optional[str] = None
    AWS_REGION: str = "ap-northeast-2"
    S3_BUCKET: str = "conai-files"
    # CORS
    CORS_ORIGINS: list[str] = ["http://localhost:3000", "https://conai.app"]
    # Embedding (Voyage AI or OpenAI)
    VOYAGE_API_KEY: Optional[str] = None
    OPENAI_API_KEY: Optional[str] = None
    EMBEDDING_MODEL: str = "voyage-3"
    EMBEDDING_DIMENSIONS: int = 1024
settings = Settings()
# Guard against running without a SECRET_KEY:
# - production (DEBUG=False): fail fast at import time
# - development (DEBUG=True): generate an ephemeral key and warn
if not settings.SECRET_KEY:
    if not settings.DEBUG:
        raise ValueError(
            "SECRET_KEY가 설정되지 않았습니다.\n"
            ".env 파일에 SECRET_KEY를 추가하세요:\n"
            "  python -c \"import secrets; print(secrets.token_hex(32))\"\n"
            "위 명령으로 생성한 값을 SECRET_KEY=값 형태로 .env에 입력하세요."
        )
    else:
        # Dev-only ephemeral key: tokens signed with it become invalid on restart.
        settings.SECRET_KEY = secrets.token_hex(32)
        warnings.warn(
            "개발 모드: SECRET_KEY가 자동 생성되었습니다. 운영 환경에서는 .env에 고정 값을 설정하세요.",
            stacklevel=2,
        )
View File
+32
View File
@@ -0,0 +1,32 @@
from collections.abc import AsyncGenerator

from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy.orm import DeclarativeBase

from app.config import settings
# Async engine for the application database; pre-ping drops dead pooled
# connections before use.
engine = create_async_engine(
    settings.DATABASE_URL,
    echo=settings.DEBUG,  # log emitted SQL only in debug mode
    pool_pre_ping=True,
    pool_size=10,
    max_overflow=20,
)
# Session factory. expire_on_commit=False keeps ORM instances readable after
# commit (route handlers return model objects post-commit).
AsyncSessionLocal = async_sessionmaker(
    engine,
    class_=AsyncSession,
    expire_on_commit=False,
    autocommit=False,
    autoflush=False,
)
class Base(DeclarativeBase):
    """Declarative base shared by all ORM models."""
    pass
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI dependency yielding a request-scoped async DB session.

    Fixes the return annotation (an async generator is not ``AsyncSession``)
    and drops the redundant ``finally: await session.close()`` — the
    ``async with`` context manager already closes the session on exit,
    including when an exception propagates through the yield.
    """
    async with AsyncSessionLocal() as session:
        yield session
+38
View File
@@ -0,0 +1,38 @@
from datetime import datetime, timedelta, timezone
from typing import Optional, Any
from jose import JWTError, jwt
from passlib.context import CryptContext
from app.config import settings
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a plaintext password against its stored bcrypt hash."""
    return pwd_context.verify(plain_password, hashed_password)
def get_password_hash(password: str) -> str:
    """Hash a plaintext password with bcrypt (salt included in the output)."""
    return pwd_context.hash(password)
def create_access_token(subject: Any, expires_delta: Optional[timedelta] = None) -> str:
    """Issue a signed JWT access token for *subject* (stringified into "sub")."""
    lifetime = expires_delta or timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
    claims = {
        "exp": datetime.now(timezone.utc) + lifetime,
        "sub": str(subject),
        "type": "access",
    }
    return jwt.encode(claims, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
def create_refresh_token(subject: Any) -> str:
    """Issue a long-lived JWT refresh token for *subject*."""
    claims = {
        "exp": datetime.now(timezone.utc) + timedelta(days=settings.REFRESH_TOKEN_EXPIRE_DAYS),
        "sub": str(subject),
        "type": "refresh",
    }
    return jwt.encode(claims, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
def decode_token(token: str) -> dict:
    """Decode and verify a JWT.

    Returns the claims dict, or an empty dict for an invalid/expired token —
    callers treat a falsy result as an authentication failure.
    """
    try:
        return jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM])
    except JWTError:
        return {}
+44
View File
@@ -0,0 +1,44 @@
import uuid
from pathlib import Path
from typing import Optional
from supabase import create_client, Client
from app.config import settings
def get_supabase() -> Client:
    """Build a Supabase client with the service-role key (server-side credentials)."""
    return create_client(settings.SUPABASE_URL, settings.SUPABASE_SERVICE_KEY)
def upload_file(
    file_bytes: bytes,
    project_id: str,
    file_type: str,
    filename: str,
    content_type: str = "application/octet-stream",
) -> str:
    """Store *file_bytes* in Supabase Storage and return its storage key.

    The key layout is ``{project_id}/{file_type}/{uuid4}{ext}``; the original
    filename only contributes its extension, so uploads never collide.
    """
    unique_name = f"{uuid.uuid4()}{Path(filename).suffix}"
    storage_path = f"{project_id}/{file_type}/{unique_name}"
    bucket = get_supabase().storage.from_(settings.SUPABASE_STORAGE_BUCKET)
    bucket.upload(storage_path, file_bytes, file_options={"content-type": content_type})
    return storage_path
def get_download_url(s3_key: str, expires_in: int = 3600) -> str:
    """Get a presigned download URL.

    Args:
        s3_key: Storage key previously returned by ``upload_file``.
        expires_in: URL validity in seconds (default: 1 hour).
    """
    client = get_supabase()
    response = client.storage.from_(settings.SUPABASE_STORAGE_BUCKET).create_signed_url(
        s3_key, expires_in
    )
    # NOTE(review): the response key casing ("signedURL" vs "signedUrl")
    # differs between supabase-py versions — confirm against the pinned client.
    return response["signedURL"]
def delete_file(s3_key: str) -> None:
    """Remove a single object from the Supabase Storage bucket."""
    bucket = get_supabase().storage.from_(settings.SUPABASE_STORAGE_BUCKET)
    bucket.remove([s3_key])
+56
View File
@@ -0,0 +1,56 @@
from typing import Annotated
from fastapi import Depends, HTTPException, status
from fastapi.security import OAuth2PasswordBearer
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.core.database import get_db
from app.core.security import decode_token
from app.models.user import User
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/v1/auth/login")
async def get_current_user(
    token: Annotated[str, Depends(oauth2_scheme)],
    db: Annotated[AsyncSession, Depends(get_db)],
) -> User:
    """Resolve the Bearer token to an active User, or raise HTTP 401.

    Only tokens of type "access" are accepted — refresh tokens presented
    here are rejected. Inactive users are filtered out at the query level.
    """
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="인증 정보가 유효하지 않습니다",
        headers={"WWW-Authenticate": "Bearer"},
    )
    payload = decode_token(token)
    if not payload or payload.get("type") != "access":
        raise credentials_exception
    # "sub" may be absent (None) — the previous `user_id: str` annotation was wrong.
    user_id = payload.get("sub")
    if not user_id:
        raise credentials_exception
    # .is_(True) renders "IS TRUE"; the previous `== True` trips lint rule E712.
    result = await db.execute(select(User).where(User.id == user_id, User.is_active.is_(True)))
    user = result.scalar_one_or_none()
    if not user:
        raise credentials_exception
    return user
async def get_current_active_user(
    current_user: Annotated[User, Depends(get_current_user)],
) -> User:
    """Pass through the authenticated user, rejecting inactive accounts (400)."""
    if current_user.is_active:
        return current_user
    raise HTTPException(status_code=400, detail="비활성 계정입니다")
async def require_admin(
    current_user: Annotated[User, Depends(get_current_active_user)],
) -> User:
    """Dependency that only lets ADMIN-role users through (403 otherwise)."""
    from app.models.user import UserRole
    if current_user.role == UserRole.ADMIN:
        return current_user
    raise HTTPException(status_code=403, detail="관리자 권한이 필요합니다")
# Convenience type aliases for route signatures:
#   CurrentUser — injects the authenticated, active user
#   DB          — injects a request-scoped AsyncSession
CurrentUser = Annotated[User, Depends(get_current_active_user)]
DB = Annotated[AsyncSession, Depends(get_db)]
+53
View File
@@ -0,0 +1,53 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from contextlib import asynccontextmanager
from app.config import settings
from app.api import auth, projects, tasks, daily_reports, reports, inspections, weather, rag, kakao, permits, settings as settings_router
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan hook; currently a no-op.

    The inline notes mark the intended startup work (seed default work
    types, check DB connectivity) — not implemented yet.
    """
    # Startup: seed default work types, check DB connection
    yield
    # Shutdown: cleanup resources
def create_app() -> FastAPI:
    """Build and fully configure the CONAI FastAPI application."""
    application = FastAPI(
        title="CONAI API",
        description="소형 건설업체를 위한 AI 기반 토목공사 통합관리 플랫폼",
        version=settings.APP_VERSION,
        lifespan=lifespan,
    )
    # CORS: origins come from configuration.
    application.add_middleware(
        CORSMiddleware,
        allow_origins=settings.CORS_ORIGINS,
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )
    # Mount every feature router under the versioned API prefix (order preserved).
    feature_routers = (
        auth.router,
        projects.router,
        tasks.router,
        daily_reports.router,
        reports.router,
        inspections.router,
        weather.router,
        rag.router,
        kakao.router,
        permits.router,
        settings_router.router,
    )
    for feature_router in feature_routers:
        application.include_router(feature_router, prefix="/api/v1")

    @application.get("/health")
    async def health():
        return {"status": "ok", "version": settings.APP_VERSION}

    return application
# Module-level ASGI application instance (the server's entry point).
app = create_app()
+25
View File
@@ -0,0 +1,25 @@
from .user import User
from .project import Project, WBSItem
from .task import Task, TaskDependency
from .daily_report import DailyReport, DailyReportPhoto
from .report import Report
from .inspection import InspectionRequest
from .quality import QualityTest
from .weather import WeatherData, WeatherAlert
from .permit import PermitItem
from .rag import RagSource, RagChunk
from .settings import ClientProfile, AlertRule, WorkTypeLibrary
# Explicit public re-export surface of app.models (one line per source module).
__all__ = [
    "User",
    "Project", "WBSItem",
    "Task", "TaskDependency",
    "DailyReport", "DailyReportPhoto",
    "Report",
    "InspectionRequest",
    "QualityTest",
    "WeatherData", "WeatherAlert",
    "PermitItem",
    "RagSource", "RagChunk",
    "ClientProfile", "AlertRule", "WorkTypeLibrary",
]
+28
View File
@@ -0,0 +1,28 @@
import uuid
from datetime import datetime, timezone
from sqlalchemy import DateTime, func
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy.dialects.postgresql import UUID
from app.core.database import Base
class TimestampMixin:
    """Mixin adding created_at / updated_at audit columns."""

    # Set by the database at INSERT (server_default = now()).
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        nullable=False,
    )
    # Initialized like created_at, then refreshed on each UPDATE via onupdate.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
    )
class UUIDMixin:
    """Mixin adding a client-generated UUID primary key."""

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,  # generated in Python, not by the database
    )
+64
View File
@@ -0,0 +1,64 @@
import uuid
from sqlalchemy import String, Integer, Boolean, Date, Text, ForeignKey, Enum as SAEnum
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.dialects.postgresql import UUID, JSONB
from app.core.database import Base
from app.models.base import TimestampMixin, UUIDMixin
from datetime import datetime
import enum
class ReportStatus(str, enum.Enum):
    """Daily-report lifecycle: draft → confirmed → submitted."""

    DRAFT = "draft"
    CONFIRMED = "confirmed"
    SUBMITTED = "submitted"
class InputSource(str, enum.Enum):
    """Channel through which a daily report entered the system."""

    KAKAO = "kakao"
    WEB = "web"
    API = "api"
class DailyReport(Base, UUIDMixin, TimestampMixin):
    """A site daily work report for a project.

    Authored via web or Kakao (raw message kept in raw_kakao_input), or
    produced by AI (ai_generated). Can be exported to PDF (pdf_s3_key).
    """

    __tablename__ = "daily_reports"
    project_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("projects.id"), nullable=False)
    # NOTE(review): column is Date, so runtime values are datetime.date even
    # though the annotation says str — consider correcting the annotation.
    report_date: Mapped[str] = mapped_column(Date, nullable=False, index=True)
    weather_summary: Mapped[str | None] = mapped_column(String(100), nullable=True)
    temperature_high: Mapped[float | None] = mapped_column(nullable=True)
    temperature_low: Mapped[float | None] = mapped_column(nullable=True)
    workers_count: Mapped[dict | None] = mapped_column(JSONB, nullable=True)  # {"concrete": 5, ...}
    equipment_list: Mapped[list | None] = mapped_column(JSONB, nullable=True)  # [{"type": "backhoe", ...}]
    work_content: Mapped[str | None] = mapped_column(Text, nullable=True)
    issues: Mapped[str | None] = mapped_column(Text, nullable=True)
    input_source: Mapped[InputSource] = mapped_column(
        SAEnum(InputSource, name="input_source"), default=InputSource.WEB, nullable=False
    )
    # Original free-form Kakao message, retained verbatim.
    raw_kakao_input: Mapped[str | None] = mapped_column(Text, nullable=True)
    ai_generated: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    status: Mapped[ReportStatus] = mapped_column(
        SAEnum(ReportStatus, name="daily_report_status"), default=ReportStatus.DRAFT, nullable=False
    )
    # Who confirmed the report and when (nullable while still a draft).
    confirmed_by: Mapped[uuid.UUID | None] = mapped_column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=True)
    confirmed_at: Mapped[datetime | None] = mapped_column(nullable=True)
    # Storage key of the exported PDF, if any.
    pdf_s3_key: Mapped[str | None] = mapped_column(String(500), nullable=True)
    # relationships
    project: Mapped["Project"] = relationship("Project", back_populates="daily_reports")
    photos: Mapped[list["DailyReportPhoto"]] = relationship(
        "DailyReportPhoto", back_populates="daily_report", cascade="all, delete-orphan"
    )
    confirmed_user: Mapped["User | None"] = relationship("User", foreign_keys=[confirmed_by])
class DailyReportPhoto(Base, UUIDMixin, TimestampMixin):
    """A photo attached to a daily report, ordered by sort_order."""

    __tablename__ = "daily_report_photos"
    daily_report_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("daily_reports.id"), nullable=False)
    # Storage key of the image object.
    s3_key: Mapped[str] = mapped_column(String(500), nullable=False)
    caption: Mapped[str | None] = mapped_column(String(200), nullable=True)
    sort_order: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    # relationships
    daily_report: Mapped["DailyReport"] = relationship("DailyReport", back_populates="photos")
+44
View File
@@ -0,0 +1,44 @@
import uuid
from sqlalchemy import String, Boolean, Date, Text, ForeignKey, Enum as SAEnum
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.dialects.postgresql import UUID, JSONB
from app.core.database import Base
from app.models.base import TimestampMixin, UUIDMixin
import enum
class InspectionResult(str, enum.Enum):
    """Outcome recorded by the inspector."""

    PASS = "pass"
    FAIL = "fail"
    CONDITIONAL_PASS = "conditional_pass"
class InspectionStatus(str, enum.Enum):
    """Request lifecycle: draft → sent → completed."""

    DRAFT = "draft"
    SENT = "sent"
    COMPLETED = "completed"
class InspectionRequest(Base, UUIDMixin, TimestampMixin):
    """An inspection request for a project (optionally tied to a WBS item)."""

    __tablename__ = "inspection_requests"
    project_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("projects.id"), nullable=False)
    wbs_item_id: Mapped[uuid.UUID | None] = mapped_column(UUID(as_uuid=True), ForeignKey("wbs_items.id"), nullable=True)
    inspection_type: Mapped[str] = mapped_column(String(50), nullable=False)  # rebar, formwork, pipe_burial, etc.
    # NOTE(review): column is Date → runtime values are datetime.date despite the str annotation.
    requested_date: Mapped[str] = mapped_column(Date, nullable=False)
    location_detail: Mapped[str | None] = mapped_column(String(200), nullable=True)
    # Checklist rows stored as JSON; nullable until filled in.
    checklist_items: Mapped[list | None] = mapped_column(JSONB, nullable=True)
    # Null until the inspection has actually been performed.
    result: Mapped[InspectionResult | None] = mapped_column(
        SAEnum(InspectionResult, name="inspection_result"), nullable=True
    )
    inspector_name: Mapped[str | None] = mapped_column(String(100), nullable=True)
    notes: Mapped[str | None] = mapped_column(Text, nullable=True)
    ai_generated: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    status: Mapped[InspectionStatus] = mapped_column(
        SAEnum(InspectionStatus, name="inspection_status"), default=InspectionStatus.DRAFT, nullable=False
    )
    # Storage key of the exported PDF, if any.
    pdf_s3_key: Mapped[str | None] = mapped_column(String(500), nullable=True)
    # relationships
    project: Mapped["Project"] = relationship("Project", back_populates="inspection_requests")
    wbs_item: Mapped["WBSItem | None"] = relationship("WBSItem")
+36
View File
@@ -0,0 +1,36 @@
import uuid
from sqlalchemy import String, Boolean, Date, Text, Integer, ForeignKey, Enum as SAEnum
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.dialects.postgresql import UUID
from app.core.database import Base
from app.models.base import TimestampMixin, UUIDMixin
import enum
class PermitStatus(str, enum.Enum):
    """Progress of a permit application with the authority."""

    NOT_STARTED = "not_started"
    SUBMITTED = "submitted"
    IN_REVIEW = "in_review"
    APPROVED = "approved"
    REJECTED = "rejected"
class PermitItem(Base, UUIDMixin, TimestampMixin):
    """A permit/approval item tracked per project (e.g. road occupancy permits)."""

    __tablename__ = "permit_items"
    project_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("projects.id"), nullable=False)
    permit_type: Mapped[str] = mapped_column(String(100), nullable=False)  # 도로점용허가, 하천점용허가, etc.
    # Issuing authority (government office handling the permit).
    authority: Mapped[str | None] = mapped_column(String(100), nullable=True)
    required: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    # NOTE(review): Date columns below hold datetime.date at runtime despite str annotations.
    deadline: Mapped[str | None] = mapped_column(Date, nullable=True)
    status: Mapped[PermitStatus] = mapped_column(
        SAEnum(PermitStatus, name="permit_status"), default=PermitStatus.NOT_STARTED, nullable=False
    )
    submitted_date: Mapped[str | None] = mapped_column(Date, nullable=True)
    approved_date: Mapped[str | None] = mapped_column(Date, nullable=True)
    # Storage key of the scanned permit document, if uploaded.
    document_s3_key: Mapped[str | None] = mapped_column(String(500), nullable=True)
    notes: Mapped[str | None] = mapped_column(Text, nullable=True)
    sort_order: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    # relationships
    project: Mapped["Project"] = relationship("Project", back_populates="permit_items")
+79
View File
@@ -0,0 +1,79 @@
import uuid
from sqlalchemy import String, Integer, BigInteger, Date, Float, ForeignKey, Enum as SAEnum, Text
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.dialects.postgresql import UUID
from app.core.database import Base
from app.models.base import TimestampMixin, UUIDMixin
import enum
class ProjectStatus(str, enum.Enum):
    """Overall project lifecycle state."""

    PLANNING = "planning"
    ACTIVE = "active"
    SUSPENDED = "suspended"
    COMPLETED = "completed"
class ConstructionType(str, enum.Enum):
    """Kind of civil-engineering work the project covers."""

    ROAD = "road"
    SEWER = "sewer"
    WATER = "water"
    BRIDGE = "bridge"
    SITE_WORK = "site_work"
    OTHER = "other"
class Project(Base, UUIDMixin, TimestampMixin):
    """A construction project — the aggregate root all other entities hang off."""

    __tablename__ = "projects"
    name: Mapped[str] = mapped_column(String(200), nullable=False)
    # Human-assigned unique project code, used for lookups.
    code: Mapped[str] = mapped_column(String(50), unique=True, nullable=False, index=True)
    client_profile_id: Mapped[uuid.UUID | None] = mapped_column(UUID(as_uuid=True), ForeignKey("client_profiles.id"), nullable=True)
    construction_type: Mapped[ConstructionType] = mapped_column(
        SAEnum(ConstructionType, name="construction_type"), default=ConstructionType.OTHER, nullable=False
    )
    # BigInteger: contract amounts in KRW exceed the 32-bit range.
    contract_amount: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    # NOTE(review): Date columns hold datetime.date at runtime despite the str annotations.
    start_date: Mapped[str | None] = mapped_column(Date, nullable=True)
    end_date: Mapped[str | None] = mapped_column(Date, nullable=True)
    location_address: Mapped[str | None] = mapped_column(Text, nullable=True)
    location_lat: Mapped[float | None] = mapped_column(Float, nullable=True)
    location_lng: Mapped[float | None] = mapped_column(Float, nullable=True)
    # KMA forecast grid coordinates; required by the weather refresh endpoint.
    weather_grid_x: Mapped[int | None] = mapped_column(Integer, nullable=True)
    weather_grid_y: Mapped[int | None] = mapped_column(Integer, nullable=True)
    status: Mapped[ProjectStatus] = mapped_column(
        SAEnum(ProjectStatus, name="project_status"), default=ProjectStatus.PLANNING, nullable=False
    )
    owner_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
    # relationships — children use delete-orphan cascade, so deleting a
    # project removes all of its dependent records.
    owner: Mapped["User"] = relationship("User", back_populates="owned_projects", foreign_keys=[owner_id])
    wbs_items: Mapped[list["WBSItem"]] = relationship("WBSItem", back_populates="project", cascade="all, delete-orphan")
    tasks: Mapped[list["Task"]] = relationship("Task", back_populates="project", cascade="all, delete-orphan")
    daily_reports: Mapped[list["DailyReport"]] = relationship("DailyReport", back_populates="project", cascade="all, delete-orphan")
    inspection_requests: Mapped[list["InspectionRequest"]] = relationship("InspectionRequest", back_populates="project", cascade="all, delete-orphan")
    quality_tests: Mapped[list["QualityTest"]] = relationship("QualityTest", back_populates="project", cascade="all, delete-orphan")
    weather_data: Mapped[list["WeatherData"]] = relationship("WeatherData", back_populates="project", cascade="all, delete-orphan")
    weather_alerts: Mapped[list["WeatherAlert"]] = relationship("WeatherAlert", back_populates="project", cascade="all, delete-orphan")
    permit_items: Mapped[list["PermitItem"]] = relationship("PermitItem", back_populates="project", cascade="all, delete-orphan")
    reports: Mapped[list["Report"]] = relationship("Report", back_populates="project", cascade="all, delete-orphan")
    client_profile: Mapped["ClientProfile | None"] = relationship("ClientProfile", back_populates="projects")
class WBSItem(Base, UUIDMixin, TimestampMixin):
    """A node of a project's work-breakdown structure (self-referencing tree)."""

    __tablename__ = "wbs_items"
    project_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("projects.id"), nullable=False)
    # Self-referencing FK building the WBS hierarchy (null for root items).
    parent_id: Mapped[uuid.UUID | None] = mapped_column(UUID(as_uuid=True), ForeignKey("wbs_items.id"), nullable=True)
    code: Mapped[str] = mapped_column(String(50), nullable=False)
    name: Mapped[str] = mapped_column(String(200), nullable=False)
    # Depth in the tree; roots are level 1.
    level: Mapped[int] = mapped_column(Integer, default=1, nullable=False)
    unit: Mapped[str | None] = mapped_column(String(20), nullable=True)
    design_qty: Mapped[float | None] = mapped_column(Float, nullable=True)
    unit_price: Mapped[float | None] = mapped_column(Float, nullable=True)
    sort_order: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    # relationships
    project: Mapped["Project"] = relationship("Project", back_populates="wbs_items")
    parent: Mapped["WBSItem | None"] = relationship("WBSItem", remote_side="WBSItem.id", back_populates="children")
    children: Mapped[list["WBSItem"]] = relationship("WBSItem", back_populates="parent")
    tasks: Mapped[list["Task"]] = relationship("Task", back_populates="wbs_item")
+35
View File
@@ -0,0 +1,35 @@
import uuid
from sqlalchemy import String, Date, Text, Float, ForeignKey, Enum as SAEnum
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.dialects.postgresql import UUID
from app.core.database import Base
from app.models.base import TimestampMixin, UUIDMixin
import enum
class QualityResult(str, enum.Enum):
    """Pass/fail outcome of a quality test."""

    PASS = "pass"
    FAIL = "fail"
class QualityTest(Base, UUIDMixin, TimestampMixin):
    """A single material/quality test result recorded for a project."""

    __tablename__ = "quality_tests"
    project_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("projects.id"), nullable=False)
    wbs_item_id: Mapped[uuid.UUID | None] = mapped_column(UUID(as_uuid=True), ForeignKey("wbs_items.id"), nullable=True)
    test_type: Mapped[str] = mapped_column(String(50), nullable=False)  # compression_strength, slump, compaction, etc.
    # NOTE(review): column is Date → runtime value is datetime.date despite the str annotation.
    test_date: Mapped[str] = mapped_column(Date, nullable=False)
    location_detail: Mapped[str | None] = mapped_column(String(200), nullable=True)
    # Specified design value versus the measured value, in `unit`.
    design_value: Mapped[float | None] = mapped_column(Float, nullable=True)
    measured_value: Mapped[float] = mapped_column(Float, nullable=False)
    unit: Mapped[str] = mapped_column(String(20), nullable=False)
    result: Mapped[QualityResult] = mapped_column(
        SAEnum(QualityResult, name="quality_result"), nullable=False
    )
    # Testing laboratory and its report reference, when external.
    lab_name: Mapped[str | None] = mapped_column(String(100), nullable=True)
    report_number: Mapped[str | None] = mapped_column(String(100), nullable=True)
    notes: Mapped[str | None] = mapped_column(Text, nullable=True)
    # relationships
    project: Mapped["Project"] = relationship("Project", back_populates="quality_tests")
    wbs_item: Mapped["WBSItem | None"] = relationship("WBSItem")
+41
View File
@@ -0,0 +1,41 @@
import uuid
from sqlalchemy import String, Integer, Text, ForeignKey, Enum as SAEnum
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.dialects.postgresql import UUID, JSONB
from app.core.database import Base
from app.models.base import TimestampMixin, UUIDMixin
import enum
class RagSourceType(str, enum.Enum):
    """Category of a RAG knowledge source (KCS specs, laws, regulations, guidelines)."""

    KCS = "kcs"
    LAW = "law"
    REGULATION = "regulation"
    GUIDELINE = "guideline"
class RagSource(Base, UUIDMixin, TimestampMixin):
    """A source document ingested into the RAG knowledge base."""

    __tablename__ = "rag_sources"
    title: Mapped[str] = mapped_column(String(300), nullable=False)
    source_type: Mapped[RagSourceType] = mapped_column(
        SAEnum(RagSourceType, name="rag_source_type"), nullable=False
    )
    source_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    # Storage key of the original uploaded file, if any.
    file_s3_key: Mapped[str | None] = mapped_column(String(500), nullable=True)
    # relationships — deleting a source removes all of its chunks.
    chunks: Mapped[list["RagChunk"]] = relationship("RagChunk", back_populates="source", cascade="all, delete-orphan")
class RagChunk(Base, UUIDMixin, TimestampMixin):
    """One text chunk of a RAG source; its vector lives in a migration-added column."""

    __tablename__ = "rag_chunks"
    source_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("rag_sources.id"), nullable=False)
    # Position of this chunk within its source document.
    chunk_index: Mapped[int] = mapped_column(Integer, nullable=False)
    content: Mapped[str] = mapped_column(Text, nullable=False)
    # Note: embedding column (VECTOR) added via Alembic migration with pgvector extension
    # Attribute named metadata_ because "metadata" is reserved on Declarative classes;
    # the DB column is still called "metadata".
    metadata_: Mapped[dict | None] = mapped_column("metadata", JSONB, nullable=True)
    # relationships
    source: Mapped["RagSource"] = relationship("RagSource", back_populates="chunks")
+38
View File
@@ -0,0 +1,38 @@
import uuid
from sqlalchemy import String, Date, Text, ForeignKey, Enum as SAEnum
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.dialects.postgresql import UUID, JSONB
from app.core.database import Base
from app.models.base import TimestampMixin, UUIDMixin
import enum
class ReportType(str, enum.Enum):
    """Reporting cadence for client-facing progress reports."""
    WEEKLY = "weekly"
    MONTHLY = "monthly"
class ReportStatus(str, enum.Enum):
    """Lifecycle of a report: AI draft -> human review -> submitted to client."""
    DRAFT = "draft"
    REVIEWED = "reviewed"
    SUBMITTED = "submitted"
class Report(Base, UUIDMixin, TimestampMixin):
    """Weekly/monthly progress report for a project, optionally AI-drafted."""
    __tablename__ = "reports"
    project_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("projects.id"), nullable=False)
    report_type: Mapped[ReportType] = mapped_column(
        SAEnum(ReportType, name="report_type"), nullable=False
    )
    # NOTE(review): these Date columns yield datetime.date values at runtime;
    # the Mapped[str] annotations are inaccurate — consider Mapped[date]
    # (requires importing datetime.date in this module).
    period_start: Mapped[str] = mapped_column(Date, nullable=False)
    period_end: Mapped[str] = mapped_column(Date, nullable=False)
    # Structured report payload and the raw AI-generated draft text.
    content_json: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    ai_draft_text: Mapped[str | None] = mapped_column(Text, nullable=True)
    status: Mapped[ReportStatus] = mapped_column(
        SAEnum(ReportStatus, name="report_status"), default=ReportStatus.DRAFT, nullable=False
    )
    # Object-storage key of the rendered PDF, once exported.
    pdf_s3_key: Mapped[str | None] = mapped_column(String(500), nullable=True)
    # relationships
    project: Mapped["Project"] = relationship("Project", back_populates="reports")
+41
View File
@@ -0,0 +1,41 @@
import uuid
from sqlalchemy import String, Boolean, ForeignKey, Enum as SAEnum
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.dialects.postgresql import UUID, JSONB
from app.core.database import Base
from app.models.base import TimestampMixin, UUIDMixin
class ClientProfile(Base, UUIDMixin, TimestampMixin):
    """Per-client reporting preferences (cadence, template, contacts)."""
    __tablename__ = "client_profiles"
    name: Mapped[str] = mapped_column(String(100), nullable=False)
    # Free-form cadence string; "weekly" by default (not an enum).
    report_frequency: Mapped[str] = mapped_column(String(20), default="weekly", nullable=False)
    template_config: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    contact_info: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    # NOTE(review): uniqueness of the default profile is not enforced at the DB level.
    is_default: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    # relationships
    projects: Mapped[list["Project"]] = relationship("Project", back_populates="client_profile")
class AlertRule(Base, UUIDMixin, TimestampMixin):
    """Notification rule; project_id is NULL for global (all-project) rules."""
    __tablename__ = "alert_rules"
    project_id: Mapped[uuid.UUID | None] = mapped_column(UUID(as_uuid=True), ForeignKey("projects.id"), nullable=True)
    rule_name: Mapped[str] = mapped_column(String(100), nullable=False)
    # JSON condition/channel/recipient payloads; schema enforced at the API layer.
    condition: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    channels: Mapped[list | None] = mapped_column(JSONB, nullable=True)
    recipients: Mapped[list | None] = mapped_column(JSONB, nullable=True)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
class WorkTypeLibrary(Base, UUIDMixin, TimestampMixin):
    """Catalog of work types with weather constraints and default inspection checklists."""
    __tablename__ = "work_type_library"
    code: Mapped[str] = mapped_column(String(50), nullable=False, unique=True)
    name: Mapped[str] = mapped_column(String(100), nullable=False)
    category: Mapped[str] = mapped_column(String(50), nullable=False)
    weather_constraints: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    default_checklist: Mapped[list | None] = mapped_column(JSONB, nullable=True)
    # True for rows seeded by the system (as opposed to user-created entries).
    is_system: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
+61
View File
@@ -0,0 +1,61 @@
import uuid
from sqlalchemy import String, Integer, Date, Boolean, Float, ForeignKey, Enum as SAEnum
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.dialects.postgresql import UUID
from app.core.database import Base
from app.models.base import TimestampMixin, UUIDMixin
import enum
class DependencyType(str, enum.Enum):
    """CPM link types between two tasks."""
    FS = "FS"  # Finish-to-Start
    SS = "SS"  # Start-to-Start
    FF = "FF"  # Finish-to-Finish
    SF = "SF"  # Start-to-Finish
class Task(Base, UUIDMixin, TimestampMixin):
    """A schedulable Gantt activity, optionally linked to a WBS item."""
    __tablename__ = "tasks"
    project_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("projects.id"), nullable=False)
    wbs_item_id: Mapped[uuid.UUID | None] = mapped_column(UUID(as_uuid=True), ForeignKey("wbs_items.id"), nullable=True)
    name: Mapped[str] = mapped_column(String(200), nullable=False)
    # NOTE(review): Date columns yield datetime.date at runtime; the Mapped[str]
    # annotations are inaccurate — consider Mapped[date | None].
    planned_start: Mapped[str | None] = mapped_column(Date, nullable=True)
    planned_end: Mapped[str | None] = mapped_column(Date, nullable=True)
    actual_start: Mapped[str | None] = mapped_column(Date, nullable=True)
    actual_end: Mapped[str | None] = mapped_column(Date, nullable=True)
    progress_pct: Mapped[float] = mapped_column(Float, default=0.0, nullable=False)
    is_milestone: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    is_critical: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    # Cached CPM results — presumably written back by the CPM scheduling
    # service after each recompute; confirm against the task router.
    early_start: Mapped[str | None] = mapped_column(Date, nullable=True)  # CPM
    early_finish: Mapped[str | None] = mapped_column(Date, nullable=True)
    late_start: Mapped[str | None] = mapped_column(Date, nullable=True)
    late_finish: Mapped[str | None] = mapped_column(Date, nullable=True)
    total_float: Mapped[int | None] = mapped_column(Integer, nullable=True)
    sort_order: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    # relationships
    project: Mapped["Project"] = relationship("Project", back_populates="tasks")
    wbs_item: Mapped["WBSItem | None"] = relationship("WBSItem", back_populates="tasks")
    # predecessors = dependency rows where THIS task is the successor, and vice versa.
    predecessors: Mapped[list["TaskDependency"]] = relationship(
        "TaskDependency", foreign_keys="TaskDependency.successor_id", back_populates="successor"
    )
    successors: Mapped[list["TaskDependency"]] = relationship(
        "TaskDependency", foreign_keys="TaskDependency.predecessor_id", back_populates="predecessor"
    )
    weather_alerts: Mapped[list["WeatherAlert"]] = relationship("WeatherAlert", back_populates="task")
class TaskDependency(Base, UUIDMixin, TimestampMixin):
    """A directed scheduling link (predecessor -> successor) with type and lag."""
    __tablename__ = "task_dependencies"
    predecessor_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("tasks.id"), nullable=False)
    successor_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("tasks.id"), nullable=False)
    dependency_type: Mapped[DependencyType] = mapped_column(
        SAEnum(DependencyType, name="dependency_type"), default=DependencyType.FS, nullable=False
    )
    # Offset in days applied to the link (may be negative for leads).
    lag_days: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    # relationships
    predecessor: Mapped["Task"] = relationship("Task", foreign_keys=[predecessor_id], back_populates="successors")
    successor: Mapped["Task"] = relationship("Task", foreign_keys=[successor_id], back_populates="predecessors")
+31
View File
@@ -0,0 +1,31 @@
import uuid
from sqlalchemy import String, Boolean, Enum as SAEnum
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.dialects.postgresql import UUID
from app.core.database import Base
from app.models.base import TimestampMixin, UUIDMixin
import enum
class UserRole(str, enum.Enum):
    """Authorization roles, from full admin down to field worker."""
    ADMIN = "admin"
    SITE_MANAGER = "site_manager"
    SUPERVISOR = "supervisor"
    WORKER = "worker"
class User(Base, UUIDMixin, TimestampMixin):
    """Application account; may be linked to a KakaoTalk chatbot user."""
    __tablename__ = "users"
    email: Mapped[str] = mapped_column(String(255), unique=True, nullable=False, index=True)
    hashed_password: Mapped[str] = mapped_column(String(255), nullable=False)
    name: Mapped[str] = mapped_column(String(100), nullable=False)
    role: Mapped[UserRole] = mapped_column(
        SAEnum(UserRole, name="user_role"), default=UserRole.SITE_MANAGER, nullable=False
    )
    phone: Mapped[str | None] = mapped_column(String(20), nullable=True)
    # Kakao chatbot user key, used to map incoming Skill requests to an account.
    kakao_user_key: Mapped[str | None] = mapped_column(String(100), nullable=True, unique=True, index=True)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True, nullable=False)
    # relationships
    owned_projects: Mapped[list["Project"]] = relationship("Project", back_populates="owner", foreign_keys="Project.owner_id")
+57
View File
@@ -0,0 +1,57 @@
import uuid
from datetime import datetime
from sqlalchemy import String, Boolean, Date, Float, Integer, ForeignKey, Enum as SAEnum, Text
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy.dialects.postgresql import UUID, JSONB
from app.core.database import Base
from app.models.base import TimestampMixin, UUIDMixin
import enum
class ForecastType(str, enum.Enum):
    """Provenance of a weather row: short/medium-range forecast or actual observation."""
    SHORT_TERM = "short_term"
    MEDIUM_TERM = "medium_term"
    OBSERVED = "observed"
class AlertSeverity(str, enum.Enum):
    """Severity levels for weather-driven work alerts."""
    WARNING = "warning"
    CRITICAL = "critical"
class WeatherData(Base, UUIDMixin, TimestampMixin):
    """One day of forecast/observed weather for a project's location."""
    __tablename__ = "weather_data"
    project_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("projects.id"), nullable=False)
    # NOTE(review): Date column yields datetime.date at runtime; Mapped[str] is inaccurate.
    forecast_date: Mapped[str] = mapped_column(Date, nullable=False)
    forecast_type: Mapped[ForecastType] = mapped_column(
        SAEnum(ForecastType, name="forecast_type"), nullable=False
    )
    temperature_high: Mapped[float | None] = mapped_column(Float, nullable=True)
    temperature_low: Mapped[float | None] = mapped_column(Float, nullable=True)
    precipitation_mm: Mapped[float | None] = mapped_column(Float, nullable=True)
    wind_speed_ms: Mapped[float | None] = mapped_column(Float, nullable=True)
    weather_code: Mapped[str | None] = mapped_column(String(20), nullable=True)
    # Raw provider payload kept for debugging/reprocessing.
    raw_data: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    # Column type (DateTime) is derived from the annotation by SQLAlchemy 2.0.
    fetched_at: Mapped[datetime] = mapped_column(nullable=False)
    # relationships
    project: Mapped["Project"] = relationship("Project", back_populates="weather_data")
class WeatherAlert(Base, UUIDMixin, TimestampMixin):
    """Weather-triggered warning for a project, optionally tied to one task."""
    __tablename__ = "weather_alerts"
    project_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("projects.id"), nullable=False)
    task_id: Mapped[uuid.UUID | None] = mapped_column(UUID(as_uuid=True), ForeignKey("tasks.id"), nullable=True)
    alert_date: Mapped[str] = mapped_column(Date, nullable=False)
    alert_type: Mapped[str] = mapped_column(String(50), nullable=False)  # rain_concrete, wind_highwork, etc.
    severity: Mapped[AlertSeverity] = mapped_column(
        SAEnum(AlertSeverity, name="alert_severity"), nullable=False
    )
    message: Mapped[str] = mapped_column(Text, nullable=False)
    # Set once a user has acknowledged (dismissed) the alert in the UI.
    is_acknowledged: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
    # relationships
    project: Mapped["Project"] = relationship("Project", back_populates="weather_alerts")
    task: Mapped["Task | None"] = relationship("Task", back_populates="weather_alerts")
View File
+68
View File
@@ -0,0 +1,68 @@
import uuid
from datetime import date, datetime
from pydantic import BaseModel
from app.models.daily_report import ReportStatus, InputSource
class DailyReportCreate(BaseModel):
    """Payload for manually creating a daily site report."""
    report_date: date
    weather_summary: str | None = None
    temperature_high: float | None = None
    temperature_low: float | None = None
    workers_count: dict | None = None
    equipment_list: list | None = None
    work_content: str | None = None
    issues: str | None = None
class DailyReportGenerateRequest(BaseModel):
    """Request to AI-generate a daily report"""
    report_date: date
    workers_count: dict  # mapping: trade/role name -> headcount
    equipment_list: list  # items like {"type": <equipment name>, "count": 1, "hours": 8}
    work_items: list[str]  # List of work done
    issues: str | None = None
    photos_count: int = 0
class DailyReportUpdate(BaseModel):
    """Partial update; every field is optional."""
    weather_summary: str | None = None
    temperature_high: float | None = None
    temperature_low: float | None = None
    workers_count: dict | None = None
    equipment_list: list | None = None
    work_content: str | None = None
    issues: str | None = None
    status: ReportStatus | None = None
class DailyReportPhotoResponse(BaseModel):
    """A photo attached to a daily report."""
    id: uuid.UUID
    s3_key: str
    caption: str | None
    sort_order: int
    model_config = {"from_attributes": True}
class DailyReportResponse(BaseModel):
    """Full daily report as returned by the API (serialized from the ORM row)."""
    id: uuid.UUID
    project_id: uuid.UUID
    report_date: date
    weather_summary: str | None
    temperature_high: float | None
    temperature_low: float | None
    workers_count: dict | None
    equipment_list: list | None
    work_content: str | None
    issues: str | None
    input_source: InputSource
    ai_generated: bool
    status: ReportStatus
    confirmed_by: uuid.UUID | None
    confirmed_at: datetime | None
    pdf_s3_key: str | None
    photos: list[DailyReportPhotoResponse] = []
    created_at: datetime
    model_config = {"from_attributes": True}
+47
View File
@@ -0,0 +1,47 @@
import uuid
from datetime import date, datetime
from pydantic import BaseModel
from app.models.inspection import InspectionResult, InspectionStatus
class InspectionCreate(BaseModel):
    """Payload for manually creating an inspection request."""
    wbs_item_id: uuid.UUID | None = None
    inspection_type: str
    requested_date: date
    location_detail: str | None = None
    checklist_items: list | None = None
    notes: str | None = None
class InspectionGenerateRequest(BaseModel):
    """Payload to AI-generate an inspection checklist (no manual checklist items)."""
    wbs_item_id: uuid.UUID | None = None
    inspection_type: str
    requested_date: date
    location_detail: str | None = None
class InspectionUpdate(BaseModel):
    """Partial update; every field is optional."""
    checklist_items: list | None = None
    result: InspectionResult | None = None
    inspector_name: str | None = None
    notes: str | None = None
    status: InspectionStatus | None = None
class InspectionResponse(BaseModel):
    """Inspection request/result as returned by the API."""
    id: uuid.UUID
    project_id: uuid.UUID
    wbs_item_id: uuid.UUID | None
    inspection_type: str
    requested_date: date
    location_detail: str | None
    checklist_items: list | None
    result: InspectionResult | None
    inspector_name: str | None
    notes: str | None
    ai_generated: bool
    status: InspectionStatus
    pdf_s3_key: str | None
    created_at: datetime
    model_config = {"from_attributes": True}
+76
View File
@@ -0,0 +1,76 @@
import uuid
from datetime import date, datetime
from pydantic import BaseModel
from app.models.project import ProjectStatus, ConstructionType
class WBSItemCreate(BaseModel):
    """Payload for creating a WBS (work-breakdown-structure) item."""
    parent_id: uuid.UUID | None = None
    code: str
    name: str
    level: int = 1
    unit: str | None = None
    design_qty: float | None = None
    unit_price: float | None = None
    sort_order: int = 0
class WBSItemResponse(BaseModel):
    """WBS node serialized as a tree (children nested recursively)."""
    id: uuid.UUID
    project_id: uuid.UUID
    parent_id: uuid.UUID | None
    code: str
    name: str
    level: int
    unit: str | None
    design_qty: float | None
    unit_price: float | None
    sort_order: int
    # Self-referencing list; Pydantic copies mutable defaults per instance.
    children: list["WBSItemResponse"] = []
    model_config = {"from_attributes": True}
class ProjectCreate(BaseModel):
    """Payload for creating a project."""
    name: str
    code: str
    client_profile_id: uuid.UUID | None = None
    construction_type: ConstructionType = ConstructionType.OTHER
    contract_amount: int | None = None
    start_date: date | None = None
    end_date: date | None = None
    location_address: str | None = None
    location_lat: float | None = None
    location_lng: float | None = None
class ProjectUpdate(BaseModel):
    """Partial update; every field is optional."""
    name: str | None = None
    client_profile_id: uuid.UUID | None = None
    construction_type: ConstructionType | None = None
    contract_amount: int | None = None
    start_date: date | None = None
    end_date: date | None = None
    location_address: str | None = None
    location_lat: float | None = None
    location_lng: float | None = None
    status: ProjectStatus | None = None
class ProjectResponse(BaseModel):
    """Project as returned by the API."""
    id: uuid.UUID
    name: str
    code: str
    client_profile_id: uuid.UUID | None
    construction_type: ConstructionType
    contract_amount: int | None
    start_date: date | None
    end_date: date | None
    location_address: str | None
    location_lat: float | None
    location_lng: float | None
    status: ProjectStatus
    owner_id: uuid.UUID
    created_at: datetime
    model_config = {"from_attributes": True}
+44
View File
@@ -0,0 +1,44 @@
import uuid
from datetime import datetime
from pydantic import BaseModel
from app.models.rag import RagSourceType
class RagAskRequest(BaseModel):
    """A question for the RAG pipeline, optionally restricted to certain source types."""
    question: str
    source_types: list[RagSourceType] | None = None  # Filter by source type
    top_k: int = 5  # number of chunks to retrieve
class RagSource(BaseModel):
    """A retrieved chunk cited in the answer.

    NOTE(review): this schema shares its name with the ORM model
    app.models.rag.RagSource — consider renaming (e.g. RagCitation)
    to avoid import confusion.
    """
    id: uuid.UUID
    title: str
    source_type: RagSourceType
    chunk_content: str
    relevance_score: float
    model_config = {"from_attributes": True}
class RagAskResponse(BaseModel):
    """Answer plus citations; always carries a legal disclaimer."""
    question: str
    answer: str
    sources: list[RagSource]
    disclaimer: str = "이 답변은 참고용이며 법률 자문이 아닙니다. 중요 사항은 전문가에게 확인하세요."
class RagSourceCreate(BaseModel):
    """Payload for registering a new knowledge source."""
    title: str
    source_type: RagSourceType
    source_url: str | None = None
class RagSourceResponse(BaseModel):
    """Knowledge source as returned by the API, with its chunk count."""
    id: uuid.UUID
    title: str
    source_type: RagSourceType
    source_url: str | None
    chunk_count: int = 0
    created_at: datetime
    model_config = {"from_attributes": True}
+73
View File
@@ -0,0 +1,73 @@
import uuid
from datetime import datetime
from pydantic import BaseModel
class ClientProfileCreate(BaseModel):
    """Payload for creating a client reporting profile."""
    name: str
    report_frequency: str = "weekly"
    template_config: dict | None = None
    contact_info: dict | None = None
    is_default: bool = False
class ClientProfileResponse(BaseModel):
    """Client profile as returned by the API."""
    id: uuid.UUID
    name: str
    report_frequency: str
    template_config: dict | None
    contact_info: dict | None
    is_default: bool
    created_at: datetime
    model_config = {"from_attributes": True}
class WorkTypeCreate(BaseModel):
    """Payload for adding a work type to the library."""
    code: str
    name: str
    category: str
    weather_constraints: dict | None = None
    default_checklist: list | None = None
class WorkTypeResponse(BaseModel):
    """Work-type library entry as returned by the API."""
    id: uuid.UUID
    code: str
    name: str
    category: str
    weather_constraints: dict | None
    default_checklist: list | None
    is_system: bool
    model_config = {"from_attributes": True}
class AlertRuleCreate(BaseModel):
    """Payload for creating an alert rule (project_id None = global rule)."""
    project_id: uuid.UUID | None = None
    rule_name: str
    condition: dict | None = None
    channels: list | None = None
    recipients: list | None = None
    is_active: bool = True
class AlertRuleResponse(BaseModel):
    """Alert rule as returned by the API."""
    id: uuid.UUID
    project_id: uuid.UUID | None
    rule_name: str
    condition: dict | None
    channels: list | None
    recipients: list | None
    is_active: bool
    created_at: datetime
    model_config = {"from_attributes": True}
class SettingsExport(BaseModel):
    """Versioned bundle of all settings for export/backup."""
    version: str = "1.0"
    client_profiles: list[ClientProfileResponse]
    work_types: list[WorkTypeResponse]
    alert_rules: list[AlertRuleResponse]
    exported_at: datetime
+60
View File
@@ -0,0 +1,60 @@
import uuid
from datetime import date, datetime
from pydantic import BaseModel
from app.models.task import DependencyType
class TaskCreate(BaseModel):
    """Payload for creating a Gantt task."""
    wbs_item_id: uuid.UUID | None = None
    name: str
    planned_start: date | None = None
    planned_end: date | None = None
    is_milestone: bool = False
    sort_order: int = 0
class TaskUpdate(BaseModel):
    """Partial update; every field is optional."""
    name: str | None = None
    planned_start: date | None = None
    planned_end: date | None = None
    actual_start: date | None = None
    actual_end: date | None = None
    progress_pct: float | None = None
    is_milestone: bool | None = None
    sort_order: int | None = None
class TaskDependencyCreate(BaseModel):
    """Payload for linking two tasks (defaults to Finish-to-Start, zero lag)."""
    predecessor_id: uuid.UUID
    successor_id: uuid.UUID
    dependency_type: DependencyType = DependencyType.FS
    lag_days: int = 0
class TaskResponse(BaseModel):
    """Task as returned by the API, including cached CPM fields."""
    id: uuid.UUID
    project_id: uuid.UUID
    wbs_item_id: uuid.UUID | None
    name: str
    planned_start: date | None
    planned_end: date | None
    actual_start: date | None
    actual_end: date | None
    progress_pct: float
    is_milestone: bool
    is_critical: bool
    early_start: date | None
    early_finish: date | None
    late_start: date | None
    late_finish: date | None
    total_float: int | None
    sort_order: int
    created_at: datetime
    model_config = {"from_attributes": True}
class GanttData(BaseModel):
    """Full Gantt payload: tasks, critical-path task ids, and total duration."""
    tasks: list[TaskResponse]
    critical_path: list[uuid.UUID]
    project_duration_days: int | None
+44
View File
@@ -0,0 +1,44 @@
import uuid
from datetime import datetime
from pydantic import BaseModel, EmailStr
from app.models.user import UserRole
class UserCreate(BaseModel):
    """Payload for registering a user (plaintext password is hashed server-side)."""
    email: EmailStr
    password: str
    name: str
    role: UserRole = UserRole.SITE_MANAGER
    phone: str | None = None
class UserUpdate(BaseModel):
    """Partial update; every field is optional."""
    name: str | None = None
    phone: str | None = None
    role: UserRole | None = None
    is_active: bool | None = None
class UserResponse(BaseModel):
    """User as returned by the API (never includes the password hash)."""
    id: uuid.UUID
    email: str
    name: str
    role: UserRole
    phone: str | None
    kakao_user_key: str | None
    is_active: bool
    created_at: datetime
    model_config = {"from_attributes": True}
class TokenResponse(BaseModel):
    """JWT pair plus the authenticated user, returned on login/refresh."""
    access_token: str
    refresh_token: str
    token_type: str = "bearer"
    user: UserResponse
class LoginRequest(BaseModel):
    """Email/password credentials for login."""
    email: EmailStr
    password: str
+38
View File
@@ -0,0 +1,38 @@
import uuid
from datetime import date, datetime
from pydantic import BaseModel
from app.models.weather import ForecastType, AlertSeverity
class WeatherDataResponse(BaseModel):
    """One day of forecast/observed weather as returned by the API."""
    id: uuid.UUID
    project_id: uuid.UUID
    forecast_date: date
    forecast_type: ForecastType
    temperature_high: float | None
    temperature_low: float | None
    precipitation_mm: float | None
    wind_speed_ms: float | None
    weather_code: str | None
    fetched_at: datetime
    model_config = {"from_attributes": True}
class WeatherAlertResponse(BaseModel):
    """Weather-driven work alert as returned by the API."""
    id: uuid.UUID
    project_id: uuid.UUID
    task_id: uuid.UUID | None
    alert_date: date
    alert_type: str
    severity: AlertSeverity
    message: str
    is_acknowledged: bool
    created_at: datetime
    model_config = {"from_attributes": True}
class WeatherForecastSummary(BaseModel):
    """Dashboard payload combining the forecast with currently active alerts."""
    forecast: list[WeatherDataResponse]
    active_alerts: list[WeatherAlertResponse]
View File
+51
View File
@@ -0,0 +1,51 @@
"""
Core Claude API wrapper.
Shared by all AI-powered features: daily reports, inspection gen, report gen, RAG.
"""
import anthropic
from app.config import settings
# Process-wide singleton client, created lazily on first use.
_client: anthropic.AsyncAnthropic | None = None
def get_client() -> anthropic.AsyncAnthropic:
    """Return the shared AsyncAnthropic client, constructing it on first call."""
    global _client
    if _client is None:
        _client = anthropic.AsyncAnthropic(api_key=settings.ANTHROPIC_API_KEY)
    return _client
async def complete(
    messages: list[dict],
    system: str,
    temperature: float = 0.3,
    max_tokens: int | None = None,
) -> str:
    """
    Call Claude and return the text of the first content block.

    Args:
        messages: Anthropic-format message dicts ({"role": ..., "content": ...}).
        system: system prompt text.
        temperature: sampling temperature (low default for factual output).
        max_tokens: overrides settings.CLAUDE_MAX_TOKENS when given.

    Returns:
        The model's text response.
    """
    import logging  # local import: keeps this module's top-level imports untouched
    client = get_client()
    response = await client.messages.create(
        model=settings.CLAUDE_MODEL,
        max_tokens=max_tokens or settings.CLAUDE_MAX_TOKENS,
        temperature=temperature,
        system=system,
        messages=messages,
    )
    # Token usage for cost monitoring. Use logging with lazy %-args (not print)
    # so operators can route/filter it like the rest of the app's logs.
    usage = response.usage
    logging.getLogger(__name__).info(
        "[AI] input=%s output=%s total=%s",
        usage.input_tokens,
        usage.output_tokens,
        usage.input_tokens + usage.output_tokens,
    )
    return response.content[0].text
async def complete_json(
    messages: list[dict],
    system: str,
    temperature: float = 0.3,
) -> str:
    """Call Claude with the system prompt augmented by a strict JSON-only instruction."""
    # Appended instruction (Korean): "Respond only in valid JSON. Do not include any other text."
    strict_system = f"{system}\n\n반드시 유효한 JSON 형식으로만 응답하세요. 다른 텍스트를 포함하지 마세요."
    return await complete(messages, strict_system, temperature)
+41
View File
@@ -0,0 +1,41 @@
"""AI-powered daily report generation."""
from app.services.ai_engine import complete
from app.services.prompts.daily_report import SYSTEM_PROMPT, build_prompt
async def generate_work_content(
    project_name: str,
    report_date: str,
    weather_summary: str,
    temperature_high: float | None,
    temperature_low: float | None,
    workers_count: dict,
    equipment_list: list,
    work_items: list[str],
    issues: str | None,
) -> str:
    """Generate the work content text for a daily report.

    Formats the temperature reading(s), builds the prompt, and asks Claude
    for the formal "today's work" section.
    """
    # Build a human-readable temperature string from whichever readings exist.
    if temperature_high is not None and temperature_low is not None:
        temp_str = f"최고 {temperature_high}°C / 최저 {temperature_low}°C"
    elif temperature_high is not None:
        temp_str = f"최고 {temperature_high}°C"
    elif temperature_low is not None:
        # Bug fix: a low-only reading used to fall through to "no data".
        temp_str = f"최저 {temperature_low}°C"
    else:
        temp_str = "기온 정보 없음"
    prompt = build_prompt(
        project_name=project_name,
        report_date=report_date,
        weather_summary=weather_summary or "맑음",
        temperature=temp_str,
        workers=workers_count or {},
        equipment=equipment_list or [],
        work_items=work_items,
        issues=issues,
    )
    return await complete(
        messages=[{"role": "user", "content": prompt}],
        system=SYSTEM_PROMPT,
        temperature=0.3,
    )
+111
View File
@@ -0,0 +1,111 @@
"""
CPM (Critical Path Method) calculation for Gantt chart.
"""
from datetime import date, timedelta
from typing import NamedTuple
import uuid
class TaskNode(NamedTuple):
    """Lightweight task view for CPM computation.

    NOTE(review): appears unused by compute_cpm in this module, which reads
    task attributes directly — confirm against other callers before removing.
    """
    id: uuid.UUID
    planned_start: date | None
    planned_end: date | None
    duration_days: int
def compute_cpm(tasks: list, dependencies: list) -> tuple[dict[uuid.UUID, dict], int]:
    """
    Compute a CPM (Critical Path Method) forward/backward pass.

    Args:
        tasks: objects exposing ``id``, ``planned_start`` and ``planned_end``.
        dependencies: objects exposing ``predecessor_id`` and ``successor_id``.
            NOTE(review): ``dependency_type`` and ``lag_days`` are currently
            ignored — every link is treated as Finish-to-Start with zero lag.

    Returns:
        ``(schedule, project_duration_days)`` where ``schedule`` maps task id
        to a dict with ``early_start``/``early_finish``/``late_start``/
        ``late_finish`` (dates), ``total_float`` (days), ``is_critical``.
        Bug fix: the empty-input case now returns ``({}, 0)`` to match the
        tuple returned on the normal path (it previously returned a bare
        ``{}``, breaking tuple-unpacking callers).

    Raises:
        ValueError: if the dependency graph contains a cycle (cyclic tasks
        were previously dropped from the result silently).
    """
    if not tasks:
        return {}, 0
    # Adjacency maps keyed by task id.
    task_map = {t.id: t for t in tasks}
    successors: dict[uuid.UUID, list[uuid.UUID]] = {t.id: [] for t in tasks}
    predecessors: dict[uuid.UUID, list[uuid.UUID]] = {t.id: [] for t in tasks}
    for dep in dependencies:
        # Skip links that reference tasks outside this set — stale rows would
        # otherwise raise KeyError.
        if dep.predecessor_id in task_map and dep.successor_id in task_map:
            successors[dep.predecessor_id].append(dep.successor_id)
            predecessors[dep.successor_id].append(dep.predecessor_id)

    def get_duration(task) -> int:
        # Inclusive day count; tasks without planned dates default to 1 day.
        if task.planned_start and task.planned_end:
            return max(1, (task.planned_end - task.planned_start).days + 1)
        return 1

    # Topological sort (Kahn's algorithm). An index cursor over a growing list
    # avoids the O(n) cost of list.pop(0).
    in_degree = {t.id: len(predecessors[t.id]) for t in tasks}
    order: list[uuid.UUID] = [t.id for t in tasks if in_degree[t.id] == 0]
    cursor = 0
    while cursor < len(order):
        node = order[cursor]
        cursor += 1
        for succ in successors[node]:
            in_degree[succ] -= 1
            if in_degree[succ] == 0:
                order.append(succ)
    if len(order) != len(tasks):
        raise ValueError("task dependencies contain a cycle")
    # Forward pass: Early Start / Early Finish, in days from project start.
    es: dict[uuid.UUID, int] = {}
    ef: dict[uuid.UUID, int] = {}
    for tid in order:
        dur = get_duration(task_map[tid])
        es[tid] = max((ef[p] for p in predecessors[tid]), default=0)
        ef[tid] = es[tid] + dur
    project_duration = max(ef.values())
    # Backward pass: Late Finish / Late Start.
    lf: dict[uuid.UUID, int] = {}
    ls: dict[uuid.UUID, int] = {}
    for tid in reversed(order):
        dur = get_duration(task_map[tid])
        lf[tid] = min((ls[s] for s in successors[tid]), default=project_duration)
        ls[tid] = lf[tid] - dur
    # Anchor the relative day offsets to the earliest planned start (today if none).
    planned_starts = [t.planned_start for t in tasks if t.planned_start]
    project_start = min(planned_starts) if planned_starts else date.today()
    result: dict[uuid.UUID, dict] = {}
    for tid in order:
        total_float = ls[tid] - es[tid]
        result[tid] = {
            "early_start": project_start + timedelta(days=es[tid]),
            # Finish dates are inclusive, hence the -1 day.
            "early_finish": project_start + timedelta(days=ef[tid] - 1),
            "late_start": project_start + timedelta(days=ls[tid]),
            "late_finish": project_start + timedelta(days=lf[tid] - 1),
            "total_float": total_float,
            "is_critical": total_float == 0,  # zero slack => on the critical path
        }
    return result, project_duration
+34
View File
@@ -0,0 +1,34 @@
"""AI-powered inspection request generation."""
import json
from app.services.ai_engine import complete_json
from app.services.prompts.inspection import SYSTEM_PROMPT, build_prompt
async def generate_checklist(
    project_name: str,
    inspection_type: str,
    location_detail: str | None,
    requested_date: str,
    wbs_name: str | None,
) -> list[dict]:
    """Generate inspection checklist items using Claude.

    Returns a list of checklist-item dicts per the prompt's JSON contract
    (item / standard / timing / passed), or [] when the model response
    cannot be parsed into that shape.
    """
    prompt = build_prompt(
        project_name=project_name,
        inspection_type=inspection_type,
        location_detail=location_detail,
        requested_date=requested_date,
        wbs_name=wbs_name,
    )
    raw = await complete_json(
        messages=[{"role": "user", "content": prompt}],
        system=SYSTEM_PROMPT,
        temperature=0.2,
    )
    try:
        data = json.loads(raw)
    except json.JSONDecodeError:
        # Model did not return valid JSON; caller falls back to manual entry.
        return []
    # Bug fix: the model may return a bare list or a non-dict value, which used
    # to raise AttributeError on .get (the old KeyError guard could never fire).
    if isinstance(data, dict):
        items = data.get("checklist_items", [])
    elif isinstance(data, list):
        items = data
    else:
        items = []
    return items if isinstance(items, list) else []
+123
View File
@@ -0,0 +1,123 @@
"""
Kakao Chatbot Skill API service.
Parses incoming messages and routes to appropriate handlers.
"""
import re
from datetime import date
# Kakao Skill response builders
def simple_text(text: str) -> dict:
    """Build a Kakao Skill v2.0 response with a single simpleText bubble."""
    bubble = {"simpleText": {"text": text}}
    return {"version": "2.0", "template": {"outputs": [bubble]}}
def basic_card(title: str, description: str, buttons: list[dict] | None = None) -> dict:
card = {"title": title, "description": description}
if buttons:
card["buttons"] = buttons
return {
"version": "2.0",
"template": {
"outputs": [{"basicCard": card}]
}
}
def list_card(header_title: str, items: list[dict], buttons: list[dict] | None = None) -> dict:
card = {
"header": {"title": header_title},
"items": items,
}
if buttons:
card["buttons"] = buttons
return {
"version": "2.0",
"template": {
"outputs": [{"listCard": card}]
}
}
# Message routing
class KakaoIntent:
    """String constants for the chatbot intents this service routes to."""
    DAILY_REPORT = "daily_report"
    RAG_QUESTION = "rag_question"
    WEATHER = "weather"
    HELP = "help"
    UNKNOWN = "unknown"
def detect_intent(utterance: str) -> str:
    """Detect user intent from a free-form utterance via keyword matching.

    Checks are ordered: daily report > RAG question > weather > help > unknown.
    """
    u = utterance.strip()
    # Daily report keywords
    if any(k in u for k in ["일보", "작업일보", "오늘 공사", "금일 공사"]):
        return KakaoIntent.DAILY_REPORT
    # RAG / question keywords.
    # Bug fix: the list previously contained an empty string, and "" is a
    # substring of every string — so every non-report message matched here
    # and the weather/help/unknown branches were unreachable.
    if any(k in u for k in ["질문", "법규", "시방서", "기준", "KCS", "법령", "산안법", "중대재해", "?"]):
        return KakaoIntent.RAG_QUESTION
    # Weather keywords (empty-string entries removed for the same reason).
    if any(k in u for k in ["날씨", "기상", "바람"]):
        return KakaoIntent.WEATHER
    # Help
    if any(k in u for k in ["도움말", "메뉴", "help", "사용법"]):
        return KakaoIntent.HELP
    return KakaoIntent.UNKNOWN
def parse_daily_report_input(utterance: str) -> dict:
    """
    Parse a free-form Kakao daily-report message into structured fields.

    Example input: "오늘 일보: 콘크리트 5명, 철근 3명, 관로매설 오후 완료"
    Returns a dict with workers_count, work_items, issues and report_date (today).
    """
    # "<role> N명" pairs become worker headcounts; aggregate rows are skipped.
    workers: dict[str, int] = {}
    for role, count in re.findall(r'([가-힣a-zA-Z]+)\s+(\d+)명', utterance):
        if role in ["", "합계"]:
            continue
        workers[role] = int(count)
    # Remaining free-text lines become work items (headcount lines excluded).
    body = utterance.replace("일보:", "").replace("작업일보:", "")
    work_items: list[str] = []
    for raw_line in body.split("\n"):
        cleaned = raw_line.strip().lstrip("-").strip()
        if cleaned and len(cleaned) > 2 and not re.search(r'\d+명', cleaned):
            work_items.append(cleaned)
    # Capture issue text only when an issue keyword appears anywhere.
    issues = None
    if "특이" in utterance or "문제" in utterance or "이슈" in utterance:
        matched = re.search(r'(특이|문제|이슈)[사항:\s]*(.+?)(?:\n|$)', utterance)
        if matched:
            issues = matched.group(2).strip()
    return {
        "workers_count": workers,
        "work_items": work_items if work_items else ["기타 작업"],
        "issues": issues,
        "report_date": str(date.today()),
    }
def make_help_response() -> dict:
    """Build the static help menu shown for the HELP intent (a Kakao listCard)."""
    return list_card(
        header_title="CONAI 현장 도우미",
        items=[
            {"title": "작업일보 작성", "description": "일보: 작업내용 입력"},
            {"title": "법규 질문", "description": "질문: 궁금한 내용 입력"},
            {"title": "날씨 확인", "description": "날씨 입력"},
        ],
        buttons=[{"action": "message", "label": "일보 작성", "messageText": "일보:"}],
    )
@@ -0,0 +1,52 @@
# System prompt (Korean) instructing Claude to act as a construction daily-report
# writer: legally-compliant format, objective facts, structured weather/crew/
# equipment/work sections, no honorifics or embellishment.
SYSTEM_PROMPT = """당신은 대한민국 토목건설 현장의 작업일보 작성 전문가입니다.
현장소장이 제공하는 정보를 바탕으로 공식적인 작업일보를 작성합니다.
작업일보 작성 원칙:
1. 건설기술진흥법 시행규칙에 따른 서식 기준을 준수합니다
2. 객관적이고 사실에 근거한 내용만 기록합니다
3. 전문 건설 용어를 사용하되, 명확하고 이해하기 쉽게 작성합니다
4. 날씨, 인원, 장비, 작업내용을 구조적으로 기술합니다
5. 특이사항이 있으면 간결하게 기록합니다
응답 형식:
- 작업내용은 공종별로 구분하여 기술
- 각 항목은 간결하고 명확하게
- 존칭이나 과도한 수식어 사용 금지
"""
def build_prompt(
project_name: str,
report_date: str,
weather_summary: str,
temperature: str,
workers: dict,
equipment: list,
work_items: list[str],
issues: str | None,
) -> str:
workers_text = ", ".join([f"{k} {v}" for k, v in workers.items()])
equipment_text = ", ".join([f"{e.get('type', '')} {e.get('count', 1)}" for e in equipment])
work_text = "\n".join([f"- {item}" for item in work_items])
prompt = f"""다음 정보를 바탕으로 작업일보의 '작업내용' 항목을 작성해주세요.
[현장 정보]
- 공사명: {project_name}
- 작업일자: {report_date}
- 날씨: {weather_summary}, 기온 {temperature}
[투입 인원]
{workers_text}
[투입 장비]
{equipment_text if equipment_text else "장비 없음"}
[당일 작업 항목]
{work_text}
[특이사항]
{issues if issues else "특이사항 없음"}
위 정보를 기반으로 공식 작업일보의 '금일 작업내용' 항목을 200~400자로 작성해주세요.
공종별로 나누어 구체적이고 전문적으로 기술하세요."""
return prompt
@@ -0,0 +1,47 @@
# System prompt (Korean) instructing Claude to act as a QC specialist producing
# KCS-standard inspection checklists, with per-work-type item examples and
# pre/during/post construction timing distinctions.
SYSTEM_PROMPT = """당신은 대한민국 토목건설 현장의 품질관리 전문가입니다.
KCS(한국건설기준) 시방서와 건설기술진흥법에 따라 검측요청서를 작성합니다.
검측요청서 작성 원칙:
1. KCS 시방서 기준에 맞는 체크리스트 항목을 포함합니다
2. 각 항목은 명확하고 측정 가능해야 합니다
3. 시공 전/시공 중/시공 후 점검 시점을 구분합니다
4. 허용 기준값이 있는 항목은 수치를 명시합니다
공종별 주요 체크리스트:
- 철근공사: 배근 간격, 피복두께, 이음 위치, 가스압접 등
- 거푸집공사: 치수, 수직도, 지지대 안전, 청소 상태 등
- 콘크리트타설: 슬럼프, 공기량, 타설 방법, 양생 계획 등
- 관로매설: 관저고, 관경, 구배, 접합 상태, 토피 등
- 성토/다짐: 두께, 다짐도, 함수비 등
- 도로포장: 두께, 배합, 평탄성, 표면상태 등
"""
def build_prompt(
    project_name: str,
    inspection_type: str,
    location_detail: str | None,  # annotation fixed: callers pass None; the body already handles it
    requested_date: str,
    wbs_name: str | None,
) -> str:
    """Build the user prompt asking Claude for a KCS-based inspection checklist as JSON."""
    return f"""다음 정보를 바탕으로 검측요청서의 점검 항목 목록을 생성해주세요.
[검측 정보]
- 공사명: {project_name}
- 공종: {inspection_type}
- 위치: {location_detail or "미지정"}
- 관련 WBS: {wbs_name or "미지정"}
- 검측 요청일: {requested_date}
다음 JSON 형식으로 체크리스트 항목을 10개 이내로 작성하세요:
{{
"checklist_items": [
{{
"item": "점검항목명",
"standard": "기준값 또는 기준 내용",
"timing": "시공전|시공중|시공후",
"passed": null
}}
]
}}
KCS 시방서 기준에 맞는 구체적인 항목으로 작성하세요."""
+31
View File
@@ -0,0 +1,31 @@
# System prompt (Korean) for grounded RAG Q&A: answer ONLY from the supplied
# context, cite statute articles / KCS codes, state when the context lacks the
# answer, and never give legal judgments or personal opinions.
SYSTEM_PROMPT = """당신은 대한민국 건설 법규 및 KCS(한국건설기준) 시방서 전문 어시스턴트입니다.
반드시 제공된 참고 자료(Context)에서 근거를 찾아 답변해야 합니다.
답변 원칙:
1. 제공된 Context에서만 근거를 찾아 답변합니다
2. Context에 해당 정보가 없으면 "제공된 자료에서 해당 정보를 찾을 수 없습니다"라고 명시합니다
3. 법령 조항 번호, KCS 코드 등 출처를 명확히 인용합니다
4. 이 답변은 참고용이며 법률 자문이 아님을 명심하세요
5. 안전과 관련된 사항은 반드시 전문가 확인을 권고합니다
금지 사항:
- Context에 없는 내용을 임의로 추가하는 것
- 법적 판단이나 책임 소재 결정
- 개인 의견 제시
"""
def build_prompt(question: str, context_chunks: list[dict]) -> str:
    """Build the grounded-QA prompt: cited context blocks followed by the question."""
    # Each chunk is rendered with a [source: title | type] header line.
    rendered = []
    for chunk in context_chunks:
        header = f"[출처: {chunk.get('title', '알 수 없음')} | {chunk.get('source_type', '')}]"
        rendered.append(f"{header}\n{chunk.get('content', '')}")
    context_text = "\n\n---\n\n".join(rendered)
    return f"""다음 참고 자료를 바탕으로 질문에 답변해주세요.
[참고 자료]
{context_text}
[질문]
{question}
위 참고 자료에 근거하여 답변해주세요. 출처를 명확히 인용하고, 자료에서 찾을 수 없는 내용은 그렇다고 명시하세요."""
+81
View File
@@ -0,0 +1,81 @@
# System prompts for the Claude calls that draft client-facing progress
# reports (weekly / monthly).  Korean model input — do not translate or reflow.
WEEKLY_SYSTEM_PROMPT = """당신은 대한민국 토목건설 현장의 공사관리 전문가입니다.
주간 공정보고서를 작성합니다. 발주처에 제출하는 공식 문서입니다.
작성 원칙:
1. 객관적 데이터를 기반으로 작성합니다
2. 계획 대비 실적을 명확히 비교합니다
3. 다음 주 예정 공사를 구체적으로 기술합니다
4. 문제점과 대책을 포함합니다
5. 전문적이고 간결한 문체를 사용합니다
"""
MONTHLY_SYSTEM_PROMPT = """당신은 대한민국 토목건설 현장의 공사관리 전문가입니다.
월간 공정보고서를 작성합니다. 발주처에 제출하는 공식 문서입니다.
작성 원칙:
1. 당월 주요 공사 실적을 종합합니다
2. 공정률 현황과 기성 현황을 포함합니다
3. 주요 문제점과 해결 내용을 기술합니다
4. 익월 공사 계획을 수립합니다
5. 공사 품질/안전 현황을 포함합니다
"""
def build_weekly_prompt(
    project_name: str,
    period_start: str,
    period_end: str,
    daily_summaries: list[dict],
    overall_progress_pct: float,
    weather_issues: list[str],
) -> str:
    """Build the weekly progress-report prompt from daily report digests.

    Empty sections are replaced by explicit "none" markers so the model never
    sees a blank slot.  Work content is truncated to 100 chars per day.
    """
    day_lines = [
        f"- {s.get('date', '')}: {s.get('work_content', '')[:100]}"
        for s in daily_summaries
    ]
    daily_block = "\n".join(day_lines) or "작업일보 없음"
    weather_block = "\n".join(weather_issues) if weather_issues else "날씨 특이사항 없음"
    return f"""다음 정보를 바탕으로 주간 공정보고서 '금주 공사현황' 섹션을 작성해주세요.
[보고 기간]
- 공사명: {project_name}
- 기간: {period_start} ~ {period_end}
[일별 작업 현황]
{daily_block}
[공정 현황]
- 전체 공정률: {overall_progress_pct:.1f}%
[날씨 영향]
{weather_block}
주간 공정보고서 형식으로 400~600자 분량으로 작성해주세요:
1. 금주 주요 공사 내용
2. 공정 현황 (계획 대비 실적)
3. 특이사항 및 대책
4. 차주 예정 공사"""
def build_monthly_prompt(
    project_name: str,
    period_start: str,
    period_end: str,
    weekly_summaries: list[str],
    overall_progress_pct: float,
) -> str:
    """Build the monthly progress-report prompt from weekly summary lines."""
    if weekly_summaries:
        weekly_block = "\n".join(weekly_summaries)
    else:
        weekly_block = "주간 현황 없음"
    return f"""다음 정보를 바탕으로 월간 공정보고서를 작성해주세요.
[보고 기간]
- 공사명: {project_name}
- 기간: {period_start} ~ {period_end}
- 전체 공정률: {overall_progress_pct:.1f}%
[주간별 현황 요약]
{weekly_block}
월간 공정보고서 형식으로 600~800자 분량으로 작성해주세요:
1. 당월 공사 개요
2. 공정 현황 (계획 대비 실적, 공정률)
3. 주요 시공 내용
4. 품질/안전 현황
5. 문제점 및 대책
6. 익월 공사 계획"""
+125
View File
@@ -0,0 +1,125 @@
"""
RAG (Retrieval-Augmented Generation) service.
Embeds questions, retrieves relevant chunks, and generates answers with Claude.
"""
import httpx
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, text
from app.config import settings
from app.models.rag import RagChunk, RagSource
from app.services.ai_engine import complete
from app.services.prompts.rag import SYSTEM_PROMPT, build_prompt
async def embed_text(text_input: str) -> list[float]:
    """Return the embedding vector for *text_input*.

    Provider selection: Voyage AI wins when both keys are configured; falls
    back to OpenAI.  Raises ValueError when neither VOYAGE_API_KEY nor
    OPENAI_API_KEY is set.
    """
    if settings.VOYAGE_API_KEY:
        return await _embed_voyage(text_input)
    if settings.OPENAI_API_KEY:
        return await _embed_openai(text_input)
    raise ValueError("임베딩 API 키가 설정되지 않았습니다 (VOYAGE_API_KEY 또는 OPENAI_API_KEY)")
async def _embed_voyage(text_input: str) -> list[float]:
    """Call the Voyage AI embeddings endpoint; return the single vector."""
    headers = {"Authorization": f"Bearer {settings.VOYAGE_API_KEY}"}
    payload = {"model": settings.EMBEDDING_MODEL, "input": text_input}
    async with httpx.AsyncClient(timeout=30.0) as client:
        resp = await client.post(
            "https://api.voyageai.com/v1/embeddings",
            headers=headers,
            json=payload,
        )
    resp.raise_for_status()
    return resp.json()["data"][0]["embedding"]
async def _embed_openai(text_input: str) -> list[float]:
    """Call the OpenAI embeddings endpoint (text-embedding-3-small); return the vector."""
    headers = {"Authorization": f"Bearer {settings.OPENAI_API_KEY}"}
    payload = {"model": "text-embedding-3-small", "input": text_input}
    async with httpx.AsyncClient(timeout=30.0) as client:
        resp = await client.post(
            "https://api.openai.com/v1/embeddings",
            headers=headers,
            json=payload,
        )
    resp.raise_for_status()
    return resp.json()["data"][0]["embedding"]
async def retrieve_chunks(
    db: AsyncSession,
    question_embedding: list[float],
    top_k: int = 5,
    source_types: list[str] | None = None,
) -> list[dict]:
    """Retrieve the chunks most relevant to *question_embedding*.

    Uses pgvector cosine distance (``<=>``); ``relevance_score`` is
    ``1 - distance``.  Results are ordered most-relevant first, optionally
    filtered by ``rag_sources.source_type``, and limited to *top_k*.

    Security fix: the previous version interpolated the embedding literal and
    the caller-supplied ``source_types`` directly into the SQL string (SQL
    injection risk for the latter).  All dynamic values are now passed as
    bound parameters; only the fixed-shape placeholder list is formatted in.
    """
    embedding_str = "[" + ",".join(str(x) for x in question_embedding) + "]"
    params: dict = {"emb": embedding_str, "top_k": top_k}
    source_filter = ""
    if source_types:
        # One named bind parameter per requested source type.
        placeholders = ", ".join(f":st{i}" for i in range(len(source_types)))
        source_filter = f"AND rs.source_type IN ({placeholders})"
        params.update({f"st{i}": t for i, t in enumerate(source_types)})
    query = text(f"""
        SELECT
            rc.id,
            rc.content,
            rc.metadata,
            rs.title,
            rs.source_type,
            1 - (rc.embedding <=> CAST(:emb AS vector)) AS relevance_score
        FROM rag_chunks rc
        JOIN rag_sources rs ON rs.id = rc.source_id
        WHERE rc.embedding IS NOT NULL
        {source_filter}
        ORDER BY rc.embedding <=> CAST(:emb AS vector)
        LIMIT :top_k
    """)
    result = await db.execute(query, params)
    rows = result.fetchall()
    return [
        {
            "id": str(row.id),
            "content": row.content,
            "metadata": row.metadata,
            "title": row.title,
            "source_type": row.source_type,
            "relevance_score": float(row.relevance_score),
        }
        for row in rows
    ]
async def ask(
    db: AsyncSession,
    question: str,
    top_k: int = 5,
    source_types: list[str] | None = None,
) -> dict:
    """Full RAG pipeline: embed the question, retrieve context, generate an answer."""
    question_embedding = await embed_text(question)
    chunks = await retrieve_chunks(db, question_embedding, top_k, source_types)
    if not chunks:
        # Nothing retrieved: short-circuit with a fixed "no sources" answer
        # instead of calling the model with an empty context.
        return {
            "question": question,
            "answer": "관련 자료를 찾을 수 없습니다. 더 구체적인 질문을 입력하거나, 관련 자료가 업로드되었는지 확인해주세요.",
            "sources": [],
        }
    answer = await complete(
        messages=[{"role": "user", "content": build_prompt(question, chunks)}],
        system=SYSTEM_PROMPT,
        temperature=0.5,
    )
    return {"question": question, "answer": answer, "sources": chunks}
+99
View File
@@ -0,0 +1,99 @@
"""Weekly and monthly report generation."""
from app.services.ai_engine import complete
from app.services.prompts.report import (
WEEKLY_SYSTEM_PROMPT, MONTHLY_SYSTEM_PROMPT,
build_weekly_prompt, build_monthly_prompt,
)
async def generate_weekly_report(
    project_name: str,
    period_start: str,
    period_end: str,
    daily_reports: list,
    overall_progress_pct: float,
    weather_alerts: list,
) -> tuple[str, dict]:
    """Generate the weekly report.

    Returns ``(ai_text, content_json)``: the Claude-written narrative and the
    structured stats dict stored alongside it.
    """
    # Single pass over the daily reports: collect digests and head counts.
    daily_summaries = []
    total_workers = 0
    for report in daily_reports:
        daily_summaries.append({
            "date": str(report.report_date),
            "work_content": report.work_content or "",
        })
        # workers_count is presumably a {trade: headcount} mapping — summing
        # its values matches the original behavior; None counts as 0.
        if report.workers_count:
            total_workers += sum(report.workers_count.values())
    weather_issues = []
    for alert in weather_alerts:
        weather_issues.append(f"{alert.alert_date}: {alert.message}")
    prompt = build_weekly_prompt(
        project_name=project_name,
        period_start=period_start,
        period_end=period_end,
        daily_summaries=daily_summaries,
        overall_progress_pct=overall_progress_pct,
        weather_issues=weather_issues,
    )
    ai_text = await complete(
        messages=[{"role": "user", "content": prompt}],
        system=WEEKLY_SYSTEM_PROMPT,
        temperature=0.3,
    )
    content_json = {
        "period_start": period_start,
        "period_end": period_end,
        "overall_progress_pct": overall_progress_pct,
        "daily_count": len(daily_reports),
        "total_workers": total_workers,
        "weather_alert_count": len(weather_alerts),
    }
    return ai_text, content_json
async def generate_monthly_report(
    project_name: str,
    period_start: str,
    period_end: str,
    daily_reports: list,
    overall_progress_pct: float,
) -> tuple[str, dict]:
    """Generate the monthly report; returns (ai_text, content_json)."""
    # Sample roughly one daily report per week ([::7]) to keep the prompt
    # compact; reports without work content are skipped.
    weekly_summaries = [
        f"- {report.report_date}: {report.work_content[:80]}..."
        for report in daily_reports[::7]
        if report.work_content
    ]
    prompt = build_monthly_prompt(
        project_name=project_name,
        period_start=period_start,
        period_end=period_end,
        weekly_summaries=weekly_summaries,
        overall_progress_pct=overall_progress_pct,
    )
    ai_text = await complete(
        messages=[{"role": "user", "content": prompt}],
        system=MONTHLY_SYSTEM_PROMPT,
        temperature=0.3,
    )
    content_json = {
        "period_start": period_start,
        "period_end": period_end,
        "overall_progress_pct": overall_progress_pct,
        "daily_count": len(daily_reports),
    }
    return ai_text, content_json
+202
View File
@@ -0,0 +1,202 @@
"""
기상청 Open API (KMA) integration.
Fetches short-term (단기예보) and medium-term (중기예보) forecasts.
"""
import httpx
from datetime import date, datetime, timedelta, timezone
from typing import Any
from app.config import settings
# Module-level copies of the KMA endpoint and service key, read once from
# settings at import time.
KMA_BASE = settings.KMA_BASE_URL
API_KEY = settings.KMA_API_KEY
# Weather code -> Korean description
WEATHER_CODE_MAP = {
"1": "맑음", "2": "구름조금", "3": "구름많음",
"4": "흐림", "5": "", "6": "비눈", "7": "눈비",
"8": "",
}
async def fetch_short_term_forecast(nx: int, ny: int) -> list[dict]:
"""Fetch 단기예보 (3-day, 3-hour interval)."""
now = datetime.now(timezone.utc).astimezone()
# KMA issues forecasts at 02, 05, 08, 11, 14, 17, 20, 23
base_hours = [2, 5, 8, 11, 14, 17, 20, 23]
current_hour = now.hour
base_hour = max([h for h in base_hours if h <= current_hour], default=23)
base_date = now.strftime("%Y%m%d") if current_hour >= 2 else (now - timedelta(days=1)).strftime("%Y%m%d")
base_time = f"{base_hour:02d}00"
params = {
"serviceKey": API_KEY,
"pageNo": 1,
"numOfRows": 1000,
"dataType": "JSON",
"base_date": base_date,
"base_time": base_time,
"nx": nx,
"ny": ny,
}
async with httpx.AsyncClient(timeout=30.0) as client:
resp = await client.get(f"{KMA_BASE}/getVilageFcst", params=params)
resp.raise_for_status()
data = resp.json()
items = data.get("response", {}).get("body", {}).get("items", {}).get("item", [])
return _parse_short_term(items)
def _parse_short_term(items: list[dict]) -> list[dict]:
"""Parse KMA short-term forecast items into daily summaries."""
daily: dict[str, dict] = {}
for item in items:
fcst_date = item.get("fcstDate", "")[:8] # YYYYMMDD
category = item.get("category", "")
value = item.get("fcstValue", "")
if fcst_date not in daily:
daily[fcst_date] = {
"date": f"{fcst_date[:4]}-{fcst_date[4:6]}-{fcst_date[6:]}",
"temp_max": None, "temp_min": None,
"precipitation": 0.0, "wind_speed": None,
"sky": None, "pty": None,
}
d = daily[fcst_date]
if category == "TMX" and value != "-":
d["temp_max"] = float(value)
elif category == "TMN" and value != "-":
d["temp_min"] = float(value)
elif category == "PCP" and value not in ("-", "강수없음"):
try:
d["precipitation"] = max(d["precipitation"], float(value.replace("mm", "").strip()))
except ValueError:
pass
elif category == "WSD":
try:
ws = float(value)
if d["wind_speed"] is None or ws > d["wind_speed"]:
d["wind_speed"] = ws
except ValueError:
pass
elif category == "SKY":
d["sky"] = value
elif category == "PTY":
d["pty"] = value
result = []
for fcst_date in sorted(daily.keys()):
d = daily[fcst_date]
weather_code = d.get("pty") or d.get("sky") or "1"
result.append({
"date": d["date"],
"temperature_high": d["temp_max"],
"temperature_low": d["temp_min"],
"precipitation_mm": d["precipitation"],
"wind_speed_ms": d["wind_speed"],
"weather_code": weather_code,
"weather_desc": WEATHER_CODE_MAP.get(str(weather_code), "알 수 없음"),
})
return result
# --- Weather Constraint Evaluation ---
# Default constraints by work type code
DEFAULT_CONSTRAINTS: dict[str, dict] = {
"CONCRETE": {"min_temp": 5.0, "max_wind": None, "no_rain": True},
"HIGH_WORK": {"min_temp": None, "max_wind": 10.0, "no_rain": False},
"ASPHALT": {"min_temp": 10.0, "max_wind": None, "no_rain": True},
"EARTHWORK": {"min_temp": None, "max_wind": None, "no_rain": True},
"REBAR": {"min_temp": None, "max_wind": None, "no_rain": False},
}
def evaluate_weather_alerts(
forecast: dict,
tasks_on_date: list,
work_type_constraints: dict[str, dict] | None = None,
) -> list[dict]:
"""
Evaluate weather constraints for tasks on a given date.
Returns list of alert dicts.
"""
alerts = []
constraints = work_type_constraints or DEFAULT_CONSTRAINTS
for task in tasks_on_date:
# Determine work type from task name (simple keyword matching)
work_type = _detect_work_type(task.name)
if not work_type or work_type not in constraints:
continue
constraint = constraints[work_type]
temp_low = forecast.get("temperature_low")
wind_speed = forecast.get("wind_speed_ms")
precipitation = forecast.get("precipitation_mm", 0)
# Check temperature
if constraint.get("min_temp") and temp_low is not None:
if temp_low < constraint["min_temp"]:
alerts.append({
"task_id": str(task.id),
"alert_date": forecast.get("date"),
"alert_type": f"cold_{work_type.lower()}",
"severity": "critical" if temp_low < constraint["min_temp"] - 5 else "warning",
"message": (
f"[{task.name}] 최저기온 {temp_low}°C - "
f"{work_type} 작업 기준온도({constraint['min_temp']}°C) 미달. "
f"작업 조정 검토 필요."
),
})
# Check wind
if constraint.get("max_wind") and wind_speed is not None:
if wind_speed > constraint["max_wind"]:
alerts.append({
"task_id": str(task.id),
"alert_date": forecast.get("date"),
"alert_type": f"wind_{work_type.lower()}",
"severity": "critical",
"message": (
f"[{task.name}] 풍속 {wind_speed}m/s - "
f"허용 최대풍속({constraint['max_wind']}m/s) 초과. "
f"고소작업 중단 검토."
),
})
# Check rain
if constraint.get("no_rain") and precipitation and precipitation > 1.0:
alerts.append({
"task_id": str(task.id),
"alert_date": forecast.get("date"),
"alert_type": f"rain_{work_type.lower()}",
"severity": "warning",
"message": (
f"[{task.name}] 강수 예보 {precipitation}mm - "
f"{work_type} 작업 우천 시 제한. 공정 조정 검토."
),
})
return alerts
def _detect_work_type(task_name: str) -> str | None:
"""Simple keyword-based work type detection from task name."""
name_lower = task_name.lower()
if any(k in name_lower for k in ["콘크리트", "타설", "레미콘"]):
return "CONCRETE"
if any(k in name_lower for k in ["고소", "크레인", "비계", "거푸집"]):
return "HIGH_WORK"
if any(k in name_lower for k in ["아스팔트", "포장"]):
return "ASPHALT"
if any(k in name_lower for k in ["성토", "절토", "굴착", "토공"]):
return "EARTHWORK"
if any(k in name_lower for k in ["철근", "배근"]):
return "REBAR"
return None
+55
View File
@@ -0,0 +1,55 @@
# Packaging metadata for the CONAI FastAPI backend (PEP 621, hatchling build).
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "conai-backend"
version = "1.0.0"
description = "CONAI - Construction AI Platform Backend"
requires-python = ">=3.11"
dependencies = [
    # Web framework
    "fastapi>=0.115.0",
    "uvicorn[standard]>=0.32.0",
    # Database
    "sqlalchemy>=2.0.36",
    "alembic>=1.14.0",
    "asyncpg>=0.30.0",
    # NOTE(review): psycopg2 is presumably the sync driver for Alembic
    # migrations (see DATABASE_URL_SYNC) while asyncpg serves the app — confirm.
    "psycopg2-binary>=2.9.10",
    # Supabase
    "supabase>=2.10.0",
    # Auth
    "python-jose[cryptography]>=3.3.0",
    "passlib[bcrypt]>=1.7.4",
    "python-multipart>=0.0.20",
    # AI
    "anthropic>=0.40.0",
    # Config
    "pydantic-settings>=2.6.0",
    "pydantic>=2.10.0",
    # HTTP
    "httpx>=0.28.0",
    # PDF
    "weasyprint>=64.0",
    "jinja2>=3.1.4",
    # Excel
    "openpyxl>=3.1.5",
    # Scheduling
    "apscheduler>=3.10.4",
    # Storage
    "boto3>=1.35.0",
    # Utilities
    "python-dateutil>=2.9.0",
    "pytz>=2024.2",
]

[project.optional-dependencies]
# Test-only extras; pytest-asyncio is required for the async fixtures in conftest.
dev = [
    "pytest>=8.3.0",
    "pytest-asyncio>=0.24.0",
    "httpx>=0.28.0",
    "pytest-cov>=6.0.0",
]

[tool.hatch.build.targets.wheel]
packages = ["app"]
+68
View File
@@ -0,0 +1,68 @@
import pytest
import asyncio
from httpx import AsyncClient, ASGITransport
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker, AsyncSession
from app.main import app
from app.core.database import Base, get_db
from app.core.security import get_password_hash
from app.models.user import User, UserRole
# Dedicated test database: must exist and accept these credentials before the
# suite runs.  Kept separate from the app DB so create_all/drop_all is safe.
TEST_DB_URL = "postgresql+asyncpg://postgres:postgres@localhost:5432/conai_test"
test_engine = create_async_engine(TEST_DB_URL, echo=False)
TestSessionLocal = async_sessionmaker(test_engine, class_=AsyncSession, expire_on_commit=False)


@pytest.fixture(scope="session")
def event_loop():
    # Session-scoped loop so the session-scoped async fixture (db_setup) can share it.
    # NOTE(review): overriding `event_loop` is deprecated in recent pytest-asyncio
    # releases — confirm the pinned pytest-asyncio version still supports this.
    loop = asyncio.get_event_loop_policy().new_event_loop()
    yield loop
    loop.close()


@pytest.fixture(scope="session")
async def db_setup():
    # Create the full schema once per session; drop everything afterwards.
    # NOTE(review): async fixtures under plain @pytest.fixture rely on
    # pytest-asyncio auto mode — verify the asyncio_mode configuration.
    async with test_engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
    yield
    async with test_engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)


@pytest.fixture
async def db(db_setup):
    # Fresh session per test; the trailing rollback discards anything the
    # test left uncommitted.
    async with TestSessionLocal() as session:
        yield session
        await session.rollback()


@pytest.fixture
async def client(db):
    # HTTP client bound to the FastAPI app in-process (ASGITransport, no real
    # network), with get_db overridden to the per-test session.
    async def override_get_db():
        yield db
    app.dependency_overrides[get_db] = override_get_db
    async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as ac:
        yield ac
    app.dependency_overrides.clear()


@pytest.fixture
async def test_user(db):
    # A committed site-manager account for auth-dependent tests.
    user = User(
        email="test@conai.app",
        hashed_password=get_password_hash("testpass123"),
        name="테스트 현장소장",
        role=UserRole.SITE_MANAGER,
    )
    db.add(user)
    await db.commit()
    await db.refresh(user)
    return user


@pytest.fixture
async def auth_headers(client, test_user):
    # Log in through the real endpoint and return a ready-to-use Bearer header.
    resp = await client.post("/api/v1/auth/login", data={"username": "test@conai.app", "password": "testpass123"})
    token = resp.json()["access_token"]
    return {"Authorization": f"Bearer {token}"}
+76
View File
@@ -0,0 +1,76 @@
"""Tests for CPM Gantt calculation."""
import pytest
from datetime import date
from unittest.mock import MagicMock
import uuid
from app.services.gantt import compute_cpm
def make_task(name: str, start: str, end: str) -> MagicMock:
    """Build a lightweight task stub with a random id and ISO-date planned window."""
    task = MagicMock()
    # configure_mock is required for the `name` attribute, which the MagicMock
    # constructor would otherwise treat as the mock's own name.
    task.configure_mock(
        id=uuid.uuid4(),
        name=name,
        planned_start=date.fromisoformat(start),
        planned_end=date.fromisoformat(end),
    )
    return task
def make_dep(pred_id, succ_id) -> MagicMock:
    """Build a dependency stub linking a predecessor task id to a successor task id."""
    dep = MagicMock()
    dep.configure_mock(predecessor_id=pred_id, successor_id=succ_id)
    return dep
def test_cpm_no_dependencies():
    """Two independent tasks: CPM returns per-task data plus a positive duration."""
    tasks = [
        make_task("A", "2026-04-01", "2026-04-05"),
        make_task("B", "2026-04-01", "2026-04-10"),
    ]
    result = compute_cpm(tasks, [])
    assert isinstance(result, tuple)
    cpm_data, duration = result
    assert len(cpm_data) == 2
    assert duration > 0


def test_cpm_serial_tasks():
    """A -> B in sequence: the single chain is entirely critical."""
    t1 = make_task("A", "2026-04-01", "2026-04-05")
    t2 = make_task("B", "2026-04-06", "2026-04-10")
    dep = make_dep(t1.id, t2.id)
    result = compute_cpm([t1, t2], [dep])
    assert isinstance(result, tuple)
    cpm_data, duration = result
    # Serial tasks: both should be critical
    assert cpm_data[t1.id]["is_critical"] is True
    assert cpm_data[t2.id]["is_critical"] is True


def test_cpm_parallel_tasks():
    """In parallel paths, only the longer path is critical."""
    t_start = make_task("Start", "2026-04-01", "2026-04-02")
    t_long = make_task("Long Path", "2026-04-03", "2026-04-20")  # 18 days
    t_short = make_task("Short Path", "2026-04-03", "2026-04-10")  # 8 days
    t_end = make_task("End", "2026-04-21", "2026-04-22")
    # Diamond shape: start fans out to both paths, which rejoin at end.
    deps = [
        make_dep(t_start.id, t_long.id),
        make_dep(t_start.id, t_short.id),
        make_dep(t_long.id, t_end.id),
        make_dep(t_short.id, t_end.id),
    ]
    result = compute_cpm([t_start, t_long, t_short, t_end], deps)
    assert isinstance(result, tuple)
    cpm_data, duration = result
    # Long path and start/end should be critical; short path should not
    assert cpm_data[t_long.id]["is_critical"] is True
    assert cpm_data[t_short.id]["is_critical"] is False


def test_cpm_empty_tasks():
    # NOTE(review): empty input yields a bare {} while non-empty input yields a
    # (cpm_data, duration) tuple — confirm this asymmetric return contract in
    # compute_cpm is intentional; callers must special-case empty schedules.
    result = compute_cpm([], [])
    assert result == {}
+76
View File
@@ -0,0 +1,76 @@
"""Tests for weather service."""
import pytest
from unittest.mock import MagicMock
import uuid
from app.services.weather_service import evaluate_weather_alerts, _detect_work_type, _parse_short_term
def make_task(name: str) -> MagicMock:
    """Build a task stub carrying only a random id and the given name."""
    task = MagicMock()
    # configure_mock is needed because `name` is special in the MagicMock constructor.
    task.configure_mock(id=uuid.uuid4(), name=name)
    return task
def test_detect_work_type_concrete():
    """Concrete keywords (콘크리트 / 타설 / 레미콘) map to CONCRETE."""
    assert _detect_work_type("콘크리트 타설") == "CONCRETE"
    assert _detect_work_type("레미콘 타설 공사") == "CONCRETE"


def test_detect_work_type_high_work():
    """Elevated-work keywords (고소 / 비계) map to HIGH_WORK."""
    assert _detect_work_type("고소 작업") == "HIGH_WORK"
    assert _detect_work_type("비계 설치") == "HIGH_WORK"


def test_detect_work_type_unknown():
    """Names without any known keyword yield None (no constraint applies)."""
    assert _detect_work_type("기타 공사") is None


def test_evaluate_cold_concrete_alert():
    """3 °C is below the 5 °C concrete threshold but within 5 °C of it -> single warning."""
    task = make_task("콘크리트 타설")
    forecast = {
        "date": "2026-04-01",
        "temperature_low": 3.0,
        "wind_speed_ms": 2.0,
        "precipitation_mm": 0.0,
    }
    alerts = evaluate_weather_alerts(forecast, [task])
    assert len(alerts) == 1
    assert alerts[0]["alert_type"] == "cold_concrete"
    assert alerts[0]["severity"] == "warning"


def test_evaluate_rain_concrete_alert():
    """5 mm of forecast rain (> 1 mm threshold) triggers a rain alert for concrete."""
    task = make_task("콘크리트 타설")
    forecast = {
        "date": "2026-04-01",
        "temperature_low": 15.0,
        "wind_speed_ms": 2.0,
        "precipitation_mm": 5.0,
    }
    alerts = evaluate_weather_alerts(forecast, [task])
    assert any(a["alert_type"] == "rain_concrete" for a in alerts)


def test_no_alert_good_weather():
    """Mild, dry, calm conditions produce no alerts at all."""
    task = make_task("콘크리트 타설")
    forecast = {
        "date": "2026-04-01",
        "temperature_low": 15.0,
        "wind_speed_ms": 3.0,
        "precipitation_mm": 0.0,
    }
    alerts = evaluate_weather_alerts(forecast, [task])
    assert len(alerts) == 0


def test_wind_alert_high_work():
    """12 m/s wind exceeds the 10 m/s HIGH_WORK limit -> wind alert."""
    task = make_task("고소 작업 비계")
    forecast = {
        "date": "2026-04-01",
        "temperature_low": 10.0,
        "wind_speed_ms": 12.0,
        "precipitation_mm": 0.0,
    }
    alerts = evaluate_weather_alerts(forecast, [task])
    assert any(a["alert_type"] == "wind_high_work" for a in alerts)