init
This commit is contained in:
parent
51ed7c0a25
commit
f909eb69fb
12
.env.example
Normal file
12
.env.example
Normal file
@ -0,0 +1,12 @@
|
||||
DATABASE_URL=postgresql+asyncpg://user:password@localhost:5432/hr_ai_db
|
||||
|
||||
# Selectel S3 Configuration
|
||||
S3_ENDPOINT_URL=https://s3.selcdn.ru
|
||||
S3_ACCESS_KEY_ID=your_access_key
|
||||
S3_SECRET_ACCESS_KEY=your_secret_key
|
||||
S3_BUCKET_NAME=your_bucket_name
|
||||
S3_REGION=ru-1
|
||||
|
||||
# App Configuration
|
||||
APP_ENV=development
|
||||
DEBUG=true
|
2
.gitignore
vendored
Normal file
2
.gitignore
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
.claude
|
||||
.venv
|
8
.idea/.gitignore
vendored
Normal file
8
.idea/.gitignore
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
# Default ignored files
|
||||
/shelf/
|
||||
/workspace.xml
|
||||
# Editor-based HTTP Client requests
|
||||
/httpRequests/
|
||||
# Datasource local storage ignored files
|
||||
/dataSources/
|
||||
/dataSources.local.xml
|
147
alembic.ini
Normal file
147
alembic.ini
Normal file
@ -0,0 +1,147 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts.
|
||||
# this is typically a path given in POSIX (e.g. forward slashes)
|
||||
# format, relative to the token %(here)s which refers to the location of this
|
||||
# ini file
|
||||
script_location = %(here)s/migrations
|
||||
|
||||
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
||||
# Uncomment the line below if you want the files to be prepended with date and time
|
||||
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
||||
# for all available tokens
|
||||
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
# defaults to the current working directory. for multiple paths, the path separator
|
||||
# is defined by "path_separator" below.
|
||||
prepend_sys_path = .
|
||||
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
|
||||
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
|
||||
# string value is passed to ZoneInfo()
|
||||
# leave blank for localtime
|
||||
# timezone =
|
||||
|
||||
# max length of characters to apply to the "slug" field
|
||||
# truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version location specification; This defaults
|
||||
# to <script_location>/versions. When using multiple version
|
||||
# directories, initial revisions must be specified with --version-path.
|
||||
# The path separator used here should be the separator specified by "path_separator"
|
||||
# below.
|
||||
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
|
||||
|
||||
# path_separator; This indicates what character is used to split lists of file
|
||||
# paths, including version_locations and prepend_sys_path within configparser
|
||||
# files such as alembic.ini.
|
||||
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
|
||||
# to provide os-dependent path splitting.
|
||||
#
|
||||
# Note that in order to support legacy alembic.ini files, this default does NOT
|
||||
# take place if path_separator is not present in alembic.ini. If this
|
||||
# option is omitted entirely, fallback logic is as follows:
|
||||
#
|
||||
# 1. Parsing of the version_locations option falls back to using the legacy
|
||||
# "version_path_separator" key, which if absent then falls back to the legacy
|
||||
# behavior of splitting on spaces and/or commas.
|
||||
# 2. Parsing of the prepend_sys_path option falls back to the legacy
|
||||
# behavior of splitting on spaces, commas, or colons.
|
||||
#
|
||||
# Valid values for path_separator are:
|
||||
#
|
||||
# path_separator = :
|
||||
# path_separator = ;
|
||||
# path_separator = space
|
||||
# path_separator = newline
|
||||
#
|
||||
# Use os.pathsep. Default configuration used for new projects.
|
||||
path_separator = os
|
||||
|
||||
# set to 'true' to search source files recursively
|
||||
# in each "version_locations" directory
|
||||
# new in Alembic version 1.10
|
||||
# recursive_version_locations = false
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
# database URL. This is consumed by the user-maintained env.py script only.
|
||||
# other means of configuring database URLs may be customized within the env.py
|
||||
# file.
|
||||
# sqlalchemy.url = driver://user:pass@localhost/dbname
|
||||
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts. See the documentation for further
|
||||
# detail and examples
|
||||
|
||||
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||
# hooks = black
|
||||
# black.type = console_scripts
|
||||
# black.entrypoint = black
|
||||
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||
|
||||
# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
|
||||
# hooks = ruff
|
||||
# ruff.type = module
|
||||
# ruff.module = ruff
|
||||
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Alternatively, use the exec runner to execute a binary found on your PATH
|
||||
# hooks = ruff
|
||||
# ruff.type = exec
|
||||
# ruff.executable = ruff
|
||||
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Logging configuration. This is also consumed by the user-maintained
|
||||
# env.py script only.
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARNING
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARNING
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
0
app/__init__.py
Normal file
0
app/__init__.py
Normal file
0
app/core/__init__.py
Normal file
0
app/core/__init__.py
Normal file
21
app/core/config.py
Normal file
21
app/core/config.py
Normal file
@ -0,0 +1,21 @@
|
||||
from pydantic_settings import BaseSettings
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class Settings(BaseSettings):
    """Application configuration loaded from environment variables / .env.

    Field names map to upper-cased env vars (e.g. ``database_url`` ->
    ``DATABASE_URL``); values in ``.env.example`` document the expected keys.
    """

    # Async SQLAlchemy/SQLModel connection string; the default is a local
    # development fallback only — override via DATABASE_URL in production.
    database_url: str = "postgresql+asyncpg://user:password@localhost:5432/hr_ai_db"

    # Selectel S3-compatible storage. Credentials have no defaults on purpose:
    # Settings() fails fast at import time when they are missing.
    s3_endpoint_url: str = "https://s3.selcdn.ru"
    s3_access_key_id: str
    s3_secret_access_key: str
    s3_bucket_name: str
    s3_region: str = "ru-1"

    app_env: str = "development"
    debug: bool = True

    # pydantic-settings v2 style: the v1 inner `class Config` is deprecated.
    # A plain dict is accepted for model_config (SettingsConfigDict is a TypedDict).
    model_config = {"env_file": ".env"}


# Singleton settings instance imported across the app.
settings = Settings()
|
25
app/core/database.py
Normal file
25
app/core/database.py
Normal file
@ -0,0 +1,25 @@
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker
|
||||
from sqlmodel import SQLModel
|
||||
from .config import settings
|
||||
|
||||
from typing import AsyncGenerator

# Async engine configured from application settings; echoes SQL only in debug mode.
engine = create_async_engine(
    settings.database_url,
    echo=settings.debug,
    future=True
)

# Session factory; expire_on_commit=False keeps ORM objects usable after the
# session commits (e.g. objects returned from a request handler).
async_session_maker = async_sessionmaker(
    bind=engine,
    class_=AsyncSession,
    expire_on_commit=False
)


async def get_session() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI dependency: yield a database session, closing it afterwards.

    Fixed annotation: this is an async generator, not a coroutine returning
    AsyncSession.
    """
    async with async_session_maker() as session:
        yield session


async def create_db_and_tables() -> None:
    """Create all tables registered on SQLModel metadata (dev/bootstrap helper)."""
    async with engine.begin() as conn:
        await conn.run_sync(SQLModel.metadata.create_all)
|
52
app/core/s3.py
Normal file
52
app/core/s3.py
Normal file
@ -0,0 +1,52 @@
|
||||
import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
from typing import Optional
|
||||
import uuid
|
||||
from app.core.config import settings
|
||||
|
||||
|
||||
import logging

logger = logging.getLogger(__name__)


class S3Service:
    """Thin wrapper over a boto3 S3 client configured from app settings.

    NOTE(review): boto3 calls are synchronous; the ``async`` methods below
    block the event loop while they run. Consider aioboto3 or running them
    in a thread executor.
    """

    def __init__(self):
        # Endpoint and credentials come from environment-driven settings.
        self.s3_client = boto3.client(
            's3',
            endpoint_url=settings.s3_endpoint_url,
            aws_access_key_id=settings.s3_access_key_id,
            aws_secret_access_key=settings.s3_secret_access_key,
            region_name=settings.s3_region
        )
        self.bucket_name = settings.s3_bucket_name

    async def upload_file(self, file_content: bytes, file_name: str, content_type: str) -> Optional[str]:
        """Upload bytes under a UUID-prefixed key.

        Returns the object's URL on success, or None on a client error
        (the error is logged, not raised).
        """
        try:
            # UUID prefix prevents collisions between identically named uploads.
            file_key = f"{uuid.uuid4()}_{file_name}"

            self.s3_client.put_object(
                Bucket=self.bucket_name,
                Key=file_key,
                Body=file_content,
                ContentType=content_type
            )

            return f"{settings.s3_endpoint_url}/{self.bucket_name}/{file_key}"

        except ClientError as e:
            # Log instead of print so failures reach configured log handlers.
            logger.error("Error uploading file to S3: %s", e)
            return False if False else None  # keep None contract explicit

    async def delete_file(self, file_url: str) -> bool:
        """Delete the object addressed by a URL produced by upload_file.

        Strips the known ``{endpoint}/{bucket}/`` prefix so keys that contain
        '/' (original filename had slashes) are recovered intact; the old
        ``split('/')[-1]`` dropped everything before the last slash.
        """
        try:
            prefix = f"{settings.s3_endpoint_url}/{self.bucket_name}/"
            if file_url.startswith(prefix):
                file_key = file_url[len(prefix):]
            else:
                file_key = file_url.split('/')[-1]
            if not file_key:
                return False

            self.s3_client.delete_object(
                Bucket=self.bucket_name,
                Key=file_key
            )
            return True

        except ClientError as e:
            logger.error("Error deleting file from S3: %s", e)
            return False


# Module-level singleton used by services. NOTE(review): constructed at import
# time, so importing this module requires S3 settings to be present.
s3_service = S3Service()
|
92
app/core/session_middleware.py
Normal file
92
app/core/session_middleware.py
Normal file
@ -0,0 +1,92 @@
|
||||
from fastapi import Request, Response
|
||||
from fastapi.responses import JSONResponse
|
||||
from starlette.middleware.base import BaseHTTPMiddleware
|
||||
from starlette.types import ASGIApp
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from app.core.database import async_session_maker
|
||||
from app.repositories.session_repository import SessionRepository
|
||||
from app.models.session import Session
|
||||
import logging
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SessionMiddleware(BaseHTTPMiddleware):
    """Middleware that attaches a persistent Session to every request.

    Resolves the session referenced by cookie or ``X-Session-ID`` header,
    creates a fresh one when none is valid, stores it on
    ``request.state.session`` and mirrors its id back via a cookie.
    """

    # Paths that never need session handling.
    EXCLUDED_PREFIXES = ("/docs", "/redoc", "/openapi.json", "/health", "/favicon.ico")

    def __init__(self, app: ASGIApp, cookie_name: str = "session_id"):
        super().__init__(app)
        self.cookie_name = cookie_name

    async def dispatch(self, request: Request, call_next):
        # Static/utility endpoints and CORS preflight requests are passed through.
        if (request.url.path.startswith(self.EXCLUDED_PREFIXES) or
                request.method == "OPTIONS"):
            return await call_next(request)

        # Client may present the id either as a cookie or as a header.
        incoming_id = request.cookies.get(self.cookie_name) or request.headers.get("X-Session-ID")

        try:
            session_obj = await self._resolve_session(incoming_id, request)
            # Expose the session to downstream handlers.
            request.state.session = session_obj
        except Exception as e:
            logger.error(f"Session middleware error: {e}")
            return JSONResponse(
                status_code=500,
                content={"error": "Session management error"}
            )

        response = await call_next(request)

        # Mirror the session id back to the client on every response.
        if session_obj and isinstance(response, Response):
            response.set_cookie(
                key=self.cookie_name,
                value=session_obj.session_id,
                max_age=30 * 24 * 60 * 60,  # 30 days
                httponly=True,
                secure=False,  # dev environment: plain HTTP allowed
                samesite="lax"
            )

        return response

    async def _resolve_session(self, incoming_id, request: Request):
        """Return the active session for ``incoming_id``, creating one when absent/expired."""
        async with async_session_maker() as db_session:
            repo = SessionRepository(db_session)

            existing = None
            if incoming_id:
                existing = await repo.get_by_session_id(incoming_id)
                if existing and not existing.is_expired():
                    # Valid session — record the activity.
                    await repo.update_last_activity(incoming_id)
                else:
                    existing = None

            if existing:
                return existing

            created = await repo.create_session(
                user_agent=request.headers.get("User-Agent"),
                ip_address=getattr(request.client, 'host', None) if request.client else None
            )
            logger.info(f"Created new session: {created.session_id}")
            return created
|
||||
|
||||
|
||||
async def get_current_session(request: "Request") -> "Optional[Session]":
    """Return the session stored on the request by SessionMiddleware.

    Returns None when the middleware did not attach a session (e.g. excluded
    paths) — the original ``-> Session`` annotation hid that possibility.
    Annotations are quoted so they stay purely static.
    """
    return getattr(request.state, 'session', None)
|
||||
|
||||
|
||||
async def get_db_session() -> "AsyncGenerator[AsyncSession, None]":
    """Yield a fresh database session for endpoint dependencies; closed on exit.

    Fixed annotation: this is an async generator used with FastAPI's
    ``Depends``, not a coroutine returning AsyncSession. The annotation is
    quoted so it stays purely static.
    """
    async with async_session_maker() as session:
        yield session
|
17
app/models/__init__.py
Normal file
17
app/models/__init__.py
Normal file
@ -0,0 +1,17 @@
|
||||
from .vacancy import Vacancy, VacancyCreate, VacancyUpdate, VacancyRead
|
||||
from .resume import Resume, ResumeCreate, ResumeUpdate, ResumeRead
|
||||
from .session import Session, SessionCreate, SessionRead
|
||||
|
||||
__all__ = [
|
||||
"Vacancy",
|
||||
"VacancyCreate",
|
||||
"VacancyUpdate",
|
||||
"VacancyRead",
|
||||
"Resume",
|
||||
"ResumeCreate",
|
||||
"ResumeUpdate",
|
||||
"ResumeRead",
|
||||
"Session",
|
||||
"SessionCreate",
|
||||
"SessionRead",
|
||||
]
|
57
app/models/resume.py
Normal file
57
app/models/resume.py
Normal file
@ -0,0 +1,57 @@
|
||||
from sqlmodel import SQLModel, Field, Relationship
|
||||
from typing import Optional
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class ResumeStatus(str, Enum):
    """Lifecycle states of a submitted resume/application.

    Subclasses ``str`` so members compare equal to their plain string values
    and serialize naturally.
    """

    # Just submitted, not yet looked at.
    PENDING = "pending"
    # Being screened.
    UNDER_REVIEW = "under_review"
    INTERVIEW_SCHEDULED = "interview_scheduled"
    # Interview done; a report may be attached.
    INTERVIEWED = "interviewed"
    REJECTED = "rejected"
    ACCEPTED = "accepted"
|
||||
|
||||
|
||||
class ResumeBase(SQLModel):
    """Shared columns for the resume table and its read schema."""

    vacancy_id: int = Field(foreign_key="vacancy.id")
    session_id: int = Field(foreign_key="session.id")
    applicant_name: str = Field(max_length=255)
    applicant_email: str = Field(max_length=255)
    # default=None is required: in pydantic v2 / SQLModel, Field(max_length=...)
    # without a default leaves the field REQUIRED even though it is Optional.
    applicant_phone: Optional[str] = Field(default=None, max_length=50)
    resume_file_url: str
    cover_letter: Optional[str] = None
    status: ResumeStatus = Field(default=ResumeStatus.PENDING)
    interview_report_url: Optional[str] = None
    notes: Optional[str] = None


class Resume(ResumeBase, table=True):
    """Database table model for submitted resumes.

    NOTE(review): updated_at is only set at creation here; nothing in this
    file refreshes it on update.
    """

    id: Optional[int] = Field(default=None, primary_key=True)
    created_at: datetime = Field(default_factory=datetime.utcnow)
    updated_at: datetime = Field(default_factory=datetime.utcnow)


class ResumeCreate(SQLModel):
    """Submission payload; session_id is attached server-side, not by the client."""

    vacancy_id: int
    applicant_name: str = Field(max_length=255)
    applicant_email: str = Field(max_length=255)
    # Same missing-default fix as ResumeBase.applicant_phone.
    applicant_phone: Optional[str] = Field(default=None, max_length=50)
    resume_file_url: str
    cover_letter: Optional[str] = None


class ResumeUpdate(SQLModel):
    """Partial-update payload; every field optional, None means 'unchanged'."""

    applicant_name: Optional[str] = None
    applicant_email: Optional[str] = None
    applicant_phone: Optional[str] = None
    cover_letter: Optional[str] = None
    status: Optional[ResumeStatus] = None
    interview_report_url: Optional[str] = None
    notes: Optional[str] = None


class ResumeRead(ResumeBase):
    """Read schema: base fields plus server-generated id and timestamps."""

    id: int
    created_at: datetime
    updated_at: datetime
|
47
app/models/session.py
Normal file
47
app/models/session.py
Normal file
@ -0,0 +1,47 @@
|
||||
from sqlmodel import SQLModel, Field
|
||||
from typing import Optional
|
||||
from datetime import datetime, timedelta
|
||||
import uuid
|
||||
|
||||
|
||||
class SessionBase(SQLModel):
    """Shared session columns: public id, client info and expiry bookkeeping."""

    session_id: str = Field(max_length=255, unique=True, index=True)
    # default=None is required: in pydantic v2 / SQLModel, Field(max_length=...)
    # without a default leaves the field REQUIRED even though it is Optional.
    user_agent: Optional[str] = Field(default=None, max_length=512)
    # 45 chars accommodates the longest textual IPv6 form.
    ip_address: Optional[str] = Field(default=None, max_length=45)
    is_active: bool = Field(default=True)
    # Sessions live 30 days by default.
    expires_at: datetime = Field(default_factory=lambda: datetime.utcnow() + timedelta(days=30))
    last_activity: datetime = Field(default_factory=datetime.utcnow)


class Session(SessionBase, table=True):
    """Database-backed browser session."""

    id: Optional[int] = Field(default=None, primary_key=True)
    created_at: datetime = Field(default_factory=datetime.utcnow)
    updated_at: datetime = Field(default_factory=datetime.utcnow)

    @classmethod
    def create_new_session(cls, user_agent: Optional[str] = None, ip_address: Optional[str] = None) -> "Session":
        """Create a new session with a random UUID4 session_id."""
        return cls(
            session_id=str(uuid.uuid4()),
            user_agent=user_agent,
            ip_address=ip_address
        )

    def is_expired(self) -> bool:
        """True when the expiry timestamp has passed (naive-UTC comparison)."""
        return datetime.utcnow() > self.expires_at

    def extend_session(self, days: int = 30) -> None:
        """Push expiry ``days`` into the future and record activity now."""
        self.expires_at = datetime.utcnow() + timedelta(days=days)
        self.last_activity = datetime.utcnow()


class SessionCreate(SessionBase):
    """Creation schema — identical to the base fields."""
    pass


class SessionRead(SessionBase):
    """Read schema with server-generated id and timestamps."""

    id: int
    created_at: datetime
    updated_at: datetime
|
95
app/models/vacancy.py
Normal file
95
app/models/vacancy.py
Normal file
@ -0,0 +1,95 @@
|
||||
from sqlmodel import SQLModel, Field
|
||||
from typing import Optional, List
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class EmploymentType(str, Enum):
    """Employment kind; values follow the hh.ru vocabulary used elsewhere here."""

    FULL_TIME = "full"
    PART_TIME = "part"
    PROJECT = "project"
    VOLUNTEER = "volunteer"
    PROBATION = "probation"


class Experience(str, Enum):
    """Required experience bracket, in years."""

    NO_EXPERIENCE = "noExperience"
    BETWEEN_1_AND_3 = "between1And3"
    BETWEEN_3_AND_6 = "between3And6"
    MORE_THAN_6 = "moreThan6"


class Schedule(str, Enum):
    """Work schedule / format."""

    FULL_DAY = "fullDay"
    SHIFT = "shift"
    FLEXIBLE = "flexible"
    REMOTE = "remote"
    FLY_IN_FLY_OUT = "flyInFlyOut"
|
||||
|
||||
|
||||
class VacancyBase(SQLModel):
    """Shared vacancy columns; field vocabulary mirrors the hh.ru schema."""

    title: str = Field(max_length=255)
    description: str
    key_skills: Optional[str] = None
    employment_type: EmploymentType
    experience: Experience
    schedule: Schedule
    salary_from: Optional[int] = None
    salary_to: Optional[int] = None
    # ISO-ish 3-letter currency code; "RUR" is the hh.ru convention for roubles.
    salary_currency: Optional[str] = Field(default="RUR", max_length=3)
    gross_salary: Optional[bool] = False
    company_name: str = Field(max_length=255)
    company_description: Optional[str] = None
    area_name: str = Field(max_length=255)
    metro_stations: Optional[str] = None
    address: Optional[str] = None
    professional_roles: Optional[str] = None
    # default=None is required on the three contact fields: in pydantic v2 /
    # SQLModel, Field(max_length=...) without a default leaves the field
    # REQUIRED even though the type is Optional.
    contacts_name: Optional[str] = Field(default=None, max_length=255)
    contacts_email: Optional[str] = Field(default=None, max_length=255)
    contacts_phone: Optional[str] = Field(default=None, max_length=50)
    is_archived: bool = Field(default=False)
    premium: bool = Field(default=False)
    published_at: Optional[datetime] = Field(default_factory=datetime.utcnow)
    url: Optional[str] = None


class Vacancy(VacancyBase, table=True):
    """Database table model for vacancies.

    NOTE(review): updated_at is only set at creation here; nothing in this
    file refreshes it on update.
    """

    id: Optional[int] = Field(default=None, primary_key=True)
    created_at: datetime = Field(default_factory=datetime.utcnow)
    updated_at: datetime = Field(default_factory=datetime.utcnow)


class VacancyCreate(VacancyBase):
    """Creation schema — identical to the base fields."""
    pass


class VacancyUpdate(SQLModel):
    """Partial-update payload; every field optional, None means 'unchanged'."""

    title: Optional[str] = None
    description: Optional[str] = None
    key_skills: Optional[str] = None
    employment_type: Optional[EmploymentType] = None
    experience: Optional[Experience] = None
    schedule: Optional[Schedule] = None
    salary_from: Optional[int] = None
    salary_to: Optional[int] = None
    salary_currency: Optional[str] = None
    gross_salary: Optional[bool] = None
    company_name: Optional[str] = None
    company_description: Optional[str] = None
    area_name: Optional[str] = None
    metro_stations: Optional[str] = None
    address: Optional[str] = None
    professional_roles: Optional[str] = None
    contacts_name: Optional[str] = None
    contacts_email: Optional[str] = None
    contacts_phone: Optional[str] = None
    is_archived: Optional[bool] = None
    premium: Optional[bool] = None
    published_at: Optional[datetime] = None
    url: Optional[str] = None


class VacancyRead(VacancyBase):
    """Read schema: base fields plus server-generated id and timestamps."""

    id: int
    created_at: datetime
    updated_at: datetime
|
4
app/repositories/__init__.py
Normal file
4
app/repositories/__init__.py
Normal file
@ -0,0 +1,4 @@
|
||||
from .vacancy_repository import VacancyRepository
|
||||
from .resume_repository import ResumeRepository
|
||||
|
||||
__all__ = ["VacancyRepository", "ResumeRepository"]
|
49
app/repositories/base_repository.py
Normal file
49
app/repositories/base_repository.py
Normal file
@ -0,0 +1,49 @@
|
||||
from typing import TypeVar, Generic, Optional, List, Type
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, update, delete
|
||||
from sqlmodel import SQLModel
|
||||
|
||||
ModelType = TypeVar("ModelType", bound=SQLModel)
|
||||
|
||||
|
||||
class BaseRepository(Generic[ModelType]):
    """Generic async CRUD repository over a SQLModel table with an ``id`` PK."""

    def __init__(self, model: Type[ModelType], session: AsyncSession):
        self.model = model
        self.session = session

    async def create(self, obj_in: ModelType) -> ModelType:
        """Validate, insert and return the persisted object (with DB defaults)."""
        db_obj = self.model.model_validate(obj_in)
        self.session.add(db_obj)
        await self.session.commit()
        await self.session.refresh(db_obj)
        return db_obj

    async def get(self, id: int) -> Optional[ModelType]:
        """Return the row with the given primary key, or None."""
        statement = select(self.model).where(self.model.id == id)
        result = await self.session.execute(statement)
        return result.scalar_one_or_none()

    async def get_all(self, skip: int = 0, limit: int = 100) -> List[ModelType]:
        """Return one page of rows."""
        statement = select(self.model).offset(skip).limit(limit)
        result = await self.session.execute(statement)
        return result.scalars().all()

    async def update(self, id: int, obj_in: dict) -> Optional[ModelType]:
        """Apply column values to the row; return the updated row or None.

        An empty ``obj_in`` would produce an UPDATE without a SET clause,
        which SQLAlchemy rejects at compile time — treat it as a no-op read.
        """
        if not obj_in:
            return await self.get(id)
        statement = (
            update(self.model)
            .where(self.model.id == id)
            .values(**obj_in)
            .returning(self.model)
        )
        result = await self.session.execute(statement)
        db_obj = result.scalar_one_or_none()
        if db_obj:
            await self.session.commit()
            await self.session.refresh(db_obj)
        return db_obj

    async def delete(self, id: int) -> bool:
        """Delete the row; True when a row was actually removed."""
        statement = delete(self.model).where(self.model.id == id)
        result = await self.session.execute(statement)
        await self.session.commit()
        return result.rowcount > 0
|
55
app/repositories/resume_repository.py
Normal file
55
app/repositories/resume_repository.py
Normal file
@ -0,0 +1,55 @@
|
||||
from typing import List, Optional
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
from app.models.resume import Resume, ResumeStatus
|
||||
from .base_repository import BaseRepository
|
||||
|
||||
|
||||
class ResumeRepository(BaseRepository[Resume]):
    """Query helpers for resumes, scoped by vacancy, session, status or email."""

    def __init__(self, session: AsyncSession):
        super().__init__(Resume, session)

    async def get_by_vacancy_id(self, vacancy_id: int) -> List[Resume]:
        """All resumes submitted to one vacancy."""
        statement = select(Resume).where(Resume.vacancy_id == vacancy_id)
        result = await self.session.execute(statement)
        return result.scalars().all()

    async def get_by_session_id(self, session_id: int) -> List[Resume]:
        """All resumes belonging to one user session (internal integer id)."""
        statement = select(Resume).where(Resume.session_id == session_id)
        result = await self.session.execute(statement)
        return result.scalars().all()

    async def get_by_vacancy_and_session(self, vacancy_id: int, session_id: int) -> List[Resume]:
        """Resumes one session submitted to a specific vacancy."""
        statement = select(Resume).where(
            Resume.vacancy_id == vacancy_id,
            Resume.session_id == session_id
        )
        result = await self.session.execute(statement)
        return result.scalars().all()

    async def get_by_status(self, status: ResumeStatus) -> List[Resume]:
        """All resumes currently in the given status."""
        statement = select(Resume).where(Resume.status == status)
        result = await self.session.execute(statement)
        return result.scalars().all()

    async def get_by_applicant_email(self, email: str) -> List[Resume]:
        """All resumes filed under one applicant email."""
        statement = select(Resume).where(Resume.applicant_email == email)
        result = await self.session.execute(statement)
        return result.scalars().all()

    async def create_with_session(self, resume_data: dict, session_id: int) -> Resume:
        """Create a resume bound to the given session.

        Fixed: builds a copy of the input dict instead of writing
        ``session_id`` into the caller's dict in place (hidden side effect).
        """
        payload = {**resume_data, 'session_id': session_id}
        return await self.create(Resume(**payload))

    async def update_status(self, resume_id: int, status: ResumeStatus) -> Optional[Resume]:
        """Set only the status field."""
        return await self.update(resume_id, {"status": status})

    async def add_interview_report(self, resume_id: int, report_url: str) -> Optional[Resume]:
        """Attach an interview report URL and advance status to INTERVIEWED."""
        return await self.update(resume_id, {
            "interview_report_url": report_url,
            "status": ResumeStatus.INTERVIEWED
        })
|
64
app/repositories/session_repository.py
Normal file
64
app/repositories/session_repository.py
Normal file
@ -0,0 +1,64 @@
|
||||
from typing import Optional
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
from app.models.session import Session
|
||||
from app.repositories.base_repository import BaseRepository
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class SessionRepository(BaseRepository[Session]):
    """Repository for browser sessions keyed by their public session_id string."""

    def __init__(self, session: AsyncSession):
        super().__init__(Session, session)

    async def get_by_session_id(self, session_id: str) -> Optional[Session]:
        """Return the active, non-expired session with this public id, or None."""
        stmt = select(Session).where(
            Session.session_id == session_id,
            Session.is_active == True,
            Session.expires_at > datetime.utcnow()
        )
        return (await self.session.execute(stmt)).scalar_one_or_none()

    async def create_session(self, user_agent: Optional[str] = None, ip_address: Optional[str] = None) -> Session:
        """Create and persist a brand-new session with a fresh UUID id."""
        fresh = Session.create_new_session(user_agent=user_agent, ip_address=ip_address)
        return await self.create(fresh)

    async def _persist(self, target: Session) -> None:
        """Stamp updated_at, then commit and refresh the given session row."""
        target.updated_at = datetime.utcnow()
        self.session.add(target)
        await self.session.commit()
        await self.session.refresh(target)

    async def deactivate_session(self, session_id: str) -> bool:
        """Mark the session inactive; True when a matching active session existed."""
        target = await self.get_by_session_id(session_id)
        if target is None:
            return False
        target.is_active = False
        await self._persist(target)
        return True

    async def update_last_activity(self, session_id: str) -> bool:
        """Refresh the session's last_activity timestamp; True on success."""
        target = await self.get_by_session_id(session_id)
        if target is None:
            return False
        target.last_activity = datetime.utcnow()
        await self._persist(target)
        return True

    async def cleanup_expired_sessions(self) -> int:
        """Delete every expired session row and return how many were removed."""
        stmt = select(Session).where(Session.expires_at < datetime.utcnow())
        expired = (await self.session.execute(stmt)).scalars().all()

        for row in expired:
            await self.session.delete(row)

        await self.session.commit()
        return len(expired)
|
55
app/repositories/vacancy_repository.py
Normal file
55
app/repositories/vacancy_repository.py
Normal file
@ -0,0 +1,55 @@
|
||||
from typing import List, Optional
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, and_
|
||||
from app.models.vacancy import Vacancy, VacancyCreate, VacancyUpdate
|
||||
from .base_repository import BaseRepository
|
||||
|
||||
|
||||
class VacancyRepository(BaseRepository[Vacancy]):
    """Query helpers for the vacancy table."""

    def __init__(self, session: AsyncSession):
        super().__init__(Vacancy, session)

    async def get_by_company(self, company_name: str) -> List[Vacancy]:
        """All vacancies with an exact company-name match."""
        stmt = select(Vacancy).where(Vacancy.company_name == company_name)
        return (await self.session.execute(stmt)).scalars().all()

    async def get_by_area(self, area_name: str) -> List[Vacancy]:
        """All vacancies with an exact area-name match."""
        stmt = select(Vacancy).where(Vacancy.area_name == area_name)
        return (await self.session.execute(stmt)).scalars().all()

    async def get_active_vacancies(self, skip: int = 0, limit: int = 100) -> List[Vacancy]:
        """One page of non-archived vacancies."""
        stmt = (
            select(Vacancy)
            .where(Vacancy.is_archived == False)
            .offset(skip)
            .limit(limit)
        )
        return (await self.session.execute(stmt)).scalars().all()

    async def search_vacancies(
        self,
        title: Optional[str] = None,
        company_name: Optional[str] = None,
        area_name: Optional[str] = None,
        skip: int = 0,
        limit: int = 100
    ) -> List[Vacancy]:
        """Case-insensitive substring search over title/company/area, paginated.

        Filters are ANDed together; a None/empty argument is skipped entirely.
        """
        filters = []
        for column, needle in (
            (Vacancy.title, title),
            (Vacancy.company_name, company_name),
            (Vacancy.area_name, area_name),
        ):
            if needle:
                filters.append(column.ilike(f"%{needle}%"))

        stmt = select(Vacancy)
        if filters:
            stmt = stmt.where(and_(*filters))

        stmt = stmt.offset(skip).limit(limit)
        return (await self.session.execute(stmt)).scalars().all()
|
4
app/routers/__init__.py
Normal file
4
app/routers/__init__.py
Normal file
@ -0,0 +1,4 @@
|
||||
from .vacancy_router import router as vacancy_router
|
||||
from .resume_router import router as resume_router
|
||||
|
||||
__all__ = ["vacancy_router", "resume_router"]
|
194
app/routers/resume_router.py
Normal file
194
app/routers/resume_router.py
Normal file
@ -0,0 +1,194 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, UploadFile, File, Form, Request
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from typing import List, Optional
|
||||
from app.core.session_middleware import get_current_session, get_db_session
|
||||
from app.models.resume import ResumeCreate, ResumeUpdate, ResumeRead, ResumeStatus
|
||||
from app.models.session import Session
|
||||
from app.services.resume_service import ResumeService
|
||||
from app.services.file_service import FileService
|
||||
|
||||
router = APIRouter(prefix="/resumes", tags=["resumes"])
|
||||
|
||||
|
||||
@router.post("/", response_model=ResumeRead)
async def create_resume(
    request: Request,
    vacancy_id: int = Form(...),
    applicant_name: str = Form(...),
    applicant_email: str = Form(...),
    applicant_phone: Optional[str] = Form(None),
    cover_letter: Optional[str] = Form(None),
    resume_file: UploadFile = File(...),
    current_session: Session = Depends(get_current_session),
    db_session: AsyncSession = Depends(get_db_session)
):
    """Upload the resume file to storage and create a resume bound to the caller's session."""
    if not current_session:
        raise HTTPException(status_code=401, detail="No active session")

    # Push the file to S3 first; abort if no URL came back.
    uploaded_url = await FileService().upload_resume_file(resume_file)
    if not uploaded_url:
        raise HTTPException(status_code=400, detail="Failed to upload resume file")

    payload = ResumeCreate(
        vacancy_id=vacancy_id,
        applicant_name=applicant_name,
        applicant_email=applicant_email,
        applicant_phone=applicant_phone,
        resume_file_url=uploaded_url,
        cover_letter=cover_letter
    )
    return await ResumeService(db_session).create_resume_with_session(payload, current_session.id)
|
||||
|
||||
|
||||
@router.get("/", response_model=List[ResumeRead])
async def get_resumes(
    request: Request,
    skip: int = Query(0, ge=0),
    limit: int = Query(100, ge=1, le=1000),
    vacancy_id: Optional[int] = Query(None),
    status: Optional[ResumeStatus] = Query(None),
    current_session: Session = Depends(get_current_session),
    db_session: AsyncSession = Depends(get_db_session)
):
    """List resumes owned by the current session, optionally scoped to one vacancy."""
    if not current_session:
        raise HTTPException(status_code=401, detail="No active session")

    service = ResumeService(db_session)

    # NOTE(review): `status` is accepted but never applied, and the vacancy
    # branch ignores skip/limit — confirm whether that is intentional.
    if vacancy_id:
        return await service.get_resumes_by_vacancy_and_session(vacancy_id, current_session.id)
    return await service.get_resumes_by_session(current_session.id, skip=skip, limit=limit)
|
||||
|
||||
|
||||
@router.get("/{resume_id}", response_model=ResumeRead)
async def get_resume(
    request: Request,
    resume_id: int,
    current_session: Session = Depends(get_current_session),
    db_session: AsyncSession = Depends(get_db_session)
):
    """Fetch one resume; only the owning session may read it."""
    if not current_session:
        raise HTTPException(status_code=401, detail="No active session")

    found = await ResumeService(db_session).get_resume(resume_id)
    if not found:
        raise HTTPException(status_code=404, detail="Resume not found")

    # Ownership check: a session may only access its own resumes.
    if found.session_id != current_session.id:
        raise HTTPException(status_code=403, detail="Access denied")

    return found
|
||||
|
||||
|
||||
@router.put("/{resume_id}", response_model=ResumeRead)
async def update_resume(
    request: Request,
    resume_id: int,
    resume: ResumeUpdate,
    current_session: Session = Depends(get_current_session),
    db_session: AsyncSession = Depends(get_db_session)
):
    """Apply an update payload to a resume owned by the current session."""
    if not current_session:
        raise HTTPException(status_code=401, detail="No active session")

    service = ResumeService(db_session)
    target = await service.get_resume(resume_id)
    if not target:
        raise HTTPException(status_code=404, detail="Resume not found")

    # Ownership check: a session may only modify its own resumes.
    if target.session_id != current_session.id:
        raise HTTPException(status_code=403, detail="Access denied")

    return await service.update_resume(resume_id, resume)
|
||||
|
||||
|
||||
@router.patch("/{resume_id}/status", response_model=ResumeRead)
async def update_resume_status(
    request: Request,
    resume_id: int,
    status: ResumeStatus,
    current_session: Session = Depends(get_current_session),
    db_session: AsyncSession = Depends(get_db_session)
):
    """Move a resume owned by the current session to a new workflow status."""
    if not current_session:
        raise HTTPException(status_code=401, detail="No active session")

    service = ResumeService(db_session)
    target = await service.get_resume(resume_id)
    if not target:
        raise HTTPException(status_code=404, detail="Resume not found")

    # Ownership check: a session may only modify its own resumes.
    if target.session_id != current_session.id:
        raise HTTPException(status_code=403, detail="Access denied")

    return await service.update_resume_status(resume_id, status)
|
||||
|
||||
|
||||
@router.post("/{resume_id}/interview-report")
async def upload_interview_report(
    request: Request,
    resume_id: int,
    report_file: UploadFile = File(...),
    current_session: Session = Depends(get_current_session),
    db_session: AsyncSession = Depends(get_db_session)
):
    """Upload an interview report file and attach its URL to a resume.

    Only the session that owns the resume may attach a report.
    Raises 401 without a session, 404 for a missing resume, 403 for
    foreign resumes, and 400 when the file upload fails.
    """
    if not current_session:
        raise HTTPException(status_code=401, detail="No active session")

    file_service = FileService()
    resume_service = ResumeService(db_session)

    existing_resume = await resume_service.get_resume(resume_id)
    if not existing_resume:
        raise HTTPException(status_code=404, detail="Resume not found")

    # Ownership check: a session may only modify its own resumes.
    if existing_resume.session_id != current_session.id:
        raise HTTPException(status_code=403, detail="Access denied")

    report_url = await file_service.upload_interview_report(report_file)
    if not report_url:
        raise HTTPException(status_code=400, detail="Failed to upload interview report")

    # Bug fix: the original discarded this result and reported success even
    # when the resume row could no longer be updated (e.g. deleted in between).
    updated_resume = await resume_service.add_interview_report(resume_id, report_url)
    if not updated_resume:
        raise HTTPException(status_code=404, detail="Resume not found")

    return {"message": "Interview report uploaded successfully", "report_url": report_url}
|
||||
|
||||
|
||||
@router.delete("/{resume_id}")
async def delete_resume(
    request: Request,
    resume_id: int,
    current_session: Session = Depends(get_current_session),
    db_session: AsyncSession = Depends(get_db_session)
):
    """Delete a resume owned by the current session.

    Raises 401 without a session, 404 for a missing resume, 403 for
    foreign resumes, and 500 if the repository fails to delete the row.
    """
    if not current_session:
        raise HTTPException(status_code=401, detail="No active session")

    service = ResumeService(db_session)
    existing_resume = await service.get_resume(resume_id)
    if not existing_resume:
        raise HTTPException(status_code=404, detail="Resume not found")

    # Ownership check: a session may only delete its own resumes.
    if existing_resume.session_id != current_session.id:
        raise HTTPException(status_code=403, detail="Access denied")

    # Bug fix: the original ignored the boolean result and reported success
    # unconditionally; surface a failure instead of lying to the client.
    success = await service.delete_resume(resume_id)
    if not success:
        raise HTTPException(status_code=500, detail="Failed to delete resume")

    return {"message": "Resume deleted successfully"}
|
92
app/routers/session_router.py
Normal file
92
app/routers/session_router.py
Normal file
@ -0,0 +1,92 @@
|
||||
from fastapi import APIRouter, Depends, Request, HTTPException
|
||||
from fastapi.responses import JSONResponse
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from app.core.session_middleware import get_current_session, get_db_session
|
||||
from app.repositories.session_repository import SessionRepository
|
||||
from app.models.session import Session, SessionRead
|
||||
from typing import Optional
|
||||
import logging
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter(prefix="/sessions", tags=["Sessions"])
|
||||
|
||||
|
||||
@router.get("/current", response_model=SessionRead)
async def get_current_session_info(
    request: Request,
    current_session: Session = Depends(get_current_session)
):
    """Return the details of the caller's active session."""
    if not current_session:
        raise HTTPException(status_code=401, detail="No active session")

    # Copy the exposed fields from the ORM object into the read model.
    exposed = (
        "id", "session_id", "user_agent", "ip_address", "is_active",
        "expires_at", "last_activity", "created_at", "updated_at",
    )
    return SessionRead(**{name: getattr(current_session, name) for name in exposed})
|
||||
|
||||
|
||||
@router.post("/refresh")
async def refresh_session(
    request: Request,
    current_session: Session = Depends(get_current_session),
    db_session: AsyncSession = Depends(get_db_session)
):
    """Extend the current session by 30 days and persist the new expiry.

    Raises 401 when there is no active session.
    """
    if not current_session:
        raise HTTPException(status_code=401, detail="No active session")

    current_session.extend_session(days=30)

    db_session.add(current_session)
    # Bug fix: AsyncSession.commit()/refresh() are coroutines; the original
    # called them without `await`, so the extension was never persisted.
    # (The unused SessionRepository instance was also removed.)
    await db_session.commit()
    await db_session.refresh(current_session)

    logger.info(f"Extended session {current_session.session_id}")

    return {
        "message": "Session extended successfully",
        "expires_at": current_session.expires_at,
        "session_id": current_session.session_id
    }
|
||||
|
||||
|
||||
@router.post("/logout")
async def logout(
    request: Request,
    current_session: Session = Depends(get_current_session),
    db_session: AsyncSession = Depends(get_db_session)
):
    """Deactivate the current session and clear its cookie."""
    if not current_session:
        raise HTTPException(status_code=401, detail="No active session")

    repo = SessionRepository(db_session)
    # Guard clause: bail out with a 500 if deactivation did not happen.
    if not await repo.deactivate_session(current_session.session_id):
        raise HTTPException(status_code=500, detail="Failed to logout")

    logger.info(f"Deactivated session {current_session.session_id}")
    response = JSONResponse(content={"message": "Logged out successfully"})
    response.delete_cookie("session_id")
    return response
|
||||
|
||||
|
||||
@router.get("/health")
async def session_health_check():
    """Liveness probe for the session-management subsystem."""
    return dict(
        status="healthy",
        service="session_management",
        message="Session management is working properly",
    )
|
93
app/routers/vacancy_router.py
Normal file
93
app/routers/vacancy_router.py
Normal file
@ -0,0 +1,93 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from typing import List, Optional
|
||||
from app.core.database import get_session
|
||||
from app.models.vacancy import VacancyCreate, VacancyUpdate, VacancyRead
|
||||
from app.services.vacancy_service import VacancyService
|
||||
|
||||
router = APIRouter(prefix="/vacancies", tags=["vacancies"])
|
||||
|
||||
|
||||
@router.post("/", response_model=VacancyRead)
async def create_vacancy(
    vacancy: VacancyCreate,
    session: AsyncSession = Depends(get_session)
):
    """Create a new vacancy from the validated payload."""
    return await VacancyService(session).create_vacancy(vacancy)
|
||||
|
||||
|
||||
@router.get("/", response_model=List[VacancyRead])
async def get_vacancies(
    skip: int = Query(0, ge=0),
    limit: int = Query(100, ge=1, le=1000),
    active_only: bool = Query(False),
    title: Optional[str] = Query(None),
    company_name: Optional[str] = Query(None),
    area_name: Optional[str] = Query(None),
    session: AsyncSession = Depends(get_session)
):
    """List vacancies with pagination.

    If any text filter is set, a substring search is performed (and
    `active_only` is ignored); otherwise either active or all vacancies
    are paged through.
    """
    service = VacancyService(session)

    if title or company_name or area_name:
        return await service.search_vacancies(
            title=title,
            company_name=company_name,
            area_name=area_name,
            skip=skip,
            limit=limit
        )

    if active_only:
        return await service.get_active_vacancies(skip=skip, limit=limit)
    return await service.get_all_vacancies(skip=skip, limit=limit)
|
||||
|
||||
|
||||
@router.get("/{vacancy_id}", response_model=VacancyRead)
async def get_vacancy(
    vacancy_id: int,
    session: AsyncSession = Depends(get_session)
):
    """Fetch a single vacancy by id; 404 if it does not exist."""
    found = await VacancyService(session).get_vacancy(vacancy_id)
    if not found:
        raise HTTPException(status_code=404, detail="Vacancy not found")
    return found
|
||||
|
||||
|
||||
@router.put("/{vacancy_id}", response_model=VacancyRead)
async def update_vacancy(
    vacancy_id: int,
    vacancy: VacancyUpdate,
    session: AsyncSession = Depends(get_session)
):
    """Apply an update payload to a vacancy; 404 if it does not exist."""
    result = await VacancyService(session).update_vacancy(vacancy_id, vacancy)
    if not result:
        raise HTTPException(status_code=404, detail="Vacancy not found")
    return result
|
||||
|
||||
|
||||
@router.delete("/{vacancy_id}")
async def delete_vacancy(
    vacancy_id: int,
    session: AsyncSession = Depends(get_session)
):
    """Delete a vacancy by id; 404 if it does not exist."""
    if not await VacancyService(session).delete_vacancy(vacancy_id):
        raise HTTPException(status_code=404, detail="Vacancy not found")
    return {"message": "Vacancy deleted successfully"}
|
||||
|
||||
|
||||
@router.patch("/{vacancy_id}/archive", response_model=VacancyRead)
async def archive_vacancy(
    vacancy_id: int,
    session: AsyncSession = Depends(get_session)
):
    """Mark a vacancy as archived; 404 if it does not exist."""
    result = await VacancyService(session).archive_vacancy(vacancy_id)
    if not result:
        raise HTTPException(status_code=404, detail="Vacancy not found")
    return result
|
5
app/services/__init__.py
Normal file
5
app/services/__init__.py
Normal file
@ -0,0 +1,5 @@
|
||||
from .vacancy_service import VacancyService
|
||||
from .resume_service import ResumeService
|
||||
from .file_service import FileService
|
||||
|
||||
__all__ = ["VacancyService", "ResumeService", "FileService"]
|
37
app/services/file_service.py
Normal file
37
app/services/file_service.py
Normal file
@ -0,0 +1,37 @@
|
||||
from fastapi import UploadFile
|
||||
from typing import Optional
|
||||
from app.core.s3 import s3_service
|
||||
|
||||
|
||||
class FileService:
    """Thin wrapper around the shared S3 service for resume-related uploads."""

    def __init__(self):
        # Reuse the module-level singleton rather than creating a client here.
        self.s3_service = s3_service

    async def _upload(self, file: UploadFile, name_prefix: str = "") -> Optional[str]:
        """Read an UploadFile fully and push it to S3.

        Returns the stored file's URL, or None when the upload has no
        filename. The optional prefix is prepended to the stored name.
        """
        if not file.filename:
            return None

        content = await file.read()
        content_type = file.content_type or "application/octet-stream"

        return await self.s3_service.upload_file(
            file_content=content,
            file_name=f"{name_prefix}{file.filename}",
            content_type=content_type
        )

    async def upload_resume_file(self, file: UploadFile) -> Optional[str]:
        """Upload an applicant's resume file; returns its URL or None."""
        # Refactor: both public upload methods previously duplicated the
        # read/content-type/upload sequence; it now lives in _upload().
        return await self._upload(file)

    async def upload_interview_report(self, file: UploadFile) -> Optional[str]:
        """Upload an interview report, prefixing the stored name to keep reports distinguishable."""
        return await self._upload(file, name_prefix="interview_report_")

    async def delete_file(self, file_url: str) -> bool:
        """Delete a previously uploaded file by its URL; True on success."""
        return await self.s3_service.delete_file(file_url)
|
53
app/services/resume_service.py
Normal file
53
app/services/resume_service.py
Normal file
@ -0,0 +1,53 @@
|
||||
from typing import List, Optional
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from app.models.resume import Resume, ResumeCreate, ResumeUpdate, ResumeStatus
|
||||
from app.repositories.resume_repository import ResumeRepository
|
||||
|
||||
|
||||
class ResumeService:
    """Business-logic layer over ResumeRepository."""

    def __init__(self, session: AsyncSession):
        # All persistence goes through the repository.
        self.repository = ResumeRepository(session)

    async def create_resume(self, resume_data: ResumeCreate) -> Resume:
        """Validate the payload into a Resume row and persist it."""
        return await self.repository.create(Resume.model_validate(resume_data))

    async def create_resume_with_session(self, resume_data: ResumeCreate, session_id: int) -> Resume:
        """Persist a resume bound to the given session."""
        return await self.repository.create_with_session(resume_data.model_dump(), session_id)

    async def get_resume(self, resume_id: int) -> Optional[Resume]:
        """Fetch one resume by primary key, or None."""
        return await self.repository.get(resume_id)

    async def get_all_resumes(self, skip: int = 0, limit: int = 100) -> List[Resume]:
        """Fetch a page over all resumes."""
        return await self.repository.get_all(skip=skip, limit=limit)

    async def get_resumes_by_vacancy(self, vacancy_id: int) -> List[Resume]:
        """Fetch every resume submitted for a vacancy."""
        return await self.repository.get_by_vacancy_id(vacancy_id)

    async def get_resumes_by_session(self, session_id: int, skip: int = 0, limit: int = 100) -> List[Resume]:
        """Fetch the resumes owned by a session.

        NOTE(review): skip/limit are accepted but not forwarded to the
        repository — confirm whether pagination was intended here.
        """
        return await self.repository.get_by_session_id(session_id)

    async def get_resumes_by_vacancy_and_session(self, vacancy_id: int, session_id: int) -> List[Resume]:
        """Fetch a session's resumes for one specific vacancy."""
        return await self.repository.get_by_vacancy_and_session(vacancy_id, session_id)

    async def get_resumes_by_status(self, status: ResumeStatus) -> List[Resume]:
        """Fetch all resumes currently in the given workflow status."""
        return await self.repository.get_by_status(status)

    async def update_resume(self, resume_id: int, resume_data: ResumeUpdate) -> Optional[Resume]:
        """Apply only the fields explicitly set on the update payload."""
        changes = resume_data.model_dump(exclude_unset=True)
        if not changes:
            # Nothing to change — hand back the current row untouched.
            return await self.repository.get(resume_id)
        return await self.repository.update(resume_id, changes)

    async def delete_resume(self, resume_id: int) -> bool:
        """Delete a resume; True when a row was removed."""
        return await self.repository.delete(resume_id)

    async def update_resume_status(self, resume_id: int, status: ResumeStatus) -> Optional[Resume]:
        """Move a resume to a new workflow status."""
        return await self.repository.update_status(resume_id, status)

    async def add_interview_report(self, resume_id: int, report_url: str) -> Optional[Resume]:
        """Attach an interview report URL to a resume."""
        return await self.repository.add_interview_report(resume_id, report_url)
|
50
app/services/vacancy_service.py
Normal file
50
app/services/vacancy_service.py
Normal file
@ -0,0 +1,50 @@
|
||||
from typing import List, Optional
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from app.models.vacancy import Vacancy, VacancyCreate, VacancyUpdate
|
||||
from app.repositories.vacancy_repository import VacancyRepository
|
||||
|
||||
|
||||
class VacancyService:
    """Business-logic layer over VacancyRepository."""

    def __init__(self, session: AsyncSession):
        # All persistence goes through the repository.
        self.repository = VacancyRepository(session)

    async def create_vacancy(self, vacancy_data: VacancyCreate) -> Vacancy:
        """Validate the payload into a Vacancy row and persist it."""
        return await self.repository.create(Vacancy.model_validate(vacancy_data))

    async def get_vacancy(self, vacancy_id: int) -> Optional[Vacancy]:
        """Fetch one vacancy by primary key, or None."""
        return await self.repository.get(vacancy_id)

    async def get_all_vacancies(self, skip: int = 0, limit: int = 100) -> List[Vacancy]:
        """Fetch a page over all vacancies."""
        return await self.repository.get_all(skip=skip, limit=limit)

    async def get_active_vacancies(self, skip: int = 0, limit: int = 100) -> List[Vacancy]:
        """Fetch a page of non-archived vacancies."""
        return await self.repository.get_active_vacancies(skip=skip, limit=limit)

    async def update_vacancy(self, vacancy_id: int, vacancy_data: VacancyUpdate) -> Optional[Vacancy]:
        """Apply only the fields explicitly set on the update payload."""
        changes = vacancy_data.model_dump(exclude_unset=True)
        if not changes:
            # Nothing to change — hand back the current row untouched.
            return await self.repository.get(vacancy_id)
        return await self.repository.update(vacancy_id, changes)

    async def delete_vacancy(self, vacancy_id: int) -> bool:
        """Delete a vacancy; True when a row was removed."""
        return await self.repository.delete(vacancy_id)

    async def archive_vacancy(self, vacancy_id: int) -> Optional[Vacancy]:
        """Soft-delete: flip the is_archived flag on."""
        return await self.repository.update(vacancy_id, {"is_archived": True})

    async def search_vacancies(
        self,
        title: Optional[str] = None,
        company_name: Optional[str] = None,
        area_name: Optional[str] = None,
        skip: int = 0,
        limit: int = 100
    ) -> List[Vacancy]:
        """Delegate a filtered, paginated search to the repository."""
        return await self.repository.search_vacancies(
            title=title,
            company_name=company_name,
            area_name=area_name,
            skip=skip,
            limit=limit
        )
|
44
main.py
Normal file
44
main.py
Normal file
@ -0,0 +1,44 @@
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from contextlib import asynccontextmanager
|
||||
from app.core.session_middleware import SessionMiddleware
|
||||
from app.routers import vacancy_router, resume_router
|
||||
from app.routers.session_router import router as session_router
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan hook; no startup/shutdown work is done yet."""
    yield
|
||||
|
||||
|
||||
app = FastAPI(
    title="HR AI Backend",
    description="Backend API for HR AI system with vacancies and resumes management",
    version="1.0.0",
    lifespan=lifespan
)

# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# overly permissive for production — consider an explicit origin whitelist.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Session-management middleware (registered after CORS).
app.add_middleware(SessionMiddleware, cookie_name="session_id")

# Mount all API routers under the common version prefix.
for api_router in (vacancy_router, resume_router, session_router):
    app.include_router(api_router, prefix="/api/v1")


@app.get("/")
async def root():
    """Service banner."""
    return {"message": "HR AI Backend API", "version": "1.0.0"}


@app.get("/health")
async def health_check():
    """Liveness probe."""
    return {"status": "healthy"}
|
1
migrations/README
Normal file
1
migrations/README
Normal file
@ -0,0 +1 @@
|
||||
Generic single-database configuration.
|
91
migrations/env.py
Normal file
91
migrations/env.py
Normal file
@ -0,0 +1,91 @@
|
||||
import asyncio
|
||||
from logging.config import fileConfig
|
||||
|
||||
from sqlalchemy import engine_from_config
|
||||
from sqlalchemy import pool
|
||||
from sqlalchemy.ext.asyncio import async_engine_from_config
|
||||
|
||||
from alembic import context
|
||||
from sqlmodel import SQLModel
|
||||
|
||||
from app.core.config import settings
|
||||
from app.models import *
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# add your model's MetaData object here
|
||||
# for 'autogenerate' support
|
||||
target_metadata = SQLModel.metadata
|
||||
|
||||
# Set the database URL from settings
|
||||
config.set_main_option("sqlalchemy.url", settings.database_url)
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with just a URL — no Engine and therefore no
    DBAPI is required. context.execute() emits the SQL to the script
    output instead of a database.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an async engine from the alembic config and runs the
    migrations over a live connection.
    """
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    def do_run_migrations(connection):
        # Runs synchronously inside connection.run_sync().
        context.configure(connection=connection, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()

    async def run_async_migrations():
        async with connectable.connect() as connection:
            await connection.run_sync(do_run_migrations)

    asyncio.run(run_async_migrations())
|
||||
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
run_migrations_online()
|
28
migrations/script.py.mako
Normal file
28
migrations/script.py.mako
Normal file
@ -0,0 +1,28 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
"""Upgrade schema."""
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
"""Downgrade schema."""
|
||||
${downgrades if downgrades else "pass"}
|
59
migrations/versions/4e19b8fe4a88_add_session_id_to_resume.py
Normal file
59
migrations/versions/4e19b8fe4a88_add_session_id_to_resume.py
Normal file
@ -0,0 +1,59 @@
|
||||
"""add_session_id_to_resume
|
||||
|
||||
Revision ID: 4e19b8fe4a88
|
||||
Revises: ae966b3e742e
|
||||
Create Date: 2025-08-30 20:38:36.867781
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '4e19b8fe4a88'
|
||||
down_revision: Union[str, Sequence[str], None] = 'ae966b3e742e'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema: add resume.session_id with a FK to session."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Step 1: add the column as nullable so existing rows stay valid.
    op.add_column('resume', sa.Column('session_id', sa.Integer(), nullable=True))

    # Step 2: create a fallback "legacy" session, but only when the session
    # table is still empty (so pre-existing resumes have something to point at).
    op.execute("""
        INSERT INTO session (session_id, is_active, expires_at, last_activity, created_at, updated_at)
        SELECT
            'legacy-' || CAST(EXTRACT(epoch FROM NOW()) AS TEXT),
            true,
            NOW() + INTERVAL '30 days',
            NOW(),
            NOW(),
            NOW()
        WHERE NOT EXISTS (SELECT 1 FROM session LIMIT 1)
    """)

    # Step 3: backfill — attach all orphaned resumes to the earliest session.
    op.execute("""
        UPDATE resume
        SET session_id = (SELECT id FROM session ORDER BY id LIMIT 1)
        WHERE session_id IS NULL
    """)

    # Step 4: every row now has a value, so enforce NOT NULL and add the FK.
    op.alter_column('resume', 'session_id', nullable=False)
    op.create_foreign_key(None, 'resume', 'session', ['session_id'], ['id'])
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema: drop the resume.session_id column and its FK."""
    # Bug fix: op.drop_constraint() raises when the constraint name is None.
    # The FK was created unnamed, so PostgreSQL assigned its default name
    # "<table>_<column>_fkey".
    # NOTE(review): confirm the actual name in the target database if a
    # custom naming convention is configured for this project.
    op.drop_constraint('resume_session_id_fkey', 'resume', type_='foreignkey')
    op.drop_column('resume', 'session_id')
|
312
migrations/versions/7ffa784ab042_add_sample_vacancies.py
Normal file
312
migrations/versions/7ffa784ab042_add_sample_vacancies.py
Normal file
@ -0,0 +1,312 @@
|
||||
"""add_sample_vacancies
|
||||
|
||||
Revision ID: 7ffa784ab042
|
||||
Revises: a694f7c9e766
|
||||
Create Date: 2025-08-30 20:00:00.661534
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '7ffa784ab042'
|
||||
down_revision: Union[str, Sequence[str], None] = 'a694f7c9e766'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Add five sample vacancies for development/demo environments."""

    # Seed data.  Values are plain Python; quoting/escaping is handled by
    # the driver via bound parameters below, not by string manipulation.
    vacancies_data = [
        {
            'title': 'Senior Python Developer',
            'description': '''Мы ищем опытного Python-разработчика для работы в команде разработки высоконагруженного веб-сервиса.

Обязанности:
• Разработка и поддержка API на Python (FastAPI/Django)
• Оптимизация производительности приложений
• Проектирование архитектуры микросервисов
• Код-ревью и менторство младших разработчиков
• Участие в планировании и декомпозиции задач

Требования:
• Опыт разработки на Python от 5 лет
• Глубокие знания Django/FastAPI, SQLAlchemy, PostgreSQL
• Опыт работы с Redis, RabbitMQ/Kafka
• Знание Docker, Kubernetes
• Опыт работы с микросервисной архитектурой
• Понимание принципов SOLID, DRY, KISS

Будет плюсом:
• Опыт работы с облачными сервисами (AWS/GCP)
• Знание Go или Node.js
• Опыт ведения технических интервью''',
            'key_skills': 'Python, FastAPI, Django, PostgreSQL, Redis, Docker, Kubernetes, Микросервисы, REST API, Git',
            'employment_type': 'FULL_TIME',
            'experience': 'MORE_THAN_6',
            'schedule': 'REMOTE',
            'salary_from': 250000,
            'salary_to': 400000,
            'salary_currency': 'RUR',
            'gross_salary': False,
            'company_name': 'TechCorp Solutions',
            'company_description': 'Компания-разработчик инновационных решений в области fintech. У нас работает более 500 специалистов, офисы в Москве и Санкт-Петербурге.',
            'area_name': 'Москва',
            'metro_stations': 'Сокольники, Красносельская',
            'address': 'г. Москва, ул. Русаковская, д. 13',
            'professional_roles': 'Программист, разработчик',
            'contacts_name': 'Анна Петрова',
            'contacts_email': 'hr@techcorp.ru',
            'contacts_phone': '+7 (495) 123-45-67',
            'is_archived': False,
            'premium': True,
            'url': 'https://techcorp.ru/careers/senior-python'
        },
        {
            'title': 'Frontend React Developer',
            'description': '''Приглашаем талантливого фронтенд-разработчика для создания современных веб-приложений.

Задачи:
• Разработка пользовательских интерфейсов на React
• Интеграция с REST API
• Оптимизация производительности приложений
• Написание unit-тестов
• Участие в планировании UX/UI решений

Требования:
• Опыт работы с React от 3 лет
• Знание TypeScript, HTML5, CSS3, SASS/SCSS
• Опыт работы с Redux/MobX
• Знание современных инструментов сборки (Webpack, Vite)
• Понимание принципов responsive design
• Опыт работы с Git

Мы предлагаем:
• Интересные задачи и современный стек технологий
• Гибкий график работы
• Медицинское страхование
• Обучение за счет компании
• Дружная команда профессионалов''',
            'key_skills': 'React, TypeScript, JavaScript, HTML5, CSS3, SASS, Redux, Webpack, Git, REST API',
            'employment_type': 'FULL_TIME',
            'experience': 'BETWEEN_3_AND_6',
            'schedule': 'FLEXIBLE',
            'salary_from': 150000,
            'salary_to': 250000,
            'salary_currency': 'RUR',
            'gross_salary': False,
            'company_name': 'Digital Agency Pro',
            'company_description': 'Креативное digital-агентство, специализирующееся на разработке веб-приложений и мобильных решений для крупных брендов.',
            'area_name': 'Санкт-Петербург',
            'metro_stations': 'Технологический институт, Пушкинская',
            'address': 'г. Санкт-Петербург, ул. Правды, д. 10',
            'professional_roles': 'Программист, разработчик',
            'contacts_name': 'Михаил Сидоров',
            'contacts_email': 'jobs@digitalagency.ru',
            'contacts_phone': '+7 (812) 987-65-43',
            'is_archived': False,
            'premium': False,
            'url': 'https://digitalagency.ru/vacancy/react-dev'
        },
        {
            'title': 'DevOps Engineer',
            'description': '''Ищем DevOps-инженера для автоматизации процессов CI/CD и управления облачной инфраструктурой.

Основные задачи:
• Проектирование и поддержка CI/CD pipeline
• Управление Kubernetes кластерами
• Мониторинг и логирование приложений
• Автоматизация deployment процессов
• Обеспечение отказоустойчивости сервисов
• Оптимизация затрат на инфраструктуру

Требования:
• Опыт работы DevOps от 4 лет
• Глубокие знания Docker, Kubernetes
• Опыт работы с облачными платформами (AWS/Azure/GCP)
• Знание Terraform, Ansible
• Опыт с Jenkins, GitLab CI/CD
• Знание мониторинга (Prometheus, Grafana, ELK)
• Понимание сетевых технологий

Условия:
• Официальное трудоустройство
• Компенсация обучения и сертификации
• Современное оборудование
• Возможность работы из дома''',
            'key_skills': 'Docker, Kubernetes, AWS, Terraform, Ansible, Jenkins, GitLab CI/CD, Prometheus, Grafana, Linux',
            'employment_type': 'FULL_TIME',
            'experience': 'BETWEEN_3_AND_6',
            'schedule': 'REMOTE',
            'salary_from': 200000,
            'salary_to': 350000,
            'salary_currency': 'RUR',
            'gross_salary': False,
            'company_name': 'CloudTech Systems',
            'company_description': 'Системный интегратор, специализирующийся на внедрении облачных решений и автоматизации IT-процессов для корпоративных клиентов.',
            'area_name': 'Москва',
            'metro_stations': 'Белорусская, Маяковская',
            'address': 'г. Москва, Тверская ул., д. 25',
            'professional_roles': 'Системный администратор, DevOps',
            'contacts_name': 'Елена Васильева',
            'contacts_email': 'hr@cloudtech.ru',
            'contacts_phone': '+7 (495) 555-12-34',
            'is_archived': False,
            'premium': True,
            'url': 'https://cloudtech.ru/careers/devops'
        },
        {
            'title': 'Junior Java Developer',
            'description': '''Приглашаем начинающего Java-разработчика для участия в крупных enterprise-проектах.

Обязанности:
• Разработка backend-сервисов на Java
• Написание unit и integration тестов
• Участие в code review
• Изучение и применение лучших практик разработки
• Работа в команде по Agile методологии

Требования:
• Знание Java Core, ООП принципов
• Базовое понимание Spring Framework
• Опыт работы с SQL базами данных
• Знание Git
• Желание развиваться и изучать новые технологии
• Понимание принципов REST API

Мы предлагаем:
• Менторство от senior разработчиков
• Обучающие курсы и конференции
• Карьерный рост
• Стабильную зарплату
• Молодая и амбициозная команда
• Интересные проекты в финтех сфере''',
            'key_skills': 'Java, Spring Framework, SQL, Git, REST API, JUnit, Maven, PostgreSQL',
            'employment_type': 'FULL_TIME',
            'experience': 'BETWEEN_1_AND_3',
            'schedule': 'FULL_DAY',
            'salary_from': 80000,
            'salary_to': 120000,
            'salary_currency': 'RUR',
            'gross_salary': False,
            'company_name': 'FinTech Innovations',
            'company_description': 'Быстро развивающийся стартап в области финансовых технологий. Создаем инновационные решения для банков и финансовых институтов.',
            'area_name': 'Екатеринбург',
            'metro_stations': 'Площадь 1905 года, Динамо',
            'address': 'г. Екатеринбург, ул. Ленина, д. 33',
            'professional_roles': 'Программист, разработчик',
            'contacts_name': 'Дмитрий Козлов',
            'contacts_email': 'recruitment@fintech-inn.ru',
            'contacts_phone': '+7 (343) 456-78-90',
            'is_archived': False,
            'premium': False,
            'url': 'https://fintech-inn.ru/jobs/java-junior'
        },
        {
            'title': 'Product Manager IT',
            'description': '''Ищем опытного продуктового менеджера для управления развитием digital-продуктов.

Основные задачи:
• Управление продуктовой стратегией и roadmap
• Анализ потребностей пользователей и рынка
• Координация работы команд разработки
• A/B тестирование и анализ метрик
• Планирование релизов и feature delivery
• Взаимодействие с stakeholders
• Управление product backlog

Требования:
• Опыт работы Product Manager от 4 лет
• Знание методологий Agile/Scrum
• Опыт работы с аналитическими системами
• Понимание UX/UI принципов
• Навыки работы с Jira, Confluence
• Опыт проведения интервью с пользователями
• Аналитическое мышление и data-driven подход

Что мы предлагаем:
• Высокую степень влияния на продукт
• Работу с топ-менеджментом компании
• Современные инструменты и методики
• Конкурентную заработную плату
• Полный соц. пакет и ДМС''',
            'key_skills': 'Product Management, Agile, Scrum, Аналитика, UX/UI, Jira, A/B тестирование, User Research',
            'employment_type': 'FULL_TIME',
            'experience': 'BETWEEN_3_AND_6',
            'schedule': 'FLEXIBLE',
            'salary_from': 180000,
            'salary_to': 280000,
            'salary_currency': 'RUR',
            'gross_salary': False,
            'company_name': 'Marketplace Solutions',
            'company_description': 'Один из лидеров российского e-commerce рынка. Развиваем крупнейшую онлайн-платформу с миллионами пользователей.',
            'area_name': 'Москва',
            'metro_stations': 'Парк культуры, Сокольники',
            'address': 'г. Москва, Садовая-Триумфальная ул., д. 4/10',
            'professional_roles': 'Менеджер продукта, Product Manager',
            'contacts_name': 'Ольга Смирнова',
            'contacts_email': 'pm-jobs@marketplace.ru',
            'contacts_phone': '+7 (495) 777-88-99',
            'is_archived': False,
            'premium': True,
            'url': 'https://marketplace.ru/career/product-manager'
        }
    ]

    # BUG FIX: the original built this INSERT with f-string interpolation and
    # escaped single quotes only in 'description' and 'company_description';
    # a quote in any other text field (title, address, skills, ...) would
    # have produced invalid SQL.  Bound parameters delegate all quoting to
    # the driver; the ENUM columns keep their explicit casts.
    insert_sql = sa.text("""
        INSERT INTO vacancy (
            title, description, key_skills, employment_type, experience,
            schedule, salary_from, salary_to, salary_currency, gross_salary,
            company_name, company_description, area_name, metro_stations, address,
            professional_roles, contacts_name, contacts_email, contacts_phone,
            is_archived, premium, published_at, url, created_at, updated_at
        ) VALUES (
            :title, :description, :key_skills,
            CAST(:employment_type AS employmenttype),
            CAST(:experience AS experience),
            CAST(:schedule AS schedule),
            :salary_from, :salary_to, :salary_currency, :gross_salary,
            :company_name, :company_description, :area_name, :metro_stations,
            :address, :professional_roles, :contacts_name, :contacts_email,
            :contacts_phone, :is_archived, :premium, NOW(), :url, NOW(), NOW()
        )
    """)

    bind = op.get_bind()
    for vacancy_data in vacancies_data:
        bind.execute(insert_sql, vacancy_data)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Remove the sample vacancies inserted by upgrade()."""
    # The sample rows are identified by their (assumed unique) titles.
    sample_titles = [
        'Senior Python Developer',
        'Frontend React Developer',
        'DevOps Engineer',
        'Junior Java Developer',
        'Product Manager IT'
    ]

    # BUG FIX: the original interpolated the title into the SQL text with an
    # f-string; a title containing a single quote would have broken the
    # statement.  A bound parameter is safe for any title.
    bind = op.get_bind()
    delete_sql = sa.text("DELETE FROM vacancy WHERE title = :title")
    for title in sample_titles:
        bind.execute(delete_sql, {'title': title})
|
78
migrations/versions/a694f7c9e766_initial.py
Normal file
78
migrations/versions/a694f7c9e766_initial.py
Normal file
@ -0,0 +1,78 @@
|
||||
"""initial
|
||||
|
||||
Revision ID: a694f7c9e766
|
||||
Revises:
|
||||
Create Date: 2025-08-30 19:48:53.070679
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import sqlmodel
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = 'a694f7c9e766'
|
||||
down_revision: Union[str, Sequence[str], None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema: create the 'vacancy' and 'resume' tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Job postings.  NOTE: the sa.Enum columns implicitly create the
    # PostgreSQL ENUM types 'employmenttype', 'experience' and 'schedule'
    # as a side effect of create_table.
    op.create_table('vacancy',
    sa.Column('title', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),
    sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('key_skills', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('employment_type', sa.Enum('FULL_TIME', 'PART_TIME', 'PROJECT', 'VOLUNTEER', 'PROBATION', name='employmenttype'), nullable=False),
    sa.Column('experience', sa.Enum('NO_EXPERIENCE', 'BETWEEN_1_AND_3', 'BETWEEN_3_AND_6', 'MORE_THAN_6', name='experience'), nullable=False),
    sa.Column('schedule', sa.Enum('FULL_DAY', 'SHIFT', 'FLEXIBLE', 'REMOTE', 'FLY_IN_FLY_OUT', name='schedule'), nullable=False),
    # salary bounds are optional; currency is an ISO-4217-style 3-char code
    sa.Column('salary_from', sa.Integer(), nullable=True),
    sa.Column('salary_to', sa.Integer(), nullable=True),
    sa.Column('salary_currency', sqlmodel.sql.sqltypes.AutoString(length=3), nullable=True),
    sa.Column('gross_salary', sa.Boolean(), nullable=True),
    sa.Column('company_name', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),
    sa.Column('company_description', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('area_name', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),
    sa.Column('metro_stations', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('address', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('professional_roles', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('contacts_name', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=True),
    sa.Column('contacts_email', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=True),
    sa.Column('contacts_phone', sqlmodel.sql.sqltypes.AutoString(length=50), nullable=True),
    sa.Column('is_archived', sa.Boolean(), nullable=False),
    sa.Column('premium', sa.Boolean(), nullable=False),
    sa.Column('published_at', sa.DateTime(), nullable=True),
    sa.Column('url', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('updated_at', sa.DateTime(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # Applications; each row references one vacancy.  The sa.Enum column
    # implicitly creates the PostgreSQL ENUM type 'resumestatus'.
    op.create_table('resume',
    sa.Column('vacancy_id', sa.Integer(), nullable=False),
    sa.Column('applicant_name', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),
    sa.Column('applicant_email', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),
    sa.Column('applicant_phone', sqlmodel.sql.sqltypes.AutoString(length=50), nullable=True),
    sa.Column('resume_file_url', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('cover_letter', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('status', sa.Enum('PENDING', 'UNDER_REVIEW', 'INTERVIEW_SCHEDULED', 'INTERVIEWED', 'REJECTED', 'ACCEPTED', name='resumestatus'), nullable=False),
    sa.Column('interview_report_url', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('notes', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('updated_at', sa.DateTime(), nullable=False),
    sa.ForeignKeyConstraint(['vacancy_id'], ['vacancy.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema: drop both tables and their implicit ENUM types."""
    # ### commands auto generated by Alembic - please adjust! ###
    # 'resume' first: it holds the FK to 'vacancy'.
    op.drop_table('resume')
    op.drop_table('vacancy')
    # BUG FIX: upgrade() implicitly created PostgreSQL ENUM types via
    # sa.Enum, but drop_table does not remove them.  Leaving them behind
    # makes a subsequent upgrade() fail with 'type ... already exists'.
    bind = op.get_bind()
    for enum_name in ('resumestatus', 'schedule', 'experience', 'employmenttype'):
        sa.Enum(name=enum_name).drop(bind, checkfirst=True)
    # ### end Alembic commands ###
|
46
migrations/versions/ae966b3e742e_add_session_table.py
Normal file
46
migrations/versions/ae966b3e742e_add_session_table.py
Normal file
@ -0,0 +1,46 @@
|
||||
"""add_session_table
|
||||
|
||||
Revision ID: ae966b3e742e
|
||||
Revises: 7ffa784ab042
|
||||
Create Date: 2025-08-30 20:10:57.802953
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import sqlmodel
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = 'ae966b3e742e'
|
||||
down_revision: Union[str, Sequence[str], None] = '7ffa784ab042'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema: create the 'session' table and its unique index."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('session',
    # external string identifier handed to clients (distinct from the
    # integer surrogate primary key 'id')
    sa.Column('session_id', sqlmodel.sql.sqltypes.AutoString(length=255), nullable=False),
    sa.Column('user_agent', sqlmodel.sql.sqltypes.AutoString(length=512), nullable=True),
    # length 45 fits the longest IPv6 textual form
    sa.Column('ip_address', sqlmodel.sql.sqltypes.AutoString(length=45), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=False),
    sa.Column('expires_at', sa.DateTime(), nullable=False),
    sa.Column('last_activity', sa.DateTime(), nullable=False),
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.Column('updated_at', sa.DateTime(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # unique lookup index on the external session identifier
    op.create_index(op.f('ix_session_session_id'), 'session', ['session_id'], unique=True)
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema: drop the 'session' table and its index."""
    # ### commands auto generated by Alembic - please adjust! ###
    # index first, then the table itself
    op.drop_index(op.f('ix_session_session_id'), table_name='session')
    op.drop_table('session')
    # ### end Alembic commands ###
|
48
pyproject.toml
Normal file
48
pyproject.toml
Normal file
@ -0,0 +1,48 @@
|
||||
[project]
|
||||
name = "hr-ai-backend"
|
||||
version = "0.1.0"
|
||||
description = "HR AI Backend with FastAPI"
|
||||
requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
"fastapi[standard]>=0.104.0",
|
||||
"uvicorn[standard]>=0.24.0",
|
||||
"sqlmodel>=0.0.14",
|
||||
"asyncpg>=0.29.0",
|
||||
"alembic>=1.13.0",
|
||||
"python-multipart>=0.0.6",
|
||||
"boto3>=1.34.0",
|
||||
"python-dotenv>=1.0.0",
|
||||
"pydantic-settings>=2.1.0",
|
||||
]
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
packages = ["app"]
|
||||
|
||||
[tool.uv]
|
||||
dev-dependencies = [
|
||||
"pytest>=7.4.0",
|
||||
"pytest-asyncio>=0.21.0",
|
||||
"httpx>=0.25.0",
|
||||
"black>=23.0.0",
|
||||
"isort>=5.12.0",
|
||||
"flake8>=6.0.0",
|
||||
"mypy>=1.7.0",
|
||||
]
|
||||
|
||||
[tool.black]
|
||||
line-length = 88
|
||||
target-version = ['py311']
|
||||
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
line_length = 88
|
||||
|
||||
[tool.mypy]
|
||||
python_version = "3.11"
|
||||
warn_return_any = true
|
||||
warn_unused_configs = true
|
||||
disallow_untyped_defs = true
|
Loading…
Reference in New Issue
Block a user