From 742045fbde1390164335ba82a88b3744ca0f6fad Mon Sep 17 00:00:00 2001
From: Oleksandr Markusyk
Date: Sun, 18 Jan 2026 14:40:42 +0100
Subject: [PATCH] CRUD API, CLI, tests, and docker compose

---
 API.md                                        | 122 ++++++++++++++++
 Dockerfile                                    |  15 ++
 app/__init__.py                               |   0
 app/__pycache__/__init__.cpython-314.pyc      | Bin 0 -> 155 bytes
 app/__pycache__/db.cpython-314.pyc            | Bin 0 -> 2367 bytes
 app/__pycache__/main.cpython-314.pyc          | Bin 0 -> 8998 bytes
 app/__pycache__/schemas.cpython-314.pyc       | Bin 0 -> 2657 bytes
 app/db.py                                     |  42 ++++++
 app/main.py                                   | 135 ++++++++++++++++++
 app/schemas.py                                |  35 +++++
 cli.py                                        |  79 ++++++++++
 docker-compose.yml                            |  35 +++++
 pytest.ini                                    |   5 +
 requirements.txt                              |   9 ++
 tests/__init__.py                             |   0
 tests/__pycache__/__init__.cpython-314.pyc    | Bin 0 -> 157 bytes
 .../conftest.cpython-314-pytest-8.3.4.pyc     | Bin 0 -> 1671 bytes
 ...t_servers_api.cpython-314-pytest-8.3.4.pyc | Bin 0 -> 14138 bytes
 tests/conftest.py                             |  30 ++++
 tests/test_servers_api.py                     |  67 +++++++++
 20 files changed, 574 insertions(+)
 create mode 100644 API.md
 create mode 100644 Dockerfile
 create mode 100644 app/__init__.py
 create mode 100644 app/__pycache__/__init__.cpython-314.pyc
 create mode 100644 app/__pycache__/db.cpython-314.pyc
 create mode 100644 app/__pycache__/main.cpython-314.pyc
 create mode 100644 app/__pycache__/schemas.cpython-314.pyc
 create mode 100644 app/db.py
 create mode 100644 app/main.py
 create mode 100644 app/schemas.py
 create mode 100644 cli.py
 create mode 100644 docker-compose.yml
 create mode 100644 pytest.ini
 create mode 100644 requirements.txt
 create mode 100644 tests/__init__.py
 create mode 100644 tests/__pycache__/__init__.cpython-314.pyc
 create mode 100644 tests/__pycache__/conftest.cpython-314-pytest-8.3.4.pyc
 create mode 100644 tests/__pycache__/test_servers_api.cpython-314-pytest-8.3.4.pyc
 create mode 100644 tests/conftest.py
 create mode 100644 tests/test_servers_api.py

diff --git a/API.md b/API.md
new file mode 100644
index 0000000..74e35c0
--- /dev/null
+++ b/API.md
@@ -0,0 +1,122 @@
+CRUD service for tracking the state of servers across multiple datacenters.
+
+The project is implemented as a small production-style backend service with:
+- FastAPI
+- PostgreSQL
+- Raw SQL (psycopg)
+- Docker Compose
+- CLI tool
+- pytest test suite (including Docker-based execution)
+
+---
+
+## How to run the application
+
+### Requirements
+- Docker
+- Docker Compose
+
+From the project root directory:
+
+```bash
+docker compose up --build
+```
+
+After startup the API will be available at:
+- API base URL: http://localhost:8000
+- Swagger / OpenAPI UI: http://localhost:8000/docs
+
+## Data model
+
+A server has the following fields:
+
+```json
+{
+  "id": 1,
+  "hostname": "srv-1",
+  "ip_address": "10.0.0.1",
+  "state": "active",
+  "datacenter": "berlin",
+  "created_at": "2026-01-18T12:17:01Z",
+  "updated_at": "2026-01-18T12:17:01Z"
+}
+```
+
+Validation rules:
+- hostname must be unique
+- ip_address must be a valid IPv4 or IPv6 address
+- state must be one of:
+  - active
+  - offline
+  - retired
+
+## API endpoints
+
+### Create a server
+
+```bash
+curl -X POST http://localhost:8000/servers \
+  -H 'Content-Type: application/json' \
+  -d '{
+        "hostname": "srv-1",
+        "ip_address": "10.0.0.1",
+        "state": "active",
+        "datacenter": "berlin"
+      }'
+```
+
+### List all servers
+
+```bash
+curl http://localhost:8000/servers
+```
+
+### Get server by id
+
+```bash
+curl http://localhost:8000/servers/1
+```
+
+### Update a server
+
+```bash
+curl -X PUT http://localhost:8000/servers/1 \
+  -H 'Content-Type: application/json' \
+  -d '{
+        "state": "offline"
+      }'
+```
+
+### Delete a server
+
+```bash
+curl -X DELETE http://localhost:8000/servers/1
+```
+
+## CLI
+
+The CLI communicates with the HTTP API.
+
+Optionally set the API URL:
+
+```bash
+export API_URL=http://localhost:8000
+```
+
+Examples:
+
+```bash
+python cli.py create srv-2 10.0.0.2 active --datacenter ber
+python cli.py list
+python cli.py get 1
+python cli.py update 1 --state retired
+python cli.py delete 1
+```
+
+## Running tests (Docker)
+
+Tests can be executed entirely inside Docker, using the same image and dependencies
+as the API service.
+
+Start PostgreSQL:
+
+```bash
+docker compose up -d db
+```
+
+Run tests in a container:
+
+```bash
+docker compose run --rm tests
+```
+
+Expected output:
+
+```
+6 passed
+```
+
+Notes about test execution:
+- Tests use the same PostgreSQL service defined in docker-compose.yml
+- The test suite truncates the servers table between tests
+- During negative tests (e.g. duplicate hostname), PostgreSQL logs unique
+  constraint violation errors; this is expected and handled by the API
+
+## Running tests locally (optional)
+
+If you prefer to run tests outside Docker:
+
+```bash
+export DATABASE_URL=postgresql://inventory:inventory@localhost:5432/inventory
+pip install -r requirements.txt
+pytest -q
+```
+
+## Notes
+- Database schema is initialized automatically on application startup
+- All database operations are implemented using raw SQL (no ORM)
+- The CLI is a thin client that communicates with the API over HTTP
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..f107ce8
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,15 @@
+FROM python:3.12-slim
+
+WORKDIR /app
+
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONUNBUFFERED=1
+
+COPY requirements.txt .
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY . .
+
+EXPOSE 8000
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
+
diff --git a/app/__init__.py b/app/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/__pycache__/__init__.cpython-314.pyc b/app/__pycache__/__init__.cpython-314.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..82ac119a952fe6f09f3351aa8a9e2f974addcd22
GIT binary patch
literal 155
diff --git a/app/__pycache__/db.cpython-314.pyc b/app/__pycache__/db.cpython-314.pyc
new file mode 100644
GIT binary patch
literal 2367
diff --git a/app/__pycache__/main.cpython-314.pyc b/app/__pycache__/main.cpython-314.pyc
new file mode 100644
GIT binary patch
literal 8998
diff --git a/app/__pycache__/schemas.cpython-314.pyc b/app/__pycache__/schemas.cpython-314.pyc
new file mode 100644
GIT binary patch
literal 2657
diff --git a/app/db.py b/app/db.py
new file mode 100644
index 0000000..778f2c2
--- /dev/null
+++ b/app/db.py
@@ -0,0 +1,42 @@
+import os
+from contextlib import contextmanager
+from typing import Iterator
+
+import psycopg
+from psycopg.rows import dict_row
+
+DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://inventory:inventory@localhost:5432/inventory")
+
+
+@contextmanager
+def get_conn() -> Iterator[psycopg.Connection]:
+    conn = psycopg.connect(DATABASE_URL, row_factory=dict_row)
+    try:
+        yield conn
+        conn.commit()
+    except Exception:
+        conn.rollback()
+        raise
+    finally:
+        conn.close()
+
+
+def init_db() -> None:
+    ddl = """
+    CREATE TABLE IF NOT EXISTS servers (
+        id BIGSERIAL PRIMARY KEY,
+        hostname TEXT NOT NULL UNIQUE,
+        ip_address INET NOT NULL,
+        state TEXT NOT NULL,
+        datacenter TEXT,
+        created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
+        updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
+        CONSTRAINT servers_state_check CHECK (state IN ('active', 'offline', 'retired'))
+    );
+
+    CREATE INDEX IF NOT EXISTS idx_servers_state ON servers(state);
+    """
+    with get_conn() as conn:
+        with conn.cursor() as cur:
+            cur.execute(ddl)
+
diff --git a/app/main.py b/app/main.py
new file mode 100644
index 0000000..f8aeeca
--- /dev/null
+++ b/app/main.py
@@ -0,0 +1,135 @@
+from contextlib import asynccontextmanager
+
+from fastapi import FastAPI, HTTPException, Response, status
+from psycopg.errors import CheckViolation, UniqueViolation
+
+from app.db import get_conn, init_db
+from app.schemas import ServerCreate, ServerOut, ServerUpdate
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    # Initialize DB schema on startup
+    init_db()
+    yield
+    # Nothing to cleanup here (connections are per-request)
+
+
+app = FastAPI(title="Server Inventory API", version="1.0.0", lifespan=lifespan)
+
+
+def _row_to_out(row: dict) -> ServerOut:
+    return ServerOut(
+        id=row["id"],
+        hostname=row["hostname"],
+        ip_address=str(row["ip_address"]),
+        state=row["state"],
+        datacenter=row.get("datacenter"),
+        created_at=row["created_at"].isoformat(),
+        updated_at=row["updated_at"].isoformat(),
+    )
+
+@app.post("/servers", response_model=ServerOut, status_code=status.HTTP_201_CREATED)
+def create_server(payload: ServerCreate) -> ServerOut:
+    sql = """
+    INSERT INTO servers (hostname, ip_address, state, datacenter)
+    VALUES (%s, %s, %s, %s)
+    RETURNING id, hostname, ip_address, state, datacenter, created_at, updated_at;
+    """
+    try:
+        with get_conn() as conn:
+            with conn.cursor() as cur:
+                cur.execute(sql, (payload.hostname, str(payload.ip_address), payload.state.value, payload.datacenter))
+                row = cur.fetchone()
+                return _row_to_out(row)
+    except UniqueViolation:
+        raise HTTPException(status_code=409, detail="hostname must be unique")
+    except CheckViolation:
+        # should be prevented by pydantic Enum, but keep DB safety
+        raise HTTPException(status_code=422, detail="invalid state")
+
+
+@app.get("/servers", response_model=list[ServerOut])
+def list_servers() -> list[ServerOut]:
+    sql = """
+    SELECT id, hostname, ip_address, state, datacenter, created_at, updated_at
+    FROM servers
+    ORDER BY id;
+    """
+    with get_conn() as conn:
+        with conn.cursor() as cur:
+            cur.execute(sql)
+            rows = cur.fetchall()
+    return [_row_to_out(r) for r in rows]
+
+
+@app.get("/servers/{server_id}", response_model=ServerOut)
+def get_server(server_id: int) -> ServerOut:
+    sql = """
+    SELECT id, hostname, ip_address, state, datacenter, created_at, updated_at
+    FROM servers
+    WHERE id = %s;
+    """
+    with get_conn() as conn:
+        with conn.cursor() as cur:
+            cur.execute(sql, (server_id,))
+            row = cur.fetchone()
+    if not row:
+        raise HTTPException(status_code=404, detail="server not found")
+    return _row_to_out(row)
+
+
+@app.put("/servers/{server_id}", response_model=ServerOut)
+def update_server(server_id: int, payload: ServerUpdate) -> ServerOut:
+    # Build dynamic raw SQL safely with whitelist + parameter list
+    fields = []
+    params: list[object] = []
+
+    if payload.hostname is not None:
+        fields.append("hostname = %s")
+        params.append(payload.hostname)
+    if payload.ip_address is not None:
+        fields.append("ip_address = %s")
+        params.append(str(payload.ip_address))
+    if payload.state is not None:
+        fields.append("state = %s")
+        params.append(payload.state.value)
+    if payload.datacenter is not None:
+        fields.append("datacenter = %s")
+        params.append(payload.datacenter)
+
+    if not fields:
+        raise HTTPException(status_code=400, detail="no fields to update")
+
+    sql = f"""
+    UPDATE servers
+    SET {", ".join(fields)}, updated_at = now()
+    WHERE id = %s
+    RETURNING id, hostname, ip_address, state, datacenter, created_at, updated_at;
+    """
+    params.append(server_id)
+
+    try:
+        with get_conn() as conn:
+            with conn.cursor() as cur:
+                cur.execute(sql, tuple(params))
+                row = cur.fetchone()
+                if not row:
+                    raise HTTPException(status_code=404, detail="server not found")
+                return _row_to_out(row)
+    except UniqueViolation:
+        raise HTTPException(status_code=409, detail="hostname must be unique")
+    except CheckViolation:
+        raise HTTPException(status_code=422, detail="invalid state")
+
+
+@app.delete("/servers/{server_id}", status_code=status.HTTP_204_NO_CONTENT)
+def delete_server(server_id: int) -> Response:
+    sql = "DELETE FROM servers WHERE id = %s;"
+    with get_conn() as conn:
+        with conn.cursor() as cur:
+            cur.execute(sql, (server_id,))
+            if cur.rowcount == 0:
+                raise HTTPException(status_code=404, detail="server not found")
+    return Response(status_code=status.HTTP_204_NO_CONTENT)
+
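The `update_server` handler above builds its `UPDATE` statement from a fixed whitelist of columns plus a positional parameter list, so column names never come from user input and every value goes through a `%s` placeholder. A standalone sketch of what that composition yields for a partial update (the `fields`/`params` names mirror app/main.py; the snippet does not touch the database and the sample values are illustrative):

```python
# Standalone sketch of the whitelist + placeholder approach used in update_server.
payload = {"ip_address": "10.0.0.2", "state": "offline"}
server_id = 1

fields: list[str] = []
params: list[object] = []
for column in ("hostname", "ip_address", "state", "datacenter"):  # fixed whitelist, not user input
    if column in payload:
        fields.append(f"{column} = %s")
        params.append(payload[column])
params.append(server_id)  # WHERE id = %s comes last

sql = (
    f"UPDATE servers SET {', '.join(fields)}, updated_at = now() "
    "WHERE id = %s "
    "RETURNING id, hostname, ip_address, state, datacenter, created_at, updated_at;"
)
print(sql)     # UPDATE servers SET ip_address = %s, state = %s, updated_at = now() WHERE id = %s RETURNING ...
print(params)  # ['10.0.0.2', 'offline', 1]
```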
diff --git a/app/schemas.py b/app/schemas.py
new file mode 100644
index 0000000..080aec1
--- /dev/null
+++ b/app/schemas.py
@@ -0,0 +1,35 @@
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, Field, IPvAnyAddress
+
+
+class ServerState(str, Enum):
+    active = "active"
+    offline = "offline"
+    retired = "retired"
+
+
+class ServerCreate(BaseModel):
+    hostname: str = Field(min_length=1, max_length=255)
+    ip_address: IPvAnyAddress
+    state: ServerState
+    datacenter: Optional[str] = Field(default=None, max_length=255)
+
+
+class ServerUpdate(BaseModel):
+    hostname: Optional[str] = Field(default=None, min_length=1, max_length=255)
+    ip_address: Optional[IPvAnyAddress] = None
+    state: Optional[ServerState] = None
+    datacenter: Optional[str] = Field(default=None, max_length=255)
+
+
+class ServerOut(BaseModel):
+    id: int
+    hostname: str
+    ip_address: str
+    state: ServerState
+    datacenter: Optional[str] = None
+    created_at: str
+    updated_at: str
+
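These models are what produce the API's 422 responses for bad input: `IPvAnyAddress` rejects malformed addresses and the `ServerState` enum rejects unknown states before any SQL runs. A standalone check along these lines (not part of the service code) shows the behaviour:

```python
# Standalone sketch: exercise the request model directly.
from pydantic import ValidationError

from app.schemas import ServerCreate

try:
    ServerCreate(hostname="srv-x", ip_address="not-an-ip", state="active")
except ValidationError as exc:
    print(exc.errors()[0]["loc"])  # ('ip_address',)

try:
    ServerCreate(hostname="srv-x", ip_address="10.0.0.9", state="broken")
except ValidationError as exc:
    print(exc.errors()[0]["loc"])  # ('state',)
```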
diff --git a/cli.py b/cli.py
new file mode 100644
index 0000000..f235c80
--- /dev/null
+++ b/cli.py
@@ -0,0 +1,79 @@
+import os
+from typing import Optional
+
+import requests
+import typer
+
+app = typer.Typer(help="Server Inventory CLI (talks to the REST API)")
+
+API_URL = os.getenv("API_URL", "http://localhost:8000")
+
+
+def _handle(resp: requests.Response):
+    if resp.status_code >= 400:
+        try:
+            detail = resp.json()
+        except Exception:
+            detail = resp.text
+        # Report the API error before exiting with a non-zero code
+        typer.echo(f"HTTP {resp.status_code}: {detail}", err=True)
+        raise typer.Exit(code=1)
+    if resp.status_code == 204:
+        return None
+    return resp.json()
+
+
+@app.command()
+def create(hostname: str, ip: str, state: str, datacenter: Optional[str] = None):
+    """Create a server."""
+    payload = {"hostname": hostname, "ip_address": ip, "state": state, "datacenter": datacenter}
+    r = requests.post(f"{API_URL}/servers", json=payload, timeout=10)
+    typer.echo(_handle(r))
+
+
+@app.command("list")
+def list_cmd():
+    """List all servers."""
+    r = requests.get(f"{API_URL}/servers", timeout=10)
+    typer.echo(_handle(r))
+
+
+@app.command()
+def get(id: int):
+    """Get one server by id."""
+    r = requests.get(f"{API_URL}/servers/{id}", timeout=10)
+    typer.echo(_handle(r))
+
+
+@app.command()
+def update(
+    id: int,
+    hostname: Optional[str] = None,
+    ip: Optional[str] = None,
+    state: Optional[str] = None,
+    datacenter: Optional[str] = None,
+):
+    """Update a server (only provided fields are changed)."""
+    payload = {}
+    if hostname is not None:
+        payload["hostname"] = hostname
+    if ip is not None:
+        payload["ip_address"] = ip
+    if state is not None:
+        payload["state"] = state
+    if datacenter is not None:
+        payload["datacenter"] = datacenter
+
+    r = requests.put(f"{API_URL}/servers/{id}", json=payload, timeout=10)
+    typer.echo(_handle(r))
+
+
+@app.command()
+def delete(id: int):
+    """Delete a server by id."""
+    r = requests.delete(f"{API_URL}/servers/{id}", timeout=10)
+    _handle(r)
+    typer.echo("deleted")
+
+
+if __name__ == "__main__":
+    app()
+
+ environment: + DATABASE_URL: postgresql://inventory:inventory@db:5432/inventory + depends_on: + db: + condition: service_healthy + command: ["pytest", "-q"] + diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..3e3a32b --- /dev/null +++ b/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +testpaths = tests +filterwarnings = + ignore:'asyncio\.iscoroutinefunction' is deprecated:DeprecationWarning + diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..79a1e26 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,9 @@ +fastapi==0.115.6 +uvicorn[standard]==0.32.1 +psycopg[binary]==3.2.13 +typer==0.15.1 +requests==2.32.3 + +pytest==8.3.4 +httpx==0.28.1 + diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/__pycache__/__init__.cpython-314.pyc b/tests/__pycache__/__init__.cpython-314.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6373af27d42a7df95527afdef3da36935b15106d GIT binary patch literal 157 zcmdPq_I|p@<2{{|u76Wu+floLW?@ zpP!SOU7V}$lAm0fo0?ZrtRIwGkZ+`)oROH5lbDyDYN%h5S_~A9kI&4@EQycTE2zB1 fVUwGmQks)$SHuc53S?O^i1CS;k&&^88OQo1|^B#BMXXnS#w)m(rwcb(4TiLkOx<7)>TrRY|oelBau> zC4KU`u~Am=7#qvuUi-5DA^*S#G!I2kHrQbF&6zg}qYpb*mR+Z0+ZFfR^Lg(*=lrhD zXVMA4vGS8;HU$8k3%wBynNF4fG~okKNDUYXR-r0-LJq1C7R_}n5|aW;jKy#jYH~Hs z;(<-9C8|l5tfp8hkR!EpHN!GFSe2G0dxyPR&wvtDq%z)y4|aStwsf}34^s4sDY6p3 z9QrAVlTZ#FSoS|+l9w?XK9tlz#EE~4=IVw zU#c7^Fa?hZX5n(j$Iw9v{&Z=X8ZIq4mhlnYD9t&#w}IEACBS22zN`Z1%9xMvsUpd_OD0vH-h7mx`hqqg+k5+f|r>SGD8xL>now~lCntdTc zW*kXk_%Cc};zbn2*6QDZrZx7GUc}8FoTywS-<_@$qCaLUqw!skk=>OusNEe+R?ek& z&jgx6vT`N9Tb5CO2n@XNe0K6k264L*K6gZL*sLjVlju|sTyAoCV_gor0BGHmB|UhW z?nzXSYimhPy?uxGQ ziEA>$kM~u6tfxz0Fn3oq*B2b>OI^{=u4$BM4YSCN;hp%UeF=XKMQjL9x!X^fYc@2~ z4ifSu{F4~=rax3i1gL|55kIzOZZXd_g7+4*G|2Py8pu~SoV%W7yz8FBLwN1zr%2i& zA>?O(&*8K9uaJHQh2P=LXK?D5;nDAF-_&;Oy`uSK_`^LIYD*x_eqIRppI`KJeJLQmLvYaZJ)ttL`gr2>MX*)N|&~W8=gL3LBsq zhBN2Rd~@gCbA9eP=iZY80l$}nW9aXR_`lp7H;e`Ia2U+XbHGe;*Eq_5!ew~v>bT&{ zI-0o)BvaMQQAZt@aZx8wHzhy?S_RZYT|m9m4OFE39xf<6!zwQg^G3~h5#hfR{=fXw z4*W`(8?-+~1RNB;O&!!p39X_o>ZU@7P|s1Lg=s#_51OsjU(jgUaqX|;-C<|{1&$j( zx6gPo`WKFC;)aL`9CVpn;o8d4<}m5uY0H&8J&q#=mU^i;B$%i{O>$;QzqWEmj2p+` zI=ZXmOTJ<>mRh+mf5hmq*}_;|$$Np7-}@da(piRz{Y1$oj)0%?~ z=6#M!3)lGZ3t^&-@K>h%M(>u$&(;I;U2TLtFv6`@KDUi`+vxVNw?vBK8l)(iHJevMiULUSOOO&@EG6pLloF17iIkirQX-pDDkG(;5-GVh zrSzJVxIvh+OApGK+PQ*-da`Mubh&|Tc>gVwZ)m6aD+v3n(f5Fr`bXHWS*ae=N}cCl z^ADD@Q%s4fmL`pxY~A{U$a%Q@pL2_i!n)-UkP2Ehw)#R zS|w+q8A)|ThcofplIo2`GtprwnUUCpD>i)Sy2KvxD!C!2lBzR(w^QXAUb~U>z6wNT z_*|xC@BZ}Nqd^&O9=)VG;xXj9kQ^z(^y!#B)tQi>zJ#1XOwWY9o_-zEKjAgI66#Xj zsgaRHJSkZX?|$|rELA_`UpWQ_9`X}C*n;k$?ER_1vI}+a*&^>MRZqCHt!zM?snqc9QfM2?O~Sc4ikqlrD9N}gvd#ni2lkrcZX%|xW1q!ZC(Gy_iAUJSZD593HWnF+rlI%gXl9I^gsQ;yl}4a!!wH^2~FA^ZD-&Ob;le`N3I@%!$p5 z#5i-Ld1}BaSL|6P50gFpFFoDR?m_Rbv73A-oHpnSjK5%r4q+o#*_?VmjR zPcbkh-|JUM{mi&VrlB|W`nf>rr~0ju#r&$rV&_S{tz_MXil$_%1R%5Yv``~dJ~M7% z>C!aJZ?U$##IpNcd?dP}iVv|CGvmTAiiNQr;oH-Iy#3sF%xA6;zworCvo&>E~ zW9GYgu~i|V*{d3v6GMx{ICCVVh^=}tZ{rsbZK3SUJPGLn^iTU|5;Uj%G~g7&iGoBNE*YKMAB50L{fR9sE|amqoU?4pF{%N zSL>ccf|{cxkcL52|o z+kylWgD$ZqWO3a+{uQ|Tfa_LW*<^e?3o&p^%0%M{wiO}UkYEN7gJdv{?MBi9B*;U2 zo3zJ{@uU`NV(cg~*xlsVM@aS{X+yFXNjnnM1-2ha5J?9TjMIWXb{P57NJ2j&&f0A1QkGFBUapvmm z#G~HF2cLMA%Rg2=la%(6sm7dVbn@dR(SQHc^eKh3JouqT=ERmoVw^eBGIh!-SFS|dpQ&)&XxW)c##R{L|3Ksdw6zXbv_k-pk8b8+^@qkRy4H7eoHl*p*EcWg?`3Gr_{o&axtQ$Jy3 z%mX$&*2A0sQ1w#){nVDyHOFMaTww3fG*oq6XQ`V~Qg=`;fJj%Zr0f$!Uj*{HR&Cd` zO8cr^vAz#r^bpD@R>$MSlZ(oaB1$x_?7e}Cb@Lrltp93m$x6BRA)Y!bhsTt>vBC3c 
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..2928b46
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,30 @@
+import os
+
+import pytest
+from fastapi.testclient import TestClient
+
+from app.db import init_db, get_conn
+from app.main import app
+
+TEST_DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://inventory:inventory@localhost:5432/inventory")
+
+
+@pytest.fixture(autouse=True, scope="session")
+def _init():
+    # ensure schema exists
+    init_db()
+
+
+@pytest.fixture(autouse=True)
+def _clean_db():
+    # truncate before each test for isolation
+    with get_conn() as conn:
+        with conn.cursor() as cur:
+            cur.execute("TRUNCATE TABLE servers RESTART IDENTITY;")
+    yield
+
+
+@pytest.fixture()
+def client():
+    return TestClient(app)
+
diff --git a/tests/test_servers_api.py b/tests/test_servers_api.py
new file mode 100644
index 0000000..4b56b5f
--- /dev/null
+++ b/tests/test_servers_api.py
@@ -0,0 +1,67 @@
+def test_crud_happy_path(client):
+    # create
+    r = client.post(
+        "/servers",
+        json={"hostname": "srv-1", "ip_address": "10.0.0.1", "state": "active", "datacenter": "dc-ber"},
+    )
+    assert r.status_code == 201
+    data = r.json()
+    assert data["id"] == 1
+    assert data["hostname"] == "srv-1"
+    assert data["ip_address"] == "10.0.0.1"
+    assert data["state"] == "active"
+
+    # list
+    r = client.get("/servers")
+    assert r.status_code == 200
+    lst = r.json()
+    assert len(lst) == 1
+
+    # get one
+    r = client.get("/servers/1")
+    assert r.status_code == 200
+    assert r.json()["hostname"] == "srv-1"
+
+    # update
+    r = client.put("/servers/1", json={"state": "offline", "ip_address": "10.0.0.2"})
+    assert r.status_code == 200
+    assert r.json()["state"] == "offline"
+    assert r.json()["ip_address"] == "10.0.0.2"
+
+    # delete
+    r = client.delete("/servers/1")
+    assert r.status_code == 204
+
+    # verify gone
+    r = client.get("/servers/1")
+    assert r.status_code == 404
+
+
+def test_hostname_unique(client):
+    r1 = client.post("/servers", json={"hostname": "dup", "ip_address": "10.0.0.1", "state": "active"})
+    assert r1.status_code == 201
+
+    r2 = client.post("/servers", json={"hostname": "dup", "ip_address": "10.0.0.2", "state": "offline"})
+    assert r2.status_code == 409
+    assert "unique" in r2.json()["detail"]
+
+
+def test_ip_validation(client):
+    r = client.post("/servers", json={"hostname": "badip", "ip_address": "not-an-ip", "state": "active"})
+    assert r.status_code == 422  # pydantic validation
+
+
+def test_state_validation(client):
+    r = client.post("/servers", json={"hostname": "badstate", "ip_address": "10.0.0.3", "state": "broken"})
+    assert r.status_code == 422  # enum validation
+
+
+def test_update_not_found(client):
+    r = client.put("/servers/999", json={"state": "offline"})
+    assert r.status_code == 404
+
+
+def test_delete_not_found(client):
+    r = client.delete("/servers/999")
+    assert r.status_code == 404
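For a quick end-to-end check against the running compose stack (as opposed to the in-process `TestClient` suite above), a short script along the following lines can exercise the live HTTP server. It assumes the API is reachable on localhost:8000 as in docker-compose.yml; the file name `smoke_test.py` and the sample values are illustrative, and `requests` is already a project dependency (see requirements.txt):

```python
# smoke_test.py (illustrative): exercise the live API started with `docker compose up --build`.
import requests

BASE = "http://localhost:8000"

# create a server
created = requests.post(
    f"{BASE}/servers",
    json={"hostname": "smoke-1", "ip_address": "10.0.0.50", "state": "active", "datacenter": "berlin"},
    timeout=10,
)
created.raise_for_status()
server_id = created.json()["id"]

# the listing should now contain it
assert any(s["id"] == server_id for s in requests.get(f"{BASE}/servers", timeout=10).json())

# clean up and expect 204 No Content
assert requests.delete(f"{BASE}/servers/{server_id}", timeout=10).status_code == 204
```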