new file: Dockerfile
new file: Makefile new file: database-migration/Dockerfile new file: database-migration/data.db new file: database-migration/db_migration.py new file: database-migration/migration.py new file: database-migration/requirements.txt new file: docker-compose.migration.yml new file: docker-compose.yml new file: main.py new file: requirements.txt
This commit is contained in:
@@ -0,0 +1,7 @@
|
||||
# Image for the authentication FastAPI service (run via docker-compose / podman-compose).
FROM python:3.13-slim

WORKDIR /app

# Copy and install dependencies first so this layer stays cached
# until requirements.txt changes.
COPY requirements.txt .

RUN pip install --no-cache-dir -r requirements.txt

# Copy the application source.
COPY . .

# uvicorn listens on 1234 (see main.py).
EXPOSE 1234

CMD ["python3", "main.py"]
|
||||
@@ -0,0 +1,25 @@
|
||||
# Convenience targets for building/running the authentication stack with podman-compose.
# All targets are phony: without .PHONY, a file named e.g. "build" or "up" in
# this directory would silently disable the target.
.PHONY: build reload up down restart migrate migrate-test

build:
	podman-compose build

# Remove stale containers so the next `up` starts from a clean slate.
reload:
	podman rm authentication_app_1
	podman rm authentication_db_1
	podman rm authentication_migration_1

up:
	podman-compose up --build

down:
	podman-compose down

# Recursive invocations must use $(MAKE), not a literal `make`, so flags
# and the jobserver are propagated correctly (GNU Make convention).
restart:
	$(MAKE) down up

# Copy the current SQLite auth database next to the migration image context,
# then run the one-shot migration stack.
migrate:
	cp ../../../server/data/users/auth_data/basis_data/data.db database-migration/data.db
	# make reload
	# podman-compose -f docker-compose.yml -f docker-compose.migration.yml up --build
	podman-compose -f docker-compose.migration.yml up --build

migrate-test:
	podman-compose -f docker-compose.migration.yml up --build
|
||||
@@ -0,0 +1,6 @@
|
||||
# Image for the one-shot SQLite -> Postgres migration job.
FROM python:3.13-slim

WORKDIR /app/

# Install dependencies first to keep this layer cached.
COPY requirements.txt .

RUN pip install --no-cache-dir -r requirements.txt

COPY . .

# Run the migration script that actually exists in this directory.
# (Previously referenced db_migration_test2.py, which is not part of the commit.)
CMD ["python3", "db_migration.py"]
|
||||
Binary file not shown.
@@ -0,0 +1,38 @@
|
||||
"""One-shot migration: copy user rows from a bundled SQLite file into Postgres.

Reads every row of the `data` table in ./data.db and inserts it into the
`users` table of the Postgres instance reachable at host `db`, then advances
the `users.id` serial sequence past the migrated ids so future inserts do not
collide. Connection credentials come from the compose env_file (.env).
"""

print('RUNNING!')

import sqlite3
import os
import psycopg

print(os.getcwd())
print(os.listdir())
# NOTE: the raw contents of data.db are deliberately NOT printed any more —
# the previous debug dump wrote every stored password hash into container logs.

conn = psycopg.connect(
    host='db',
    port=5432,
    dbname=os.getenv('POSTGRES_DB'),
    user=os.getenv('POSTGRES_USER'),
    password=os.getenv('POSTGRES_PASSWORD')
)

try:
    cursor = conn.cursor()
    with sqlite3.connect('data.db') as sqlite3_conn:
        sqlite_cursor = sqlite3_conn.cursor()
        try:
            # No parentheses around the column list: `SELECT (a, b, c)` is a
            # row-value expression and is rejected by SQLite in this position.
            sqlite_cursor.execute(
                'SELECT id, email, remail, phone, password FROM data ORDER BY id'
            )
            # Iterate the result set directly instead of looping ids 1..COUNT(*):
            # ids are not guaranteed to be contiguous after deletions.
            for n, row in enumerate(sqlite_cursor, start=1):
                print(f'Migrating user {n} ...')
                cursor.execute(
                    'INSERT INTO users (id, email, remail, phone, password) VALUES (%s, %s, %s, %s, %s)',
                    tuple(row)
                )
            # Advance the serial sequence so the next INSERT gets a fresh id.
            cursor.execute('SELECT setval(pg_get_serial_sequence(\'users\', \'id\'), (SELECT MAX(id) FROM users), true)')
        finally:
            sqlite_cursor.close()
    # psycopg3 starts an implicit transaction: without this commit every
    # migrated row is rolled back when the process exits.
    conn.commit()
finally:
    conn.close()

print('[INFO] Migration completed')
|
||||
@@ -0,0 +1 @@
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
psycopg
|
||||
@@ -0,0 +1,21 @@
|
||||
# Compose stack for the one-shot SQLite -> Postgres migration job.
version: "3.9"

services:
  db:
    image: postgres:15
    environment:
      # Connection string consumed by clients; POSTGRES_* values come from .env.
      - DB_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}
    env_file:
      - .env
    volumes:
      - /srv/data/db/services/authentication:/var/lib/postgresql/data

  migration:
    build: database-migration
    # Name the built image explicitly. Do NOT pair `build:` with the tag
    # "python:3.11-slim": compose would tag the freshly built image with the
    # base image's name, clobbering it locally.
    image: authentication-migration
    environment:
      - DB_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}
    env_file:
      - .env
    depends_on:
      - db
    # Run the migration script that actually exists in database-migration/
    # (previously referenced the nonexistent db_migration_test1.py).
    command: python3 db_migration.py
||||
@@ -0,0 +1,27 @@
|
||||
# Compose stack for the authentication service and its Postgres database.
version: "3.9"

services:
  app:
    build: .
    # Name the built image explicitly. Do NOT pair `build:` with the tag
    # "python:3.11-slim": compose would tag the freshly built image with the
    # base image's name, clobbering it locally (the Dockerfile is 3.13 anyway).
    image: authentication-app
    working_dir: /app
    # Bind-mount the source so uvicorn's reload picks up local edits.
    volumes:
      - .:/app
    command: python3 main.py
    ports:
      - "1234:1234"
    depends_on:
      - db
    environment:
      - DB_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}
    env_file:
      - .env

  db:
    image: postgres:15
    environment:
      - DB_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB}
    env_file:
      - .env
    volumes:
      - /srv/data/db/services/authentication:/var/lib/postgresql/data
||||
@@ -0,0 +1,85 @@
|
||||
import os

import uvicorn
from fastapi import FastAPI, Depends, HTTPException
from psycopg_pool import AsyncConnectionPool
from pydantic import BaseModel
|
||||
|
||||
class Item(BaseModel):
    """Request/response model for the /item endpoints."""

    name: str
    description: str
    reporter: str
    priority: int
    # NOTE(review): presumably flags low-quality reports — confirm intent with the author.
    is_stupid: bool
|
||||
|
||||
app = FastAPI()
# Assigned in on_startup(); annotated here so the module-level name exists.
pool: AsyncConnectionPool
|
||||
|
||||
|
||||
@app.on_event('startup')
async def on_startup():
    """Open the Postgres connection pool and ensure the users table exists.

    Raises:
        RuntimeError: if the DB_URL environment variable is not set.
    """
    global pool
    db_url = os.getenv('DB_URL')
    if db_url is None:
        # Fail fast with a clear message instead of an opaque pool error.
        raise RuntimeError('DB_URL environment variable is not set')
    # open=False: implicit opening in the constructor is deprecated in
    # psycopg_pool — open explicitly below, inside the running event loop.
    pool = AsyncConnectionPool(
        db_url,
        min_size=5,
        max_size=40,
        open=False
    )
    await pool.open()
    print('[INFO] Started connection pool')

    async with pool.connection() as conn:
        async with conn.cursor() as cursor:
            await cursor.execute('CREATE TABLE IF NOT EXISTS users (id SERIAL PRIMARY KEY, email VARCHAR(64), remail VARCHAR(64), phone VARCHAR(16), password VARCHAR(128))')
            print('[INFO] Database initialized')
            await conn.commit()
|
||||
|
||||
@app.on_event('shutdown')
async def on_shutdown():
    """Close the connection pool opened in on_startup()."""
    await pool.close()
    print('[INFO] Closed connection pool')
|
||||
|
||||
async def get_conn():
    """FastAPI dependency: yield a pooled connection.

    The connection is returned to the pool (and its transaction resolved)
    when the request handler finishes.
    """
    async with pool.connection() as conn:
        yield conn
|
||||
|
||||
@app.get('/')
def read_root():
    """Health-check endpoint: confirms the service is up."""
    return {'message': 'microservice is running'}
|
||||
|
||||
@app.post('/item')
async def create_item(item: Item, conn = Depends(get_conn)):
    """Insert a new item and echo the stored row back.

    Bug fix: the response previously labelled row column 1 (name) as
    'description' and shifted every following field by one, dropping
    the name entirely.
    """
    async with conn.cursor() as cursor:
        await cursor.execute('INSERT INTO items (name, description, reporter, priority, is_stupid) VALUES (%s, %s, %s, %s, %s) RETURNING *', (item.name, item.description, item.reporter, item.priority, item.is_stupid))
        i = await cursor.fetchone()
        # RETURNING * yields (id, name, description, reporter, priority, is_stupid).
        return {
            'id': i[0],
            'name': i[1],
            'description': i[2],
            'reporter': i[3],
            'priority': i[4],
            'is_stupid': i[5],
        }
|
||||
|
||||
@app.get('/item/{item_id}')
async def read_item(item_id: int, conn = Depends(get_conn)):
    """Fetch a single item by id.

    Raises:
        HTTPException: 404 when no item with this id exists (previously a
            missing row crashed with ``None[0]`` -> TypeError -> HTTP 500).
    """
    # item_id is now typed: FastAPI validates/converts the path parameter,
    # so the SQL comparison receives an int rather than a raw string.
    async with conn.cursor() as cursor:
        await cursor.execute('SELECT * FROM items WHERE id = %s', (item_id,))
        i = await cursor.fetchone()
        if i is None:
            raise HTTPException(status_code=404, detail='item not found')
        return {'id': i[0], 'name' : i[1], 'description': i[2], 'reporter': i[3], 'priority': i[4], 'is_stupid': i[5]}
|
||||
|
||||
@app.get('/items')
async def read_items(conn = Depends(get_conn)):
    """List all items.

    Renamed from ``read_item``: the duplicate name shadowed the single-item
    handler at module level (F811). Also fixed the response labels, which
    skipped the name column and shifted every field by one.
    """
    async with conn.cursor() as cursor:
        await cursor.execute('SELECT * FROM items')
        items = await cursor.fetchall()
        # Row layout: (id, name, description, reporter, priority, is_stupid).
        return [
            {'id': i[0], 'name': i[1], 'description': i[2],
             'reporter': i[3], 'priority': i[4], 'is_stupid': i[5]}
            for i in items
        ]
|
||||
|
||||
@app.get('/datadir')
async def get_datadir(conn = Depends(get_conn)):
    """Debug endpoint: report the Postgres server's data directory.

    NOTE(review): this exposes server internals to any caller — consider
    removing or access-restricting it before production use.
    """
    async with conn.cursor() as cursor:
        await cursor.execute('SHOW data_directory')
        return {'res': await cursor.fetchall()}
|
||||
|
||||
if __name__ == '__main__':
    # Dev-style launch: reload=True watches /app (bind-mounted by compose),
    # so local edits restart the server inside the container.
    # NOTE(review): reload should be disabled for a production deployment.
    uvicorn.run(
        'main:app',
        host = '0.0.0.0',
        port = 1234,
        reload = True,
        reload_dirs = ['/app'],
        server_header = False  # suppress the Server response header
    )
|
||||
@@ -0,0 +1,5 @@
|
||||
fastapi
|
||||
psycopg_pool
|
||||
uvicorn
|
||||
psycopg[binary]
|
||||
pydantic
|
||||
Reference in New Issue
Block a user