diff --git a/README.md b/README.md
index 42b1de3..f721e4d 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,14 @@
 # Soul Diary
 
+Self-hosted service.
+
 ## ToDo
 
-1. Implement cursor pagination on backends and server
-2. Implement infinity scroll
-3. Add filters: min timestamp, max timestamp, emotions
-4. Implement S3 backend client
-5. Implement FTP backend client
+0. Fill README
+1. Add filters: min timestamp, max timestamp, emotions
+2. Implement S3 backend client
+3. Implement FTP backend client
+4. Add notifications
 
 ## User Flow
diff --git a/soul_diary/backend/api/senses/handlers.py b/soul_diary/backend/api/senses/handlers.py
index b9d33cc..a1f01f5 100644
--- a/soul_diary/backend/api/senses/handlers.py
+++ b/soul_diary/backend/api/senses/handlers.py
@@ -6,6 +6,7 @@ from soul_diary.backend.database.models import Sense, Session
 from .dependencies import is_auth, sense
 from .schemas import (
     CreateSenseRequest,
+    Pagination,
     SenseListResponse,
     SenseResponse,
     UpdateSenseRequest,
@@ -15,11 +16,27 @@ from .schemas import (
 async def get_sense_list(
     database: DatabaseService = fastapi.Depends(database),
     user_session: Session = fastapi.Depends(is_auth),
+    pagination: Pagination = fastapi.Depends(Pagination),
 ) -> SenseListResponse:
     async with database.transaction() as session:
-        senses = await database.get_senses(session=session, user=user_session.user)
+        senses_count = await database.get_senses_count(
+            session=session,
+            user=user_session.user,
+        )
+        senses_list, previous_cursor, next_cursor = await database.get_senses(
+            session=session,
+            user=user_session.user,
+            cursor=pagination.cursor,
+            limit=pagination.limit,
+        )
 
-    return SenseListResponse(data=senses)
+    return SenseListResponse(
+        data=senses_list,
+        limit=pagination.limit,
+        total_items=senses_count,
+        previous=previous_cursor,
+        next=next_cursor,
+    )
 
 
 async def create_sense(
diff --git a/soul_diary/backend/api/senses/schemas.py b/soul_diary/backend/api/senses/schemas.py
index 7d8e0f6..22ed73a 100644
--- a/soul_diary/backend/api/senses/schemas.py
+++ b/soul_diary/backend/api/senses/schemas.py
@@ -1,7 +1,20 @@
 import uuid
 from datetime import datetime
 
-from pydantic import BaseModel, ConfigDict
+from pydantic import BaseModel, ConfigDict, NonNegativeInt
+
+
+class Pagination(BaseModel):
+    cursor: str | None = None
+    limit: int = 10
+
+
+class PaginatedResponse(BaseModel):
+    data: list
+    limit: int
+    total_items: NonNegativeInt
+    previous: str | None = None
+    next: str | None = None
 
 
 class CreateSenseRequest(BaseModel):
@@ -20,5 +33,5 @@ class SenseResponse(BaseModel):
     created_at: datetime
 
 
-class SenseListResponse(BaseModel):
+class SenseListResponse(PaginatedResponse):
     data: list[SenseResponse]
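Note: because `Pagination` is wired in with `fastapi.Depends(Pagination)`, its fields arrive as query parameters, and the handler echoes the pagination metadata back in the response body. A minimal client-side sketch of paging through the endpoint (the `/senses` path, base URL and auth setup are assumptions for illustration, not part of this patch):

```python
import httpx


async def fetch_sense_page(
    client: httpx.AsyncClient,
    cursor: str | None = None,
    limit: int = 10,
) -> dict:
    # cursor/limit map onto the Pagination dependency's fields as query parameters
    params = {"cursor": cursor, "limit": limit}
    params = {key: value for key, value in params.items() if value is not None}
    response = await client.get("/senses", params=params)  # assumed mount path
    response.raise_for_status()
    # {"data": [...], "limit": 10, "total_items": N, "previous": ..., "next": ...}
    return response.json()
```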
diff --git a/soul_diary/backend/database/migrations/versions/bce1e66bb101_init.py b/soul_diary/backend/database/migrations/versions/bce1e66bb101_init.py
index 3d9153b..9737d7f 100644
--- a/soul_diary/backend/database/migrations/versions/bce1e66bb101_init.py
+++ b/soul_diary/backend/database/migrations/versions/bce1e66bb101_init.py
@@ -12,7 +12,7 @@ import sqlalchemy as sa
 
 
 # revision identifiers, used by Alembic.
-revision: str = 'bce1e66bb101'
+revision: str = "bce1e66bb101"
 down_revision: Union[str, None] = None
 branch_labels: Union[str, Sequence[str], None] = None
 depends_on: Union[str, Sequence[str], None] = None
@@ -20,47 +20,55 @@ depends_on: Union[str, Sequence[str], None] = None
 
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('users',
-    sa.Column('id', sa.Uuid(), nullable=False),
-    sa.Column('username', sa.String(length=64), nullable=False),
-    sa.Column('password', sa.String(length=72), nullable=False),
-    sa.PrimaryKeyConstraint('id'),
-    sa.UniqueConstraint('username')
+    op.create_table(
+        "users",
+        sa.Column("id", sa.Uuid(), nullable=False),
+        sa.Column("username", sa.String(length=64), nullable=False),
+        sa.Column("password", sa.String(length=72), nullable=False),
+        sa.PrimaryKeyConstraint("id"),
+        sa.UniqueConstraint("username"),
     )
-    op.create_index('users__id_idx', 'users', ['id'], unique=False, postgresql_using='hash')
-    op.create_index('users__username_idx', 'users', ['username'], unique=False, postgresql_using='hash')
-    op.create_table('senses',
-    sa.Column('id', sa.Uuid(), nullable=False),
-    sa.Column('user_id', sa.Uuid(), nullable=False),
-    sa.Column('data', sa.String(), nullable=False),
-    sa.Column('created_at', sa.DateTime(), nullable=False),
-    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
-    sa.PrimaryKeyConstraint('id')
+    op.create_index("users__id_idx", "users", ["id"], unique=False, postgresql_using="hash")
+    op.create_index("users__username_idx", "users", ["username"], unique=False,
+                    postgresql_using="hash")
+    op.create_table(
+        "senses",
+        sa.Column("id", sa.Uuid(), nullable=False),
+        sa.Column("user_id", sa.Uuid(), nullable=False),
+        sa.Column("data", sa.String(), nullable=False),
+        sa.Column("created_at", sa.DateTime(), nullable=False),
+        sa.ForeignKeyConstraint(["user_id"], ["users.id"]),
+        sa.PrimaryKeyConstraint("id"),
     )
-    op.create_index('senses__created_at_idx', 'senses', ['created_at'], unique=False, postgresql_using='btree')
-    op.create_index('senses__id_idx', 'senses', ['id'], unique=False, postgresql_using='hash')
-    op.create_index('senses__user_id_idx', 'senses', ['user_id'], unique=False, postgresql_using='btree')
-    op.create_table('sessions',
-    sa.Column('token', sa.String(), nullable=False),
-    sa.Column('user_id', sa.Uuid(), nullable=False),
-    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
-    sa.PrimaryKeyConstraint('token')
+    op.create_index("senses__created_at_idx", "senses", ["created_at"], unique=False,
+                    postgresql_using="btree")
+    op.create_index("senses__id_idx", "senses", ["id"], unique=False, postgresql_using="hash")
+    op.create_index("senses__user_id_idx", "senses", ["user_id"], unique=False,
+                    postgresql_using="btree")
+    op.create_table(
+        "sessions",
+        sa.Column("token", sa.String(), nullable=False),
+        sa.Column("user_id", sa.Uuid(), nullable=False),
+        sa.ForeignKeyConstraint(["user_id"], ["users.id"]),
+        sa.PrimaryKeyConstraint("token"),
     )
-    op.create_index('sessions__token_idx', 'sessions', ['token'], unique=False, postgresql_using='hash')
-    op.create_index('sessions__user_id_idx', 'sessions', ['user_id'], unique=False, postgresql_using='btree')
+    op.create_index("sessions__token_idx", "sessions", ["token"], unique=False,
+                    postgresql_using="hash")
+    op.create_index("sessions__user_id_idx", "sessions", ["user_id"], unique=False,
+                    postgresql_using="btree")
     # ### end Alembic commands ###
 
 
 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index('sessions__user_id_idx', table_name='sessions', postgresql_using='btree')
-    op.drop_index('sessions__token_idx', table_name='sessions', postgresql_using='hash')
-    op.drop_table('sessions')
-    op.drop_index('senses__user_id_idx', table_name='senses', postgresql_using='btree')
-    op.drop_index('senses__id_idx', table_name='senses', postgresql_using='hash')
-    op.drop_index('senses__created_at_idx', table_name='senses', postgresql_using='btree')
-    op.drop_table('senses')
-    op.drop_index('users__username_idx', table_name='users', postgresql_using='hash')
-    op.drop_index('users__id_idx', table_name='users', postgresql_using='hash')
-    op.drop_table('users')
+    op.drop_index("sessions__user_id_idx", table_name="sessions", postgresql_using="btree")
+    op.drop_index("sessions__token_idx", table_name="sessions", postgresql_using="hash")
+    op.drop_table("sessions")
+    op.drop_index("senses__user_id_idx", table_name="senses", postgresql_using="btree")
+    op.drop_index("senses__id_idx", table_name="senses", postgresql_using="hash")
+    op.drop_index("senses__created_at_idx", table_name="senses", postgresql_using="btree")
+    op.drop_table("senses")
+    op.drop_index("users__username_idx", table_name="users", postgresql_using="hash")
+    op.drop_index("users__id_idx", table_name="users", postgresql_using="hash")
+    op.drop_table("users")
     # ### end Alembic commands ###
diff --git a/soul_diary/backend/database/migrations/versions/ed569caafd85_change_indexes.py b/soul_diary/backend/database/migrations/versions/ed569caafd85_change_indexes.py
new file mode 100644
index 0000000..c3f21d5
--- /dev/null
+++ b/soul_diary/backend/database/migrations/versions/ed569caafd85_change_indexes.py
@@ -0,0 +1,45 @@
+"""change indexes
+
+Revision ID: ed569caafd85
+Revises: bce1e66bb101
+Create Date: 2023-12-18 15:31:56.733172
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = "ed569caafd85"
+down_revision: Union[str, None] = "bce1e66bb101"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index("senses__created_at_idx", table_name="senses")
+    op.drop_index("senses__user_id_idx", table_name="senses")
+    op.drop_index("sessions__user_id_idx", table_name="sessions")
+    op.create_index("senses__created_at__id_idx", "senses", ["created_at", "id"], unique=False,
+                    postgresql_using="btree")
+    op.create_index("senses__user_id_idx", "senses", ["user_id"], unique=False,
+                    postgresql_using="hash")
+    op.create_index("sessions__user_id_idx", "sessions", ["user_id"], unique=False,
+                    postgresql_using="hash")
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index("senses__created_at__id_idx", table_name="senses", postgresql_using="btree")
+    op.drop_index("senses__user_id_idx", table_name="senses")
+    op.drop_index("sessions__user_id_idx", table_name="sessions")
+    op.create_index("senses__created_at_idx", "senses", ["created_at"], unique=False)
+    op.create_index("senses__user_id_idx", "senses", ["user_id"], unique=False,
+                    postgresql_using="btree")
+    op.create_index("sessions__user_id_idx", "sessions", ["user_id"], unique=False,
+                    postgresql_using="btree")
+    # ### end Alembic commands ###
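Note on the index changes: `user_id` and `token` are only ever matched by equality, so hash indexes suffice for them, while the new composite btree on `(created_at, id)` backs the keyset ordering used by the cursor pagination in `DatabaseService.get_senses` below. A sketch of the query shape that index is meant to serve (the standalone helper function is illustrative, mirroring the filters built further down):

```python
from sqlalchemy import and_, or_, select

from soul_diary.backend.database.models import Sense


def next_page_query(user, cursor_created_at, cursor_sense_id, limit: int = 10):
    # The cursor points at the first row of the requested page, so the id
    # comparison is inclusive; one extra row is fetched to detect a next page.
    return (
        select(Sense)
        .where(
            Sense.user == user,
            or_(
                Sense.created_at < cursor_created_at,
                and_(Sense.created_at == cursor_created_at, Sense.id <= cursor_sense_id),
            ),
        )
        .order_by(Sense.created_at.desc())
        .limit(limit + 1)
    )
```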
### + op.drop_index("senses__created_at__id_idx", table_name="senses", postgresql_using="btree") + op.drop_index("senses__user_id_idx", table_name="senses") + op.drop_index("sessions__user_id_idx", table_name="sessions") + op.create_index("senses__created_at_idx", "senses", ["created_at"], unique=False) + op.create_index("senses__user_id_idx", "senses", ["user_id"], unique=False, + postgresql_using="btree") + op.create_index("sessions__user_id_idx", "sessions", ["user_id"], unique=False, + postgresql_using="btree") + # ### end Alembic commands ### diff --git a/soul_diary/backend/database/models.py b/soul_diary/backend/database/models.py index b5c03d7..6d1f665 100644 --- a/soul_diary/backend/database/models.py +++ b/soul_diary/backend/database/models.py @@ -40,7 +40,7 @@ class Session(Base): __table_args__ = ( Index("sessions__token_idx", "token", postgresql_using="hash"), - Index("sessions__user_id_idx", "user_id", postgresql_using="btree"), + Index("sessions__user_id_idx", "user_id", postgresql_using="hash"), ) @@ -56,6 +56,6 @@ class Sense(Base): __table_args__ = ( Index("senses__id_idx", "id", postgresql_using="hash"), - Index("senses__user_id_idx", "user_id", postgresql_using="btree"), - Index("senses__created_at_idx", "created_at", postgresql_using="btree"), + Index("senses__user_id_idx", "user_id", postgresql_using="hash"), + Index("senses__created_at__id_idx", "created_at", "id", postgresql_using="btree"), ) diff --git a/soul_diary/backend/database/service.py b/soul_diary/backend/database/service.py index bb247b4..802c9ea 100644 --- a/soul_diary/backend/database/service.py +++ b/soul_diary/backend/database/service.py @@ -1,13 +1,17 @@ +import base64 import pathlib +import struct import uuid from contextlib import asynccontextmanager +from datetime import datetime from typing import Type import bcrypt from alembic import command as alembic_command from alembic.config import Config as AlembicConfig from facet import ServiceMixin -from sqlalchemy import select +from pydantic import BaseModel +from sqlalchemy import and_, func, or_, select from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine from sqlalchemy.orm import DeclarativeBase @@ -15,7 +19,14 @@ from .models import Sense, Session, User from .settings import DatabaseSettings +class CursorData(BaseModel): + created_at: datetime + sense_id: uuid.UUID + + class DatabaseService(ServiceMixin): + ENCODING = "utf-8" + def __init__(self, dsn: str): self._dsn = dsn self._engine = create_async_engine(self._dsn, pool_recycle=60) @@ -99,13 +110,84 @@ class DatabaseService(ServiceMixin): return user_session - async def get_senses(self, session: AsyncSession, user: User) -> list[Sense]: - query = select(Sense).where(Sense.user == user).order_by(Sense.created_at.desc()) + def cursor_encode(self, data: CursorData) -> str: + datetime_bytes = bytes(struct.pack("d", data.created_at.timestamp())) + sense_id_bytes = data.sense_id.bytes + cursor_bytes = datetime_bytes + sense_id_bytes + return base64.b64encode(cursor_bytes).decode(self.ENCODING) + def cursor_decode(self, cursor: str) -> CursorData: + cursor_bytes = base64.b64decode(cursor.encode(self.ENCODING)) + created_at = datetime.fromtimestamp(struct.unpack("d", cursor_bytes[:8])[0]) + sense_id = uuid.UUID(bytes=cursor_bytes[8:]) + return CursorData(created_at=created_at, sense_id=sense_id) + + def get_senses_filters(self, user: User) -> list: + filters = [Sense.user == user] + + return filters + + async def get_senses_count(self, session: AsyncSession, user: 
diff --git a/soul_diary/ui/app/backend/base.py b/soul_diary/ui/app/backend/base.py
index 9b89f7b..8c73d56 100644
--- a/soul_diary/ui/app/backend/base.py
+++ b/soul_diary/ui/app/backend/base.py
@@ -112,11 +112,17 @@ class BaseBackend:
     async def get_sense_list(self, cursor: str | None = None, limit: int = 10) -> SenseList:
         encrypted_sense_list = await self.fetch_sense_list(cursor=cursor, limit=limit)
-        senses = [
+        data = [
             self.convert_encrypted_sense_to_sense(encrypted_sense)
-            for encrypted_sense in encrypted_sense_list.senses
+            for encrypted_sense in encrypted_sense_list.data
         ]
 
-        return SenseList(senses=senses)
+        return SenseList(
+            data=data,
+            limit=encrypted_sense_list.limit,
+            total_items=encrypted_sense_list.total_items,
+            previous=encrypted_sense_list.previous,
+            next=encrypted_sense_list.next,
+        )
 
     async def create_sense(
         self,
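Since every backend now forwards the pagination metadata untouched alongside the decrypted entries, a caller can drain the whole diary by feeding `next` back in until it disappears. A minimal consumption sketch against the `BaseBackend` interface (assumes an already-authenticated backend instance):

```python
from soul_diary.ui.app.backend.base import BaseBackend


async def iterate_senses(backend: BaseBackend, limit: int = 10):
    # Walk pages newest-to-oldest until the backend stops returning a next cursor.
    cursor: str | None = None
    while True:
        page = await backend.get_sense_list(cursor=cursor, limit=limit)
        for sense in page.data:
            yield sense
        if page.next is None:
            break
        cursor = page.next
```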
diff --git a/soul_diary/ui/app/backend/local.py b/soul_diary/ui/app/backend/local.py
index 58be7af..2a860d7 100644
--- a/soul_diary/ui/app/backend/local.py
+++ b/soul_diary/ui/app/backend/local.py
@@ -1,8 +1,12 @@
+import base64
 import hashlib
+import struct
 import uuid
 from datetime import datetime
 from typing import Any
 
+from pydantic import BaseModel
+
 from soul_diary.ui.app.models import BackendType
 from .base import BaseBackend
 from .exceptions import (
@@ -14,6 +18,11 @@ from .exceptions import (
 from .models import EncryptedSense, EncryptedSenseList, Options
 
 
+class CursorData(BaseModel):
+    created_at: datetime
+    sense_id: uuid.UUID
+
+
 class LocalBackend(BaseBackend):
     BACKEND = BackendType.LOCAL
     AUTH_BLOCK_TEMPLATE = "auth_block:{username}:{password}"
@@ -60,6 +69,18 @@ class LocalBackend(BaseBackend):
     async def get_options(self) -> Options:
         return Options(registration_enabled=True)
 
+    def cursor_encode(self, data: CursorData) -> str:
+        datetime_bytes = bytes(struct.pack("d", data.created_at.timestamp()))
+        sense_id_bytes = data.sense_id.bytes
+        cursor_bytes = datetime_bytes + sense_id_bytes
+        return base64.b64encode(cursor_bytes).decode(self.ENCODING)
+
+    def cursor_decode(self, cursor: str) -> CursorData:
+        cursor_bytes = base64.b64decode(cursor.encode(self.ENCODING))
+        created_at = datetime.fromtimestamp(struct.unpack("d", cursor_bytes[:8])[0])
+        sense_id = uuid.UUID(bytes=cursor_bytes[8:])
+        return CursorData(created_at=created_at, sense_id=sense_id)
+
     async def fetch_sense_list(
         self,
         cursor: str | None = None,
@@ -70,8 +91,47 @@ class LocalBackend(BaseBackend):
         sense_list_key = self.SENSE_LIST_KEY_TEMPLATE.format(username=self._username)
         sense_list = await self._local_storage.raw_read(sense_list_key) or []
 
-        senses = [EncryptedSense.model_validate(sense) for sense in sense_list]
-        return EncryptedSenseList(senses=senses)
+        total_items = len(sense_list)
+
+        index = 0
+        if cursor is not None:
+            cursor_data = self.cursor_decode(cursor)
+            cursor_sense = EncryptedSense.model_validate(sense_list[0])
+            while (
+                index < total_items and
+                (cursor_data.created_at < cursor_sense.created_at or
+                 cursor_data.created_at == cursor_sense.created_at and
+                 cursor_data.sense_id < cursor_sense.id)
+            ):
+                index += 1
+                cursor_sense = EncryptedSense.model_validate(sense_list[index])
+
+        previous_cursor = None
+        if index - limit >= 0:
+            previous_pivot = EncryptedSense.model_validate(sense_list[index - limit])
+            previous_cursor_data = CursorData(
+                created_at=previous_pivot.created_at,
+                sense_id=previous_pivot.id,
+            )
+            previous_cursor = self.cursor_encode(data=previous_cursor_data)
+        next_cursor = None
+        if index + limit < len(sense_list):
+            next_pivot = EncryptedSense.model_validate(sense_list[index + limit])
+            next_cursor_data = CursorData(
+                created_at=next_pivot.created_at,
+                sense_id=next_pivot.id,
+            )
+            next_cursor = self.cursor_encode(data=next_cursor_data)
+
+        sense_list = sense_list[index:index + limit]
+        data = [EncryptedSense.model_validate(sense) for sense in sense_list]
+        return EncryptedSenseList(
+            data=data,
+            limit=limit,
+            total_items=total_items,
+            previous=previous_cursor,
+            next=next_cursor,
+        )
 
     async def fetch_sense(self, sense_id: uuid.UUID) -> EncryptedSense:
         sense_list = await self.fetch_sense_list()
@@ -84,19 +144,28 @@
 
     async def pull_sense_data(self, data: str, sense_id: uuid.UUID | None = None) -> EncryptedSense:
         sense_list_key = self.SENSE_LIST_KEY_TEMPLATE.format(username=self._username)
-        sense_list = await self.fetch_sense_list()
+        sense_list = await self._local_storage.raw_read(sense_list_key)
 
         if sense_id is None:
-            sense_ids = {sense.id for sense in sense_list.senses}
+            sense_ids = {uuid.UUID(sense["id"]) for sense in sense_list}
             sense_id = uuid.uuid4()
             while sense_id in sense_ids:
                 sense_id = uuid.uuid4()
             sense = EncryptedSense(
                 id=sense_id,
                 data=data,
-                created_at=datetime.now().astimezone(),
+                created_at=datetime.utcnow(),
             )
-            sense_list.senses.insert(0, sense)
+            index = 0
+            cursor_sense = EncryptedSense.model_validate(sense_list[index])
+            while (
+                index < len(sense_list) and
+                (sense.created_at < cursor_sense.created_at or
+                 sense.created_at == cursor_sense.created_at and
+                 sense.id < cursor_sense.id)
+            ):
+                index += 1
+            sense_list.insert(index, sense.model_dump(mode="json"))
         else:
             for index, sense in enumerate(sense_list):
                 if sense.id == sense_id:
@@ -104,14 +173,11 @@ class LocalBackend(BaseBackend):
                else:
                    raise SenseNotFoundException()
 
-            sense = sense_list.senses[index]
+            sense = sense_list[index]
             sense.data = data
-            sense_list.senses[index] = sense
-
-            await self._local_storage.raw_write(
-                sense_list_key,
-                [sense.model_dump(mode="json") for sense in sense_list.senses],
-            )
+            sense_list[index] = sense.model_dump(mode="json")
+
+            await self._local_storage.raw_write(sense_list_key, sense_list)
 
         return sense
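In the local backend the stored list is kept sorted newest-first, so the decoded cursor effectively resolves to a list index and each page is a plain slice; `previous`/`next` point one page back or forward from that index. The same idea reduced to a toy helper (hypothetical, with indices standing in for cursors):

```python
from typing import Sequence, TypeVar

T = TypeVar("T")


def page_by_index(
    items: Sequence[T], start: int, limit: int,
) -> tuple[list[T], int | None, int | None]:
    # items are assumed sorted newest-first; cursors degenerate to indices here
    previous_start = start - limit if start - limit >= 0 else None
    next_start = start + limit if start + limit < len(items) else None
    return list(items[start:start + limit]), previous_start, next_start
```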
diff --git a/soul_diary/ui/app/backend/models.py b/soul_diary/ui/app/backend/models.py
index efe6e41..426a6f1 100644
--- a/soul_diary/ui/app/backend/models.py
+++ b/soul_diary/ui/app/backend/models.py
@@ -1,23 +1,31 @@
 import uuid
 from datetime import datetime
 
-from pydantic import BaseModel
+from pydantic import BaseModel, NonNegativeInt
 
 from soul_diary.ui.app.models import Sense
 
 
+class Paginated(BaseModel):
+    data: list
+    limit: int
+    total_items: NonNegativeInt
+    previous: str | None = None
+    next: str | None = None
+
+
 class EncryptedSense(BaseModel):
     id: uuid.UUID
     data: str
     created_at: datetime
 
 
-class EncryptedSenseList(BaseModel):
-    senses: list[EncryptedSense]
+class EncryptedSenseList(Paginated):
+    data: list[EncryptedSense]
 
 
-class SenseList(BaseModel):
-    senses: list[Sense]
+class SenseList(Paginated):
+    data: list[Sense]
 
 
 class Options(BaseModel):
diff --git a/soul_diary/ui/app/backend/soul.py b/soul_diary/ui/app/backend/soul.py
index d599dde..a3ed943 100644
--- a/soul_diary/ui/app/backend/soul.py
+++ b/soul_diary/ui/app/backend/soul.py
@@ -127,9 +127,15 @@ class SoulBackend(BaseBackend):
         params = {key: value for key, value in params.items() if value is not None}
         response = await self.request(method="GET", path=path, params=params)
 
-        senses = [EncryptedSense.model_validate(sense) for sense in response["data"]]
+        data = [EncryptedSense.model_validate(sense) for sense in response["data"]]
 
-        return EncryptedSenseList(senses=senses)
+        return EncryptedSenseList(
+            data=data,
+            limit=response["limit"],
+            total_items=response["total_items"],
+            previous=response["previous"],
+            next=response["next"],
+        )
 
     async def fetch_sense(self, sense_id: uuid.UUID) -> EncryptedSense:
         path = f"/senses/{sense_id}"
diff --git a/soul_diary/ui/app/middleware.py b/soul_diary/ui/app/middleware.py
index 8cad5e3..8bcee65 100644
--- a/soul_diary/ui/app/middleware.py
+++ b/soul_diary/ui/app/middleware.py
@@ -9,6 +9,7 @@ from soul_diary.ui.app.routes import AUTH, SENSE_LIST
 async def middleware(page: flet.Page, params: Params, basket: Basket):
     local_storage = LocalStorage(client_storage=page.client_storage)
     auth_data = await local_storage.get_auth_data()
+    # await local_storage._client_storage.clear_async()
     if auth_data is None:
         await page.go_async(AUTH)
         return
diff --git a/soul_diary/ui/app/models.py b/soul_diary/ui/app/models.py
index 615d026..842070d 100644
--- a/soul_diary/ui/app/models.py
+++ b/soul_diary/ui/app/models.py
@@ -1,17 +1,28 @@
 import enum
 import uuid
-from datetime import datetime
+from datetime import datetime, timezone
 
-from pydantic import BaseModel, constr
+from pydantic import BaseModel, constr, field_validator
 
 
 class Emotion(str, enum.Enum):
-    JOY = "радость"
-    FORCE = "сила"
-    CALMNESS = "спокойствие"
     SADNESS = "грусть"
-    MADNESS = "бешенство"
+    JOY = "радость"
+    CALMNESS = "спокойствие"
+    IRRITATION = "раздражение"
+    ANGER = "злость"
     FEAR = "страх"
+    SHAME = "стыд"
+    GUILD = "вина"
+    RESENTMENT = "обида"
+    BOREDOM = "скука"
+    ANXIETY = "тревога"
+    COURAGE = "смелость"
+    PRIDE = "гордость"
+    ENERGY = "энергичность"
+    THANKFULNESS = "благодарность"
+    PLEASURE = "удовольствие"
+    DELIGHT = "восхищение"
 
 
 class BackendType(str, enum.Enum):
@@ -26,3 +37,10 @@ class Sense(BaseModel):
     body: constr(min_length=1, strip_whitespace=True)
     desires: constr(min_length=1, strip_whitespace=True)
     created_at: datetime
+
+    @field_validator("created_at")
+    @classmethod
+    def created_at_validator(cls, created_at: datetime) -> datetime:
+        created_at = created_at.replace(tzinfo=timezone.utc)
+        local_timezone = datetime.now().astimezone().tzinfo
+        return created_at.astimezone(local_timezone)
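The `created_at` validator compensates for the storage format: timestamps are persisted as naive UTC (`datetime.utcnow()` in the local backend, a plain `sa.DateTime` column on the server), so the UI model tags them as UTC and shifts them into the viewer's local zone. A small illustration of what the validator does (the values are made up):

```python
from datetime import datetime, timezone

stored = datetime(2023, 12, 18, 15, 31, 56)            # naive UTC, as persisted
as_utc = stored.replace(tzinfo=timezone.utc)            # declare it UTC
local = as_utc.astimezone(datetime.now().astimezone().tzinfo)
print(local.isoformat())                                # e.g. 2023-12-18T18:31:56+03:00 at UTC+3
```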
RESENTMENT = "обида" + BOREDOM = "скука" + ANXIETY = "тревога" + COURAGE = "смелость" + PRIDE = "гордость" + ENERGY = "энергичность" + THANKFULNESS = "благодарность" + PLEASURE = "удовольствие" + DELIGHT = "восхищение" class BackendType(str, enum.Enum): @@ -26,3 +37,10 @@ class Sense(BaseModel): body: constr(min_length=1, strip_whitespace=True) desires: constr(min_length=1, strip_whitespace=True) created_at: datetime + + @field_validator("created_at") + @classmethod + def created_at_validator(cls, created_at: datetime) -> datetime: + created_at = created_at.replace(tzinfo=timezone.utc) + local_timezone = datetime.now().astimezone().tzinfo + return created_at.astimezone(local_timezone) diff --git a/soul_diary/ui/app/pages/sense_list.py b/soul_diary/ui/app/pages/sense_list.py index dcd47cf..1a1dad6 100644 --- a/soul_diary/ui/app/pages/sense_list.py +++ b/soul_diary/ui/app/pages/sense_list.py @@ -1,4 +1,7 @@ +import asyncio import uuid +from contextlib import asynccontextmanager +from datetime import datetime, timezone from functools import partial import flet @@ -15,6 +18,8 @@ class SenseListPage(BasePage): def __init__(self, view: flet.View, local_storage: LocalStorage, extend: bool = False): self.local_storage = local_storage self.senses = [] + self.next_cursor = None + self.lock = asyncio.Lock() self.senses_cards: flet.Column self.extend = extend @@ -23,6 +28,7 @@ class SenseListPage(BasePage): def build(self) -> flet.Container: self.view.vertical_alignment = flet.MainAxisAlignment.START self.view.scroll = flet.ScrollMode.ALWAYS + self.view.on_scroll = self.callback_scroll view_switch = flet.Switch( label="Расширенный вид", @@ -67,7 +73,8 @@ class SenseListPage(BasePage): async def did_mount_async(self): backend_client = await get_backend_client(self.local_storage) sense_list = await backend_client.get_sense_list() - self.senses = sense_list.senses + self.senses = sense_list.data + self.next_cursor = sense_list.next await self.render_cards() async def render_cards(self): @@ -166,6 +173,21 @@ class SenseListPage(BasePage): return gesture_detector + @asynccontextmanager + async def in_progress(self): + progress_ring = flet.Container( + content=flet.ProgressRing(), + alignment=flet.alignment.center, + height=150, + ) + self.senses_cards.controls.append(progress_ring) + await self.update_async() + + yield + + self.senses_cards.controls.pop() + await self.update_async() + @callback_error_handle async def callback_switch_view(self, event: flet.ControlEvent): self.extend = event.control.value @@ -187,3 +209,20 @@ class SenseListPage(BasePage): await backend_client.logout() await self.local_storage.clear_shared_data() await event.page.go_async(AUTH) + + @callback_error_handle + async def callback_scroll(self, event: flet.OnScrollEvent): + if ( + event.pixels < event.max_scroll_extent - 100 or + self.next_cursor is None or + self.lock.locked() + ): + return + + async with self.lock: + backend_client = await get_backend_client(local_storage=self.local_storage) + async with self.in_progress(): + sense_list = await backend_client.get_sense_list(cursor=self.next_cursor) + self.senses.extend(sense_list.data) + self.next_cursor = sense_list.next + await self.render_cards() diff --git a/soul_diary/ui/web/service.py b/soul_diary/ui/web/service.py index 33f2af0..328e6ed 100644 --- a/soul_diary/ui/web/service.py +++ b/soul_diary/ui/web/service.py @@ -1,5 +1,6 @@ from typing import Any +import flet import flet_fastapi import uvicorn from facet import ServiceMixin @@ -24,10 +25,13 @@ class 
diff --git a/soul_diary/ui/web/service.py b/soul_diary/ui/web/service.py
index 33f2af0..328e6ed 100644
--- a/soul_diary/ui/web/service.py
+++ b/soul_diary/ui/web/service.py
@@ -1,5 +1,6 @@
 from typing import Any
 
+import flet
 import flet_fastapi
 import uvicorn
 from facet import ServiceMixin
@@ -24,10 +25,13 @@ class WebService(ServiceMixin):
         return self._port
 
     async def start(self):
-        app = flet_fastapi.app(SoulDiaryApp(
-            backend=BackendType.SOUL,
-            backend_data=self._backend_data,
-        ).run)
+        app = flet_fastapi.app(
+            SoulDiaryApp(
+                backend=BackendType.SOUL,
+                backend_data=self._backend_data,
+            ).run,
+            web_renderer=flet.WebRenderer.HTML,
+        )
         config = uvicorn.Config(app=app, host="0.0.0.0", port=self._port)
         server = UvicornServer(config)