Merge pull request #9 from Xevion/0.2.2

### Added

- Added the `orjson` serializer for faster JSON serialization (see the sketch after this list)
  - Used in `structlog`'s `JSONRenderer` for production logging
  - Used in `fastapi`'s `Response` for faster response serialization
- Improved documentation in multiple files
  - `__main__.py`
  - `logging.py`
  - `models.py`
  - `utilities.py`
  - `migrate.py`
  - `responses.py`
- A `get_db` utility function to retrieve a reference to the database (with type hinting)
- Minor `DATABASE_URL` check in `models.py` to prevent cryptic connection issues
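
A minimal sketch of the `orjson` wiring described above (both pieces appear in the diffs further down; `orjson.dumps` returns `bytes`, so the structlog pipeline needs a trailing decode step):

```python
import orjson
import structlog
from fastapi import FastAPI
from fastapi.responses import ORJSONResponse

# structlog: swap the default json.dumps serializer for orjson.dumps.
# A later processor must decode the resulting bytes back to str.
log_renderer = structlog.processors.JSONRenderer(serializer=orjson.dumps)

# FastAPI: route every JSON response through orjson by default.
app = FastAPI(default_response_class=ORJSONResponse)
```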

### Changed

- Migration script now uses `structlog` instead of `print`
  - Migration script output formatting is tuned for `structlog` as well
- Migration names must be at least 9 characters long
- Unspecified IPv6 addresses (`::`) are now returned unredacted by `utilities.hide_ip` (see the sketch after this list)
- Applied `get_db` utility function in all applicable areas
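
A condensed sketch of `utilities.hide_ip` after this change; the `hidden_octets` default and exact redaction format here are assumptions for illustration, and the real implementation appears in the `utilities.py` diff below:

```python
def hide_ip(ip: str, hidden_octets: int = 2) -> str:
    """Sketch: redact the trailing groups of an IP address."""
    ipv6 = ":" in ip
    if ipv6 == ("." in ip):
        raise ValueError("Invalid IP address format. Must be either IPv4 or IPv6.")
    # Unspecified IPv6 addresses (::) carry no identifying information,
    # so they are returned as-is rather than redacted.
    if ipv6 and ip.startswith("::"):
        return ip
    separator = ":" if ipv6 else "."
    replacement = "XXXX" if ipv6 else "X"
    parts = ip.split(separator)
    parts[-hidden_octets:] = [replacement] * hidden_octets
    return separator.join(parts)

assert hide_ip("203.0.113.7") == "203.0.X.X"
assert hide_ip("::") == "::"  # unspecified IPv6: returned unchanged
```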

### Fixed

- Raised level for `apscheduler.scheduler` logger to `WARNING` to prevent excessive logging
- IPv4-only interface bind in production, which prevented Railway's Private Networking from functioning (see the sketch after this list)
- Reloader mode was enabled in production
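
Both fixes live in `__main__.py` (full diff below); a condensed sketch of the corrected `uvicorn.run` call:

```python
import os

from uvicorn import run

from linkpulse.utilities import is_development

run(
    "linkpulse.app:app",
    # The reloader is a development convenience; it must not run in production.
    reload=is_development,
    # "0.0.0.0" listens on all IPv4 interfaces, "::" on all IPv6 interfaces.
    # Railway's Private Networking requires IPv6, so production binds to "::".
    host="0.0.0.0" if is_development else "::",
    port=int(os.getenv("PORT", "8000")),
)
```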
11 changed files with 288 additions and 96 deletions


@@ -8,6 +8,7 @@
"linkpulse",
"migratehistory",
"Nixpacks",
"ORJSON",
"pytz",
"starlette",
"structlog",


@@ -5,6 +5,37 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.2.2] - 2024-11-01
### Added
- Added the `orjson` serializer for faster JSON serialization
  - Used in `structlog`'s `JSONRenderer` for production logging
  - Used in `fastapi`'s `Response` for faster response serialization
- Improved documentation in multiple files
  - `__main__.py`
  - `logging.py`
  - `models.py`
  - `utilities.py`
  - `migrate.py`
  - `responses.py`
- A `get_db` utility function to retrieve a reference to the database (with type hinting)
- Minor `DATABASE_URL` check in `models.py` to prevent cryptic connection issues
### Changed
- Migration script now uses `structlog` instead of `print`
  - Migration script output formatting is tuned for `structlog` as well
- Migration names must be at least 9 characters long
- Unspecified IPv6 addresses (`::`) are now returned unredacted by `utilities.hide_ip`
- Applied `get_db` utility function in all applicable areas
### Fixed
- Raised level for `apscheduler.scheduler` logger to `WARNING` to prevent excessive logging
- IPv4-only interface bind in production, which prevented Railway's Private Networking from functioning
- Reloader mode was enabled in production
## [0.2.1] - 2024-11-01
### Changed
@@ -20,7 +51,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
- This `CHANGELOG.md` file.
- This `CHANGELOG.md` file
- Structured logging with `structlog`
- Readable `ConsoleRenderer` for local development
- `JSONRenderer` for production logging


@@ -1,3 +1,19 @@
"""
This module serves as the entry point for the LinkPulse application. It provides
command-line interface (CLI) commands to serve the application, run migrations,
or start a REPL (Read-Eval-Print Loop) session.
Commands:
- serve: Starts the application server using Uvicorn.
- migrate: Runs database migrations.
- repl: Starts an interactive Python shell with pre-imported objects and models.
"""
from linkpulse.logging import setup_logging
# We want to set up logging as early as possible.
setup_logging()
import os
import sys
import structlog
@@ -7,17 +23,22 @@ logger = structlog.get_logger()
def main(*args):
"""
Primary entrypoint for the LinkPulse application
- Don't import any modules globally unless you're certain it's necessary. Imports should be tightly controlled.
"""
if args[0] == "serve":
from linkpulse.logging import setup_logging
from linkpulse.utilities import is_development
from uvicorn import run
setup_logging()
logger.debug("Invoking uvicorn.run")
run(
"linkpulse.app:app",
reload=True,
host="0.0.0.0",
reload=is_development,
# Both options are special IP addresses that allow the server to listen on all network interfaces. One is for IPv4, the other for IPv6.
# Railway's private networking requires IPv6, so we must use that in production.
host="0.0.0.0" if is_development else "::",
port=int(os.getenv("PORT", "8000")),
log_config={
"version": 1,
@@ -32,28 +53,38 @@ def main(*args):
elif args[0] == "migrate":
from linkpulse.migrate import main
main(*args[1:])
main(*args)
elif args[0] == "repl":
import linkpulse
# import most useful objects, models, and functions
lp = linkpulse # alias
from linkpulse.app import app, db
from linkpulse.utilities import get_db
from linkpulse.app import app
from linkpulse.models import BaseModel, IPAddress
db = get_db()
# start REPL
from bpython import embed # type: ignore
embed(locals())
else:
print("Invalid command: {}".format(args[0]))
raise ValueError("Unexpected command: {}".format(" ".join(args)))
if __name__ == "__main__":
if len(sys.argv) == 1:
logger.debug("Entrypoint", argv=sys.argv)
args = sys.argv[1:]
if len(args) == 0:
logger.debug("No arguments provided, defaulting to 'serve'")
main("serve")
else:
# Check that args after aren't all whitespace
remaining_args = " ".join(sys.argv[1:]).strip()
if len(remaining_args) > 0:
main(*sys.argv[1:])
normalized_args = " ".join(args).strip()
if len(normalized_args) == 0:
logger.warning("Whitespace arguments provided, defaulting to 'serve'")
logger.debug("Invoking main with arguments", args=args)
main(*args)


@@ -13,24 +13,25 @@ from apscheduler.triggers.interval import IntervalTrigger # type: ignore
from asgi_correlation_id import CorrelationIdMiddleware
from dotenv import load_dotenv
from fastapi import FastAPI, Request, Response, status
from fastapi.responses import ORJSONResponse
from fastapi_cache import FastAPICache
from fastapi_cache.backends.inmemory import InMemoryBackend
from fastapi_cache.decorator import cache
from linkpulse.logging import setup_logging
from linkpulse.middleware import LoggingMiddleware
from linkpulse.utilities import get_ip, hide_ip, is_development
from peewee import PostgresqlDatabase
from linkpulse.utilities import get_db, get_ip, hide_ip, is_development
from psycopg2.extras import execute_values
load_dotenv(dotenv_path=".env")
from linkpulse import models, responses # type: ignore
db: PostgresqlDatabase = models.BaseModel._meta.database # type: ignore
db = get_db()
def flush_ips():
if len(app.state.buffered_updates) == 0:
logger.debug("No IPs to flush to Database")
return
try:
@@ -55,7 +56,7 @@ def flush_ips():
logger.error("Failed to flush IPs to Database", error=e)
i = len(app.state.buffered_updates)
logger.debug("Flushed IPs to Database", count=i)
logger.debug("IPs written to database", count=i)
# Finish up
app.state.buffered_updates.clear()
@@ -110,10 +111,10 @@ async def lifespan(_: FastAPI) -> AsyncIterator[None]:
class IPCounter:
# Note: This is not the true 'seen' count, but the count of how many times the IP has been seen since the last flush.
count: int = 0
last_seen: datetime = field(default_factory=datetime.utcnow)
last_seen: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
app = FastAPI(lifespan=lifespan)
app = FastAPI(lifespan=lifespan, default_response_class=ORJSONResponse)
setup_logging()
@@ -161,7 +162,7 @@ async def get_ips(request: Request, response: Response):
"""
Returns a list of partially redacted IP addresses, as well as submitting the user's IP address to the database (buffered).
"""
now = datetime.utcnow()
now = datetime.now(timezone.utc)
# Get the user's IP address
user_ip = get_ip(request)


@@ -1,13 +1,20 @@
import logging
import os
import sys
from typing import List, Optional
from typing import Any, List, Optional
import structlog
from structlog.types import EventDict, Processor
def rename_event_key(_, __, event_dict: EventDict) -> EventDict:
def decode_bytes(_: Any, __: Any, bs: bytes) -> str:
"""
orjson returns bytes; we need strings
"""
return bs.decode()
def rename_event_key(_: Any, __: Any, event_dict: EventDict) -> EventDict:
"""
Renames the `event` key to `msg`, as Railway expects it in that form.
"""
@@ -15,7 +22,7 @@ def rename_event_key(_, __, event_dict: EventDict) -> EventDict:
return event_dict
def drop_color_message_key(_, __, event_dict: EventDict) -> EventDict:
def drop_color_message_key(_: Any, __: Any, event_dict: EventDict) -> EventDict:
"""
Uvicorn logs the message a second time in the extra `color_message`, but we don't
need it. This processor drops the key from the event dict if it exists.
@@ -27,10 +34,14 @@ def drop_color_message_key(_, __, event_dict: EventDict) -> EventDict:
def setup_logging(
json_logs: Optional[bool] = None, log_level: Optional[str] = None
) -> None:
# Pull from environment variables, apply defaults if not set
json_logs = json_logs or os.getenv("LOG_JSON_FORMAT", "true").lower() == "true"
log_level = log_level or os.getenv("LOG_LEVEL", "INFO")
def flatten(n):
"""
Flattens a nested list into a single list of elements.
"""
match n:
case []:
return []
@@ -39,6 +50,7 @@ def setup_logging(
case [hd, *tl]:
return [hd, *flatten(tl)]
# Shared structlog processors, both for the root logger and foreign loggers
shared_processors: List[Processor] = flatten(
[
structlog.contextvars.merge_contextvars,
@@ -49,6 +61,7 @@ def setup_logging(
drop_color_message_key,
structlog.processors.TimeStamper(fmt="iso"),
structlog.processors.StackInfoRenderer(),
# Processors only used for the JSON renderer
(
[
rename_event_key,
@@ -61,6 +74,7 @@ def setup_logging(
]
)
# Main structlog configuration
structlog.configure(
processors=[
*shared_processors,
@@ -73,7 +87,9 @@ def setup_logging(
log_renderer: structlog.types.Processor
if json_logs:
log_renderer = structlog.processors.JSONRenderer()
import orjson
log_renderer = structlog.processors.JSONRenderer(serializer=orjson.dumps)
else:
log_renderer = structlog.dev.ConsoleRenderer()
@@ -85,6 +101,8 @@ def setup_logging(
# Remove _record & _from_structlog.
structlog.stdlib.ProcessorFormatter.remove_processors_meta,
log_renderer,
# required with orjson
*([decode_bytes] if json_logs else []), # type: ignore
],
)
@@ -101,6 +119,7 @@ def setup_logging(
clear: Optional[bool] = None,
propagate: Optional[bool] = None,
) -> None:
"""Helper function to configure a logger with the given parameters."""
logger = logging.getLogger(name)
if level is not None:
@@ -118,7 +137,10 @@ def setup_logging(
configure_logger("uvicorn", clear=True, propagate=True)
configure_logger("uvicorn.error", clear=True, propagate=True)
# Disable the apscheduler loggers, as they are too verbose
# TODO: This should be configurable easily from a TOML or YAML file
configure_logger("apscheduler.executors.default", level="WARNING")
configure_logger("apscheduler.scheduler", level="WARNING")
# Since we re-create the access logs ourselves, to add all information
# in the structured log (see the `logging_middleware` in main.py), we clear


@@ -4,19 +4,28 @@ import sys
from typing import List, Optional, Tuple
import questionary
import structlog
from dotenv import load_dotenv
from peewee import PostgresqlDatabase
from peewee_migrate import Router, router
logger = structlog.get_logger()
load_dotenv(dotenv_path=".env")
class ExtendedRouter(Router):
"""
The original Router class from peewee_migrate didn't have all the functions I needed, so several functions are added here.
Added:
- show: Show the suggested migration that will be created, without actually creating it
- all_migrations: Get all migrations that have been applied
"""
def show(self, module: str) -> Optional[Tuple[str, str]]:
"""
Show the suggested migration that will be created, without actually creating it.
Show the suggested migration that will be created, without actually creating it
:param module: The module to scan & diff against.
:param module: The module to scan & diff against
"""
migrate = rollback = ""
@@ -55,7 +64,7 @@ class ExtendedRouter(Router):
def all_migrations(self) -> List[str]:
"""
Get all migrations that have been applied.
Get all migrations that have been applied
"""
return [mm.name for mm in self.model.select().order_by(self.model.id)]
@@ -65,38 +74,40 @@ def main(*args: str) -> None:
Main function for running migrations.
Args are fed directly from sys.argv.
"""
from linkpulse.utilities import get_db
from linkpulse import models
db: PostgresqlDatabase = models.BaseModel._meta.database
db = get_db()
router = ExtendedRouter(
database=db,
migrate_dir="linkpulse/migrations",
ignore=[models.BaseModel._meta.table_name],
)
auto = "linkpulse.models"
target_models = "linkpulse.models" # The module to scan for models & changes
current = router.all_migrations()
if len(current) == 0:
diff = router.diff
if len(diff) == 0:
print(
logger.info(
"No migrations found, no pending migrations to apply. Creating initial migration."
)
migration = router.create("initial", auto=auto)
migration = router.create("initial", auto=target_models)
if not migration:
print("No changes detected. Something went wrong.")
logger.error("No changes detected. Something went wrong.")
else:
print(f"Migration created: {migration}")
logger.info(f"Migration created: {migration}")
router.run(migration)
diff = router.diff
if len(diff) > 0:
print(
logger.info(
"Note: Selecting a migration will apply all migrations up to and including the selected migration."
)
print(
logger.info(
"e.g. Applying 004 while only 001 is applied would apply 002, 003, and 004."
)
@@ -104,95 +115,76 @@ def main(*args: str) -> None:
"Select highest migration to apply:", choices=diff
).ask()
if choice is None:
print(
logger.warning(
"For safety reasons, you won't be able to create migrations without applying the pending ones."
)
if len(current) == 0:
print(
logger.warning(
"Warn: No migrations have been applied globally, which is dangerous. Something may be wrong."
)
return
result = router.run(choice)
print(f"Done. Applied migrations: {result}")
print("Warning: You should commit and push any new migrations immediately!")
logger.info(f"Done. Applied migrations: {result}")
logger.warning("You should commit and push any new migrations immediately!")
else:
print("No pending migrations to apply.")
logger.info("No pending migrations to apply.")
# Inspects models and might generate a migration script
migration_available = router.show(target_models)
migration_available = router.show(auto)
if migration_available is not None:
print("A migration is available to be applied:")
logger.info("A migration is available to be applied:")
migrate_text, rollback_text = migration_available
print("MIGRATION:")
for line in migrate_text.split("\n"):
if line.strip() == "":
continue
print("\t" + line)
print("ROLLBACK:")
for line in rollback_text.split("\n"):
if line.strip() == "":
continue
print("\t" + line)
def _reformat_text(text: str) -> str:
# Remove empty lines
text = [line for line in text.split("\n") if line.strip() != ""]
# Add line numbers, indent, ensure it starts on a new line
return "\n" + "\n".join([f"{i:02}:\t{line}" for i, line in enumerate(text)])
logger.info("Migration Content", content=_reformat_text(migrate_text))
logger.info("Rollback Content", content=_reformat_text(rollback_text))
if questionary.confirm("Do you want to create this migration?").ask():
print(
'Lowercase letters and underscores only (e.g. "create_table", "remove_ipaddress_count").'
logger.info(
'Minimum length 9, lowercase letters and underscores only (e.g. "create_table", "remove_ipaddress_count").'
)
migration_name: Optional[str] = questionary.text(
"Enter migration name",
validate=lambda text: re.match("^[a-z_]+$", text) is not None,
validate=lambda text: re.match("^[a-z_]{9,}$", text) is not None,
).ask()
if migration_name is None:
return
migration = router.create(migration_name, auto=auto)
migration = router.create(migration_name, auto=target_models)
if migration:
print(f"Migration created: {migration}")
logger.info(f"Migration created: {migration}")
if len(router.diff) == 1:
if questionary.confirm(
"Do you want to apply this migration immediately?"
).ask():
router.run(migration)
print("Done.")
print("!!! Commit and push this migration file immediately!")
logger.info("Done.")
logger.warning(
"!!! Commit and push this migration file immediately!"
)
else:
print("No changes detected. Something went wrong.")
return
raise RuntimeError(
"Changes anticipated with show() but no migration created with create(), model definition may have reverted."
)
else:
print("No database changes detected.")
logger.info("No database changes detected.")
if len(current) > 5:
if questionary.confirm(
"There are more than 5 migrations applied. Do you want to merge them?",
default=False,
).ask():
print("Merging migrations...")
logger.info("Merging migrations...")
router.merge(name="initial")
print("Done.")
logger.info("Done.")
print("!!! Commit and push this merged migration file immediately!")
# Testing Code:
"""
print(router.print('linkpulse.models'))
# Create migration
print("Creating migration")
migration = router.create('test', auto='linkpulse.models')
if migration is None:
print("No changes detected")
else:
print(f"Migration Created: {migration}")
# Run migration/migrations
router.run(migration)
Run all unapplied migrations
print("Running all unapplied migrations")
applied = router.run()
print(f"Applied migrations: {applied}")
"""
logger.warning("Commit and push this merged migration file immediately!")


@@ -1,14 +1,32 @@
from peewee import Model, CharField, DateTimeField, IntegerField
"""models.py
This module defines the database models for the LinkPulse backend.
It also provides a base model with database connection details.
"""
from os import getenv
import structlog
from peewee import CharField, DateTimeField, IntegerField, Model
from playhouse.db_url import connect
from os import environ
logger = structlog.get_logger()
# I can't pollute the class definition with these lines, so I'll move them to a separate function.
def _get_database_url():
    url = getenv("DATABASE_URL")
    if url is None or url.strip() == "":
        raise ValueError("DATABASE_URL is not set")
    return url
class BaseModel(Model):
    class Meta:
        database = connect(url=environ.get("DATABASE_URL"))
        # accessed via `BaseModel._meta.database`
        database = connect(url=_get_database_url())
class IPAddress(BaseModel):
    ip = CharField(primary_key=True)
    last_seen = DateTimeField()
    last_seen = DateTimeField()  # timezone naive
    count = IntegerField(default=0)


@@ -1,3 +1,8 @@
"""responses.py
This module contains the response models for the FastAPI application.
"""
from pydantic import BaseModel


@@ -1,10 +1,29 @@
"""utilities.py
This module provides utility functions for database connection, string manipulation, and IP address handling.
"""
import os
from typing import Optional
from fastapi import Request
from peewee import PostgresqlDatabase
# globally referenced
is_development = os.getenv("ENVIRONMENT") == "development"
def get_db() -> PostgresqlDatabase:
    """
    Acquires the database connector from the BaseModel class.
    This is not a cursor, but a connection to the database.
    """
    # Might not be necessary, but I'd prefer not to import heavy modules with side effects in a utility module.
    from linkpulse import models
    return models.BaseModel._meta.database  # type: ignore
def pluralize(count: int, word: Optional[str] = None) -> str:
"""
Pluralize a word based on count. Returns 's' if count is not 1, '' (empty string) otherwise.
@@ -69,6 +88,10 @@ def hide_ip(ip: str, hidden_octets: Optional[int] = None) -> str:
    if ipv6 == ("." in ip):
        raise ValueError("Invalid IP address format. Must be either IPv4 or IPv6.")
    # Secondary check: if the IP address is the unspecified IPv6 address (::), return it as-is.
    if ipv6 and ip.startswith("::"):
        return ip
    total_octets = 8 if ipv6 else 4
    separator = ":" if ipv6 else "."
    replacement = "XXXX" if ipv6 else "X"

backend/poetry.lock (generated)

@@ -531,6 +531,73 @@ files = [
[package.dependencies]
psutil = "*"
[[package]]
name = "orjson"
version = "3.10.10"
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
optional = false
python-versions = ">=3.8"
files = [
{file = "orjson-3.10.10-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b788a579b113acf1c57e0a68e558be71d5d09aa67f62ca1f68e01117e550a998"},
{file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:804b18e2b88022c8905bb79bd2cbe59c0cd014b9328f43da8d3b28441995cda4"},
{file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9972572a1d042ec9ee421b6da69f7cc823da5962237563fa548ab17f152f0b9b"},
{file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc6993ab1c2ae7dd0711161e303f1db69062955ac2668181bfdf2dd410e65258"},
{file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d78e4cacced5781b01d9bc0f0cd8b70b906a0e109825cb41c1b03f9c41e4ce86"},
{file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6eb2598df518281ba0cbc30d24c5b06124ccf7e19169e883c14e0831217a0bc"},
{file = "orjson-3.10.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23776265c5215ec532de6238a52707048401a568f0fa0d938008e92a147fe2c7"},
{file = "orjson-3.10.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8cc2a654c08755cef90b468ff17c102e2def0edd62898b2486767204a7f5cc9c"},
{file = "orjson-3.10.10-cp310-none-win32.whl", hash = "sha256:081b3fc6a86d72efeb67c13d0ea7c030017bd95f9868b1e329a376edc456153b"},
{file = "orjson-3.10.10-cp310-none-win_amd64.whl", hash = "sha256:ff38c5fb749347768a603be1fb8a31856458af839f31f064c5aa74aca5be9efe"},
{file = "orjson-3.10.10-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:879e99486c0fbb256266c7c6a67ff84f46035e4f8749ac6317cc83dacd7f993a"},
{file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:019481fa9ea5ff13b5d5d95e6fd5ab25ded0810c80b150c2c7b1cc8660b662a7"},
{file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0dd57eff09894938b4c86d4b871a479260f9e156fa7f12f8cad4b39ea8028bb5"},
{file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dbde6d70cd95ab4d11ea8ac5e738e30764e510fc54d777336eec09bb93b8576c"},
{file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2625cb37b8fb42e2147404e5ff7ef08712099197a9cd38895006d7053e69d6"},
{file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbf3c20c6a7db69df58672a0d5815647ecf78c8e62a4d9bd284e8621c1fe5ccb"},
{file = "orjson-3.10.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:75c38f5647e02d423807d252ce4528bf6a95bd776af999cb1fb48867ed01d1f6"},
{file = "orjson-3.10.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:23458d31fa50ec18e0ec4b0b4343730928296b11111df5f547c75913714116b2"},
{file = "orjson-3.10.10-cp311-none-win32.whl", hash = "sha256:2787cd9dedc591c989f3facd7e3e86508eafdc9536a26ec277699c0aa63c685b"},
{file = "orjson-3.10.10-cp311-none-win_amd64.whl", hash = "sha256:6514449d2c202a75183f807bc755167713297c69f1db57a89a1ef4a0170ee269"},
{file = "orjson-3.10.10-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8564f48f3620861f5ef1e080ce7cd122ee89d7d6dacf25fcae675ff63b4d6e05"},
{file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5bf161a32b479034098c5b81f2608f09167ad2fa1c06abd4e527ea6bf4837a9"},
{file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68b65c93617bcafa7f04b74ae8bc2cc214bd5cb45168a953256ff83015c6747d"},
{file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8e28406f97fc2ea0c6150f4c1b6e8261453318930b334abc419214c82314f85"},
{file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4d0d9fe174cc7a5bdce2e6c378bcdb4c49b2bf522a8f996aa586020e1b96cee"},
{file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3be81c42f1242cbed03cbb3973501fcaa2675a0af638f8be494eaf37143d999"},
{file = "orjson-3.10.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:65f9886d3bae65be026219c0a5f32dbbe91a9e6272f56d092ab22561ad0ea33b"},
{file = "orjson-3.10.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:730ed5350147db7beb23ddaf072f490329e90a1d059711d364b49fe352ec987b"},
{file = "orjson-3.10.10-cp312-none-win32.whl", hash = "sha256:a8f4bf5f1c85bea2170800020d53a8877812892697f9c2de73d576c9307a8a5f"},
{file = "orjson-3.10.10-cp312-none-win_amd64.whl", hash = "sha256:384cd13579a1b4cd689d218e329f459eb9ddc504fa48c5a83ef4889db7fd7a4f"},
{file = "orjson-3.10.10-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44bffae68c291f94ff5a9b4149fe9d1bdd4cd0ff0fb575bcea8351d48db629a1"},
{file = "orjson-3.10.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e27b4c6437315df3024f0835887127dac2a0a3ff643500ec27088d2588fa5ae1"},
{file = "orjson-3.10.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca84df16d6b49325a4084fd8b2fe2229cb415e15c46c529f868c3387bb1339d"},
{file = "orjson-3.10.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c14ce70e8f39bd71f9f80423801b5d10bf93d1dceffdecd04df0f64d2c69bc01"},
{file = "orjson-3.10.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:24ac62336da9bda1bd93c0491eff0613003b48d3cb5d01470842e7b52a40d5b4"},
{file = "orjson-3.10.10-cp313-none-win32.whl", hash = "sha256:eb0a42831372ec2b05acc9ee45af77bcaccbd91257345f93780a8e654efc75db"},
{file = "orjson-3.10.10-cp313-none-win_amd64.whl", hash = "sha256:f0c4f37f8bf3f1075c6cc8dd8a9f843689a4b618628f8812d0a71e6968b95ffd"},
{file = "orjson-3.10.10-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:829700cc18503efc0cf502d630f612884258020d98a317679cd2054af0259568"},
{file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0ceb5e0e8c4f010ac787d29ae6299846935044686509e2f0f06ed441c1ca949"},
{file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0c25908eb86968613216f3db4d3003f1c45d78eb9046b71056ca327ff92bdbd4"},
{file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:218cb0bc03340144b6328a9ff78f0932e642199ac184dd74b01ad691f42f93ff"},
{file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2277ec2cea3775640dc81ab5195bb5b2ada2fe0ea6eee4677474edc75ea6785"},
{file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:848ea3b55ab5ccc9d7bbd420d69432628b691fba3ca8ae3148c35156cbd282aa"},
{file = "orjson-3.10.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e3e67b537ac0c835b25b5f7d40d83816abd2d3f4c0b0866ee981a045287a54f3"},
{file = "orjson-3.10.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:7948cfb909353fce2135dcdbe4521a5e7e1159484e0bb024c1722f272488f2b8"},
{file = "orjson-3.10.10-cp38-none-win32.whl", hash = "sha256:78bee66a988f1a333dc0b6257503d63553b1957889c17b2c4ed72385cd1b96ae"},
{file = "orjson-3.10.10-cp38-none-win_amd64.whl", hash = "sha256:f1d647ca8d62afeb774340a343c7fc023efacfd3a39f70c798991063f0c681dd"},
{file = "orjson-3.10.10-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5a059afddbaa6dd733b5a2d76a90dbc8af790b993b1b5cb97a1176ca713b5df8"},
{file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f9b5c59f7e2a1a410f971c5ebc68f1995822837cd10905ee255f96074537ee6"},
{file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d5ef198bafdef4aa9d49a4165ba53ffdc0a9e1c7b6f76178572ab33118afea25"},
{file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf29ce0bb5d3320824ec3d1508652421000ba466abd63bdd52c64bcce9eb1fa"},
{file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dddd5516bcc93e723d029c1633ae79c4417477b4f57dad9bfeeb6bc0315e654a"},
{file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12f2003695b10817f0fa8b8fca982ed7f5761dcb0d93cff4f2f9f6709903fd7"},
{file = "orjson-3.10.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:672f9874a8a8fb9bb1b771331d31ba27f57702c8106cdbadad8bda5d10bc1019"},
{file = "orjson-3.10.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1dcbb0ca5fafb2b378b2c74419480ab2486326974826bbf6588f4dc62137570a"},
{file = "orjson-3.10.10-cp39-none-win32.whl", hash = "sha256:d9bbd3a4b92256875cb058c3381b782649b9a3c68a4aa9a2fff020c2f9cfc1be"},
{file = "orjson-3.10.10-cp39-none-win_amd64.whl", hash = "sha256:766f21487a53aee8524b97ca9582d5c6541b03ab6210fbaf10142ae2f3ced2aa"},
{file = "orjson-3.10.10.tar.gz", hash = "sha256:37949383c4df7b4337ce82ee35b6d7471e55195efa7dcb45ab8226ceadb0fe3b"},
]
[[package]]
name = "packaging"
version = "24.1"
@@ -1126,4 +1193,4 @@ files = [
[metadata]
lock-version = "2.0"
python-versions = "^3.12"
content-hash = "a0cc32861b71da789edc5df54e79239d6cca81cb3d14984a1306a3f92735589f"
content-hash = "674210864455c4a103c7e78f9879c0360fcdc0ae62d36a2fe44f1df4f59f04e6"


@@ -1,6 +1,6 @@
[tool.poetry]
name = "linkpulse"
version = "0.2.1"
version = "0.2.2"
description = ""
authors = ["Xevion <xevion@xevion.dev>"]
license = "GNU GPL v3"
@@ -26,6 +26,7 @@ psycopg2 = "^2.9.10"
structlog = "^24.4.0"
uvicorn = "^0.32.0"
asgi-correlation-id = "^4.3.4"
orjson = "^3.10.10"
[tool.poetry.group.dev.dependencies]