1) Install with pip in a virtual environment
install.sh
python3 -m venv .venv
source .venv/bin/activate
python -m pip install --upgrade pip
pip install jazzmine-logging
2) Installation options
Core package only
install_core.sh
pip install jazzmine-logging
Install one optional sink extra
install_one_extra.sh
pip install "jazzmine-logging[sentry]"
pip install "jazzmine-logging[kafka]"
pip install "jazzmine-logging[redis]"
pip install "jazzmine-logging[honeycomb]"
pip install "jazzmine-logging[aws]"
pip install "jazzmine-logging[gcp]"
Install multiple extras together
install_multi_extras.sh
pip install "jazzmine-logging[sentry,redis,kafka]"
Install all extras
install_all_extras.sh
pip install "jazzmine-logging[all]"
Local editable install (from this repository)
install_editable.sh
pip install -e .
pip install -e ".[all]"
Extra to sink mapping
| Extra | Needed for sink type |
|---|---|
| sentry | sentry |
| kafka | kafka |
| redis | redis |
| honeycomb | honeycomb |
| aws | cloudwatch |
| gcp | pubsub |
No extra is required for: console, file, http, datadog, loki, newrelic, logstash, vector, fluentbit, promtail, graylog.
3) Minimal example (sync, console)
logging_sync.py
# Minimal synchronous example: structured JSON logs to the console.
from jazzmine.logging import BaseLogger

# Logger configuration: app name, minimum level, JSON output, one console sink.
config = dict(
    name="demo-app",
    level="INFO",
    json=True,
    sinks=[dict(type="console")],
)

logger = BaseLogger(config)
logger.resolve_config()  # resolve/validate config once before logging

logger.info("service started", version="0.1.0")
logger.warning("slow response", endpoint="/health", latency_ms=230)
logger.error("request failed", status=500, request_id="req-123")
4) Minimal example (async + sink lifecycle)
For sink adapters from the registry (for example sentry, redis, kafka, and others), call start_async_workers() before logging and shutdown() on exit.
logging_async.py
# Async example: console + Loki sinks with an explicit worker lifecycle.
import asyncio
from jazzmine.logging import BaseLogger

# Two sinks: stdout plus a local Loki push endpoint.
config = {
    "name": "demo-app",
    "level": "INFO",
    "json": True,
    "sinks": [
        {"type": "console"},
        {"type": "loki", "url": "http://localhost:3100/loki/api/v1/push"},
    ],
}


async def main():
    log = BaseLogger(config)
    log.resolve_config()
    # Registry sink adapters need their async workers running before logging.
    await log.start_async_workers()
    log.info("sync log")
    await log.ainfo("async log", request_id="abc-123")
    # Flush queues and release sink resources on exit.
    await log.shutdown()
asyncio.run(main())
5) Other provided functionality (minimal usage)
Load config from JSON or YAML file
logging_from_file.py
# Load logger configuration from a file path instead of an inline dict.
from jazzmine.logging import BaseLogger
logger = BaseLogger("logging.yaml")  # also supports .json
logger.resolve_config()
logger.info("loaded from file")
Add request context fields
logging_request_context.py
# Attach per-request context fields to log records.
from jazzmine.logging import BaseLogger, RequestContext

logger = BaseLogger(
    {
        "name": "api",
        "level": "INFO",
        "json": True,
        "sinks": [{"type": "console"}],
    }
)
logger.resolve_config()

# RequestContext bundles request-scoped fields; to_dict() flattens them
# into keyword arguments for a log call.
ctx = RequestContext(user_id="u-42", endpoint="/orders")
logger.info("request complete", **ctx.to_dict())
Per-sink level override
logging_sink_levels.py
# Per-sink level override: global threshold is DEBUG, but the Loki sink
# only receives ERROR and above.
from jazzmine.logging import BaseLogger

console_sink = {"type": "console", "level": "DEBUG"}
loki_sink = {
    "type": "loki",
    "url": "http://localhost:3100/loki/api/v1/push",
    "level": "ERROR",
}

config = {
    "name": "level-demo",
    "level": "DEBUG",
    "json": True,
    "sinks": [console_sink, loki_sink],
}
logger = BaseLogger(config)
logger.resolve_config()
6) Sink examples (all supported sink types)
Copy one or more sink entries into your sinks list.
sink_examples.py
# Catalog of ready-to-copy sink configurations, keyed by a descriptive name.
# Values in angle brackets (<...>) are placeholders — inject real credentials
# from environment variables; never commit them.
SINK_EXAMPLES = {
    # Built-in stdlib sinks
    "console": {
        "type": "console"
    },
    "file": {
        "type": "file",
        "path": "logs/app.log",
        # Time-based rotation: roll at midnight, keep 7 old files
        # (naming mirrors stdlib TimedRotatingFileHandler — TODO confirm).
        "rotate": {
            "when": "midnight",
            "backupCount": 7
        }
    },
    "http_sync": {
        "type": "http",
        "url": "https://collector.example.com/logs",
        "method": "POST",
        "headers": {
            "Authorization": "Bearer <token>"
        }
    },
    "http_async_worker": {
        "type": "http",
        "url": "https://collector.example.com/logs",
        "method": "POST",
        "headers": {
            "Authorization": "Bearer <token>"
        },
        # Deliver via a background worker instead of blocking the caller.
        "async": True,
        # NOTE(review): presumably the max queued events before
        # backpressure or drop — confirm against the adapter.
        "queue_max": 2000
    },
    # HTTP API sinks
    "datadog": {
        "type": "datadog",
        "api_key": "<DATADOG_API_KEY>",
        "site": "datadoghq.com",
        "service": "demo-app"
    },
    "loki": {
        "type": "loki",
        "url": "http://localhost:3100/loki/api/v1/push",
        # Static labels attached to every pushed stream.
        "labels": {
            "job": "demo-app",
            "env": "dev"
        }
    },
    "newrelic": {
        "type": "newrelic",
        "license_key": "<NEW_RELIC_LICENSE_KEY>",
        "region": "us"
    },
    "logstash": {
        "type": "logstash",
        "url": "http://localhost:8080"
    },
    # Observability sinks
    "sentry": {
        "type": "sentry",
        "dsn": "<SENTRY_DSN>",
        "environment": "production",
        # Fraction of transactions sampled for tracing (0.0-1.0).
        "traces_sample_rate": 0.1
    },
    "honeycomb": {
        "type": "honeycomb",
        "api_key": "<HONEYCOMB_API_KEY>",
        "dataset": "demo-app"
    },
    # Queue / stream sinks
    "kafka_async": {
        "type": "kafka",
        "bootstrap_servers": ["localhost:9092"],
        "topic": "app-logs",
        # Non-blocking produce via background worker.
        "async": True
    },
    "kafka_sync": {
        "type": "kafka",
        "bootstrap_servers": ["localhost:9092"],
        "topic": "app-logs",
        "async": False
    },
    "redis": {
        "type": "redis",
        "host": "localhost",
        "port": 6379,
        "db": 0,
        # Destination Redis stream key for log records.
        "stream_key": "logs:demo-app"
    },
    # Cloud sinks
    "cloudwatch": {
        "type": "cloudwatch",
        "region": "us-east-1",
        "log_group": "/logs/demo-app",
        "log_stream": "default"
    },
    "pubsub": {
        "type": "pubsub",
        "project_id": "my-gcp-project",
        "topic": "app-logs"
    },
    # Agent / collector sinks
    "vector": {
        "type": "vector",
        "url": "http://localhost:8686"
    },
    "fluentbit": {
        "type": "fluentbit",
        "url": "http://localhost:2020",
        "tag": "demo-app"
    },
    "promtail": {
        "type": "promtail",
        "url": "http://localhost:3100/loki/api/v1/push",
        "labels": {
            "job": "demo-app",
            "env": "dev"
        }
    },
    # GELF sink (Graylog) over three transports
    "graylog_http": {
        "type": "graylog",
        "host": "localhost",
        "port": 12201,
        "protocol": "http"
    },
    "graylog_udp": {
        "type": "graylog",
        "host": "localhost",
        "port": 12201,
        "protocol": "udp"
    },
    "graylog_tcp": {
        "type": "graylog",
        "host": "localhost",
        "port": 12201,
        "protocol": "tcp"
    },
}
7) Reusable runner for trying any sink quickly
sink_runner.py
# Generic harness: run one log cycle through any sink config plus console.
import asyncio
from jazzmine.logging import BaseLogger


async def run_with_sink(sink_config: dict):
    """Emit one sync and one async event through *sink_config* alongside a console sink."""
    cfg = {
        "name": "playground",
        "level": "INFO",
        "json": True,
        "sinks": [{"type": "console"}, sink_config],
    }
    log = BaseLogger(cfg)
    log.resolve_config()
    # Sink workers must be running before any log call reaches the adapters.
    await log.start_async_workers()
    log.info("sync event", component="demo")
    await log.ainfo("async event", component="demo")
    # Drain queues and close sink connections.
    await log.shutdown()
# Example:
# asyncio.run(run_with_sink(SINK_EXAMPLES["datadog"]))
8) Practical notes
- Keep API keys and DSNs in environment variables, then inject them into your config.
- If you configure only console, file, or http, resolve_config() is enough for basic sync logs.
- If you use sink adapters from the registry (datadog, kafka, redis, and others), call await logger.start_async_workers() before logging.
- Always call await logger.shutdown() for clean resource cleanup.
Deep reference
Use reference pages for runtime internals and sink adapter implementation details.