Introduction
MariaDB’s versatility makes it suitable for a wide range of applications, from small web applications to enterprise-scale deployments. With multiple storage engines and excellent MySQL compatibility, MariaDB powers critical systems across industries.
This article explores real-world MariaDB use cases, implementation patterns, and best practices for production deployments.
Web Applications
WordPress and CMS
# Install MariaDB for WordPress
sudo apt install mariadb-server
# Create WordPress database (utf8mb4 covers all of Unicode, including emoji)
sudo mysql
CREATE DATABASE wordpress CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
-- Replace 'strong_password' with a real secret before running this
CREATE USER 'wordpress'@'localhost' IDENTIFIED BY 'strong_password';
GRANT ALL PRIVILEGES ON wordpress.* TO 'wordpress'@'localhost';
FLUSH PRIVILEGES;
EXIT;
# WordPress automatically uses MariaDB/MySQL
# WordPress is fully compatible with MariaDB as a drop-in MySQL replacement
Custom Web Application
# Flask with MariaDB
from flask import Flask, request, jsonify
import mariadb

app = Flask(__name__)

# Connection pool.
# NOTE: MariaDB Connector/Python exposes the pool class as
# mariadb.ConnectionPool; there is no importable mariadb.pooling.Pool.
pool = mariadb.ConnectionPool(
    pool_name="webapp_pool",
    pool_size=10,
    host="localhost",
    user="webapp",
    password="secret",
    database="webapp"
)


@app.route('/api/users', methods=['GET'])
def get_users():
    """Return the first 20 users as a JSON array of {id, name, email}."""
    conn = pool.get_connection()
    try:
        cursor = conn.cursor(dictionary=True)
        cursor.execute("SELECT id, name, email FROM users LIMIT 20")
        users = cursor.fetchall()
        return jsonify(users)
    finally:
        # close() returns the connection to the pool
        conn.close()


@app.route('/api/users', methods=['POST'])
def create_user():
    """Create a user from a JSON body {name, email}; returns the new id."""
    data = request.json
    conn = pool.get_connection()
    try:
        cursor = conn.cursor()
        # Parameterized statement -- never interpolate request data into SQL.
        cursor.execute(
            "INSERT INTO users (name, email) VALUES (?, ?)",
            (data['name'], data['email'])
        )
        conn.commit()
        return jsonify({'id': cursor.lastrowid}), 201
    finally:
        conn.close()


# Session management
@app.route('/login', methods=['POST'])
def login():
    """Check credentials; return the user on success, 401 otherwise."""
    data = request.json
    conn = pool.get_connection()
    try:
        cursor = conn.cursor(dictionary=True)
        cursor.execute(
            "SELECT id, name FROM users WHERE email = ? AND password = ?",
            (data['email'], data['password'])  # In production: hash passwords!
        )
        user = cursor.fetchone()
        if user:
            return jsonify({'user': user})
        return jsonify({'error': 'Invalid credentials'}), 401
    finally:
        conn.close()


if __name__ == '__main__':
    # Guard so importing this module (e.g. under a WSGI server)
    # does not start the development server.
    app.run(host='0.0.0.0', port=5000)
E-commerce Platforms
Product Catalog
-- Product catalog schema
CREATE TABLE categories (
    id INT PRIMARY KEY AUTO_INCREMENT,
    name VARCHAR(100) NOT NULL,
    parent_id INT, -- NULL means top-level category
    FOREIGN KEY (parent_id) REFERENCES categories(id) ON DELETE SET NULL
) ENGINE=InnoDB; -- explicit engine, consistent with the other tables

CREATE TABLE products (
    id INT PRIMARY KEY AUTO_INCREMENT,
    name VARCHAR(255) NOT NULL,
    slug VARCHAR(255) UNIQUE NOT NULL, -- URL-friendly identifier
    description TEXT,
    price DECIMAL(10,2) NOT NULL, -- DECIMAL (never FLOAT) for money
    compare_price DECIMAL(10,2), -- Original price for discounts
    category_id INT,
    stock INT DEFAULT 0,
    status ENUM('active', 'draft', 'archived') DEFAULT 'active',
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    FOREIGN KEY (category_id) REFERENCES categories(id),
    INDEX idx_slug (slug),
    INDEX idx_category (category_id),
    INDEX idx_status (status)
) ENGINE=InnoDB;

-- Free-form key/value attributes (size, color, ...) per product.
CREATE TABLE product_attributes (
    id INT PRIMARY KEY AUTO_INCREMENT,
    product_id INT NOT NULL, -- an attribute row is meaningless without a product
    attribute_name VARCHAR(50),
    attribute_value VARCHAR(255),
    FOREIGN KEY (product_id) REFERENCES products(id) ON DELETE CASCADE
) ENGINE=InnoDB;

CREATE TABLE product_images (
    id INT PRIMARY KEY AUTO_INCREMENT,
    product_id INT NOT NULL,
    image_url VARCHAR(500),
    is_primary BOOLEAN DEFAULT FALSE,
    sort_order INT DEFAULT 0, -- display order within a product's gallery
    FOREIGN KEY (product_id) REFERENCES products(id) ON DELETE CASCADE
) ENGINE=InnoDB;

-- Full-text search for products
ALTER TABLE products
ADD FULLTEXT INDEX ft_search (name, description);
Shopping Cart and Orders
-- Shopping cart, keyed by web session so guests can shop too.
CREATE TABLE carts (
    session_id VARCHAR(64) PRIMARY KEY,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
) ENGINE=InnoDB;

CREATE TABLE cart_items (
    id INT PRIMARY KEY AUTO_INCREMENT,
    cart_id VARCHAR(64) NOT NULL,
    product_id INT NOT NULL,
    quantity INT NOT NULL DEFAULT 1,
    -- CASCADE so purging expired carts also removes their items; the
    -- default RESTRICT would make cart cleanup jobs fail.
    FOREIGN KEY (cart_id) REFERENCES carts(session_id) ON DELETE CASCADE,
    FOREIGN KEY (product_id) REFERENCES products(id)
) ENGINE=InnoDB;
-- Orders
-- One row per checkout; monetary columns are stored here rather than
-- derived, presumably snapshotted at purchase time so later price
-- changes do not rewrite order history -- confirm with the writer code.
CREATE TABLE orders (
id INT PRIMARY KEY AUTO_INCREMENT,
order_number VARCHAR(20) UNIQUE NOT NULL, -- human-facing identifier
user_id INT,
status ENUM('pending', 'processing', 'shipped', 'delivered', 'cancelled') DEFAULT 'pending',
subtotal DECIMAL(10,2),
tax DECIMAL(10,2),
shipping DECIMAL(10,2),
total DECIMAL(10,2),
shipping_address TEXT,
payment_method VARCHAR(50),
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users(id),
INDEX idx_order_number (order_number),
INDEX idx_user (user_id),
INDEX idx_status (status)
) ENGINE=InnoDB;
-- Line items for each order.
CREATE TABLE order_items (
id INT PRIMARY KEY AUTO_INCREMENT,
order_id INT,
product_id INT,
quantity INT NOT NULL,
unit_price DECIMAL(10,2), -- price at time of purchase, not products.price
total DECIMAL(10,2),
FOREIGN KEY (order_id) REFERENCES orders(id) ON DELETE RESTRICT, -- orders are never silently emptied
FOREIGN KEY (product_id) REFERENCES products(id)
);
Inventory Management
-- Inventory tracking
CREATE TABLE inventory (
    id INT PRIMARY KEY AUTO_INCREMENT,
    product_id INT UNIQUE,
    quantity INT DEFAULT 0,
    reserved INT DEFAULT 0, -- Reserved in carts
    available INT GENERATED ALWAYS AS (quantity - reserved) STORED,
    last_restock DATE,
    FOREIGN KEY (product_id) REFERENCES products(id)
);

-- Reserve inventory during checkout.
-- NOTE: IF ... THEN ... END IF is only valid inside stored programs in
-- MariaDB, so the check-and-commit logic lives in a stored procedure.
-- The single conditional UPDATE is what makes the reservation atomic:
-- it only succeeds when enough unreserved stock exists (no oversell race).
DELIMITER //
CREATE PROCEDURE reserve_stock(IN p_product_id INT, IN p_qty INT, OUT p_reserved BOOLEAN)
BEGIN
    START TRANSACTION;
    UPDATE inventory
    SET reserved = reserved + p_qty
    WHERE product_id = p_product_id AND (quantity - reserved) >= p_qty;
    IF ROW_COUNT() > 0 THEN
        SET p_reserved = TRUE;
        COMMIT;   -- Continue checkout
    ELSE
        SET p_reserved = FALSE;
        ROLLBACK; -- Out of stock
    END IF;
END //
DELIMITER ;
Analytics and Reporting
MariaDB ColumnStore for OLAP
-- Use ColumnStore for analytical workloads.
-- NOTE: ColumnStore does not support indexes, including PRIMARY KEY
-- constraints -- declaring one makes the CREATE TABLE fail -- so the
-- fact and dimension tables below use plain columns only.
CREATE TABLE factsales (
    id BIGINT,
    date_id INT,
    product_id INT,
    customer_id INT,
    store_id INT,
    quantity INT,
    revenue DECIMAL(12,2),
    cost DECIMAL(12,2)
) ENGINE=ColumnStore;

-- Dimension tables
CREATE TABLE dim_date (
    date_id INT,
    date DATE,
    day_of_week INT,
    month INT,
    quarter INT,
    year INT,
    is_weekend BOOLEAN
) ENGINE=ColumnStore;

CREATE TABLE dim_product (
    product_id INT,
    product_name VARCHAR(255),
    category VARCHAR(100),
    subcategory VARCHAR(100),
    brand VARCHAR(100)
) ENGINE=ColumnStore;

CREATE TABLE dim_customer (
    customer_id INT,
    name VARCHAR(100),
    email VARCHAR(255),
    city VARCHAR(100),
    country VARCHAR(100),
    segment VARCHAR(50)
) ENGINE=ColumnStore;
-- Analytical queries
-- Revenue and units sold, rolled up by year/quarter/category across the
-- star schema above; top revenue first within each quarter.
SELECT
d.year,
d.quarter,
p.category,
SUM(f.revenue) as revenue,
SUM(f.quantity) as units_sold
FROM factsales f
JOIN dim_date d ON f.date_id = d.date_id
JOIN dim_product p ON f.product_id = p.product_id
GROUP BY d.year, d.quarter, p.category
ORDER BY d.year, d.quarter, revenue DESC;
Real-time Dashboards
import mariadb
from datetime import datetime, timedelta
class AnalyticsDashboard:
    """Real-time analytics with MariaDB.

    Runs aggregate queries over the `orders`/`order_items` schema on a
    single long-lived connection.
    """

    def __init__(self, db_config):
        """db_config: keyword arguments forwarded to mariadb.connect()."""
        self.conn = mariadb.connect(**db_config)

    def _query(self, sql, params, fetch_all=True):
        """Execute one parameterized query, returning dict rows.

        Closes the cursor afterwards -- the original left cursors open,
        leaking server-side resources on the shared connection.
        """
        cursor = self.conn.cursor(dictionary=True)
        try:
            cursor.execute(sql, params)
            return cursor.fetchall() if fetch_all else cursor.fetchone()
        finally:
            cursor.close()

    def get_sales_summary(self, days=30):
        """Order count, revenue and average order value for the last `days` days."""
        return self._query("""
            SELECT
                COUNT(*) as total_orders,
                SUM(total) as revenue,
                AVG(total) as avg_order_value
            FROM orders
            WHERE created_at >= DATE_SUB(NOW(), INTERVAL %s DAY)
              AND status != 'cancelled'
        """, (days,), fetch_all=False)

    def get_top_products(self, limit=10):
        """Top `limit` products by revenue over the last 30 days."""
        return self._query("""
            SELECT
                p.name,
                SUM(oi.quantity) as units_sold,
                SUM(oi.total) as revenue
            FROM order_items oi
            JOIN products p ON oi.product_id = p.id
            JOIN orders o ON oi.order_id = o.id
            WHERE o.created_at >= DATE_SUB(NOW(), INTERVAL 30 DAY)
            GROUP BY p.id, p.name
            ORDER BY revenue DESC
            LIMIT %s
        """, (limit,))

    def get_revenue_trend(self, days=30):
        """Daily order counts and revenue for the last `days` days, oldest first."""
        return self._query("""
            SELECT
                DATE(created_at) as date,
                COUNT(*) as orders,
                SUM(total) as revenue
            FROM orders
            WHERE created_at >= DATE_SUB(NOW(), INTERVAL %s DAY)
              AND status != 'cancelled'
            GROUP BY DATE(created_at)
            ORDER BY date
        """, (days,))
# Usage for dashboard
# NOTE(review): credentials are hard-coded for illustration only; load
# them from the environment or a secrets store in real deployments.
dashboard = AnalyticsDashboard({'host': 'localhost', 'user': 'root',
'password': 'secret', 'database': 'analytics'})
print("Sales Summary:", dashboard.get_sales_summary(30))
print("Top Products:", dashboard.get_top_products(10))
IoT and Time-Series
Sensor Data Storage
-- Time-series data with InnoDB
CREATE TABLE sensor_readings (
    id BIGINT AUTO_INCREMENT,
    sensor_id VARCHAR(50) NOT NULL,
    measurement_type VARCHAR(50),
    value DECIMAL(10,4),
    unit VARCHAR(20),
    recorded_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    -- MariaDB requires every UNIQUE/PRIMARY key to include the
    -- partitioning column, so recorded_at must be part of the PK for
    -- the PARTITION BY below to be accepted.
    PRIMARY KEY (id, recorded_at),
    INDEX idx_sensor_time (sensor_id, recorded_at),
    INDEX idx_type_time (measurement_type, recorded_at)
) ENGINE=InnoDB;

-- Partition by range (for large datasets); old partitions can be
-- dropped cheaply instead of running bulk DELETEs.
ALTER TABLE sensor_readings
PARTITION BY RANGE (TO_DAYS(recorded_at)) (
    PARTITION p202601 VALUES LESS THAN (TO_DAYS('2026-02-01')),
    PARTITION p202602 VALUES LESS THAN (TO_DAYS('2026-03-01')),
    PARTITION p202603 VALUES LESS THAN (TO_DAYS('2026-04-01')),
    PARTITION p_future VALUES LESS THAN MAXVALUE
);
IoT Data Collection
import mariadb
import time
from datetime import datetime
class IoTDataCollector:
    """Collect and store IoT sensor data in MariaDB."""

    def __init__(self, db_config):
        """db_config: keyword arguments forwarded to mariadb.connect()."""
        self.conn = mariadb.connect(**db_config)
        self._init_table()

    def _init_table(self):
        """Create the readings table on first use (idempotent)."""
        cursor = self.conn.cursor()
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS sensor_readings (
                id BIGINT PRIMARY KEY AUTO_INCREMENT,
                sensor_id VARCHAR(50) NOT NULL,
                measurement_type VARCHAR(50),
                value DECIMAL(10,4),
                unit VARCHAR(20),
                recorded_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                INDEX idx_sensor_time (sensor_id, recorded_at)
            ) ENGINE=InnoDB
        """)
        self.conn.commit()

    def record_reading(self, sensor_id, measurement_type, value, unit=None):
        """Store one reading; recorded_at defaults to NOW() server-side."""
        cursor = self.conn.cursor()
        cursor.execute("""
            INSERT INTO sensor_readings
            (sensor_id, measurement_type, value, unit)
            VALUES (?, ?, ?, ?)
        """, (sensor_id, measurement_type, value, unit))
        self.conn.commit()

    def record_batch(self, readings):
        """Batch record multiple readings.

        readings: iterable of (sensor_id, measurement_type, value, unit) tuples.
        """
        cursor = self.conn.cursor()
        cursor.executemany("""
            INSERT INTO sensor_readings
            (sensor_id, measurement_type, value, unit)
            VALUES (?, ?, ?, ?)
        """, readings)
        self.conn.commit()

    def get_readings(self, sensor_id, hours=24):
        """Readings for one sensor over the last `hours` hours, newest first."""
        cursor = self.conn.cursor(dictionary=True)
        # qmark (?) placeholders throughout -- the original mixed ? in the
        # INSERTs with %s in the SELECTs within the same class.
        cursor.execute("""
            SELECT * FROM sensor_readings
            WHERE sensor_id = ?
            AND recorded_at >= DATE_SUB(NOW(), INTERVAL ? HOUR)
            ORDER BY recorded_at DESC
        """, (sensor_id, hours))
        return cursor.fetchall()

    def get_aggregates(self, sensor_id, hours=24):
        """Get min/max/avg per measurement type for the time period."""
        cursor = self.conn.cursor(dictionary=True)
        cursor.execute("""
            SELECT
                measurement_type,
                COUNT(*) as readings,
                MIN(value) as min_value,
                MAX(value) as max_value,
                AVG(value) as avg_value
            FROM sensor_readings
            WHERE sensor_id = ?
            AND recorded_at >= DATE_SUB(NOW(), INTERVAL ? HOUR)
            GROUP BY measurement_type
        """, (sensor_id, hours))
        return cursor.fetchall()
# Simulate IoT data collection
# NOTE(review): credentials are hard-coded for illustration only; use
# environment variables or a secrets store in real deployments.
collector = IoTDataCollector({'host': 'localhost', 'user': 'root',
'password': 'secret', 'database': 'iot'})
# Record sensor data (one row per call; use record_batch for bulk loads)
collector.record_reading('temp_sensor_1', 'temperature', 23.5, 'celsius')
collector.record_reading('humidity_sensor_1', 'humidity', 65.2, 'percent')
# Get aggregated data
print(collector.get_aggregates('temp_sensor_1', 24))
Content Management
Blog and CMS Schema
-- Posts
-- Core content table; the FULLTEXT index below powers site search.
CREATE TABLE posts (
id INT PRIMARY KEY AUTO_INCREMENT,
title VARCHAR(255) NOT NULL,
slug VARCHAR(255) UNIQUE NOT NULL, -- URL-friendly identifier
content LONGTEXT,
excerpt TEXT,
featured_image VARCHAR(500),
status ENUM('draft', 'published', 'archived') DEFAULT 'draft',
author_id INT,
published_at TIMESTAMP NULL, -- NULL until the post is published
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
FOREIGN KEY (author_id) REFERENCES users(id),
FULLTEXT INDEX ft_title_content (title, content)
) ENGINE=InnoDB;
-- Categories
-- NOTE(review): a `categories` table is also created in the e-commerce
-- section above; these schemas are presumably for separate databases --
-- confirm before running both against one schema.
CREATE TABLE categories (
id INT PRIMARY KEY AUTO_INCREMENT,
name VARCHAR(100) NOT NULL,
slug VARCHAR(100) UNIQUE NOT NULL,
parent_id INT, -- NULL means top-level category
FOREIGN KEY (parent_id) REFERENCES categories(id) ON DELETE SET NULL
);
-- Tags
CREATE TABLE tags (
id INT PRIMARY KEY AUTO_INCREMENT,
name VARCHAR(50) UNIQUE NOT NULL,
slug VARCHAR(50) UNIQUE NOT NULL
);
-- Post-Category relationship (many-to-many junction table)
CREATE TABLE post_categories (
post_id INT,
category_id INT,
PRIMARY KEY (post_id, category_id), -- composite PK prevents duplicates
FOREIGN KEY (post_id) REFERENCES posts(id) ON DELETE CASCADE,
FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE
);
-- Post-Tag relationship (many-to-many junction table)
CREATE TABLE post_tags (
post_id INT,
tag_id INT,
PRIMARY KEY (post_id, tag_id),
FOREIGN KEY (post_id) REFERENCES posts(id) ON DELETE CASCADE,
FOREIGN KEY (tag_id) REFERENCES tags(id) ON DELETE CASCADE
);
-- Comments
-- parent_id self-reference enables threaded (nested) replies.
CREATE TABLE comments (
id INT PRIMARY KEY AUTO_INCREMENT,
post_id INT,
parent_id INT,
author_name VARCHAR(100),
author_email VARCHAR(255),
content TEXT NOT NULL,
status ENUM('pending', 'approved', 'spam') DEFAULT 'pending', -- moderation queue
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (post_id) REFERENCES posts(id) ON DELETE CASCADE,
FOREIGN KEY (parent_id) REFERENCES comments(id) ON DELETE CASCADE
);
Multi-Tenant Applications
SaaS Database Design
-- Multi-tenant with shared database
-- Single shared database, row-level tenancy: every tenant-owned table
-- carries tenant_id and the application filters on it (see bottom).
-- NOTE(review): `users` and `orders` redefine tables from earlier
-- sections; this SaaS schema is presumably for its own database.
CREATE TABLE tenants (
id INT PRIMARY KEY AUTO_INCREMENT,
name VARCHAR(100) NOT NULL,
subdomain VARCHAR(50) UNIQUE, -- e.g. acme.example.com routes to tenant
plan ENUM('free', 'starter', 'professional', 'enterprise') DEFAULT 'free',
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE users (
id INT PRIMARY KEY AUTO_INCREMENT,
tenant_id INT NOT NULL,
email VARCHAR(255) NOT NULL,
name VARCHAR(100),
role ENUM('owner', 'admin', 'member') DEFAULT 'member',
FOREIGN KEY (tenant_id) REFERENCES tenants(id) ON DELETE CASCADE,
UNIQUE KEY tenant_email (tenant_id, email) -- same email may exist in different tenants
);
-- All tables include tenant_id for isolation
CREATE TABLE orders (
id INT PRIMARY KEY AUTO_INCREMENT,
tenant_id INT NOT NULL,
customer_name VARCHAR(100),
total DECIMAL(10,2),
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (tenant_id) REFERENCES tenants(id) ON DELETE CASCADE,
INDEX idx_tenant (tenant_id) -- every tenant-scoped query filters on this
);
-- Row-level security (application enforces)
-- SELECT * FROM orders WHERE tenant_id = ?
Best Practices Summary
Storage Engine Selection
| Use Case | Recommended Engine |
|---|---|
| Transactional (OLTP) | InnoDB |
| Analytical (OLAP) | ColumnStore |
| Write-heavy logs | MyRocks |
| Archive/History | Aria |
| Full-text search | InnoDB + FULLTEXT |
Configuration for Workloads
# Web application
# Option names may use '-' or '_' interchangeably in MariaDB config files.
# Buffer pool should hold the hot working set on a dedicated server.
innodb-buffer-pool-size=2G
max_connections=200
# Analytics
innodb-buffer-pool-size=4G
# ColumnStore handles its own memory
# IoT/Time-series
innodb-buffer-pool-size=1G
# Value 2: flush the redo log once per second instead of at every commit,
# trading up to ~1s of durability for much higher ingest throughput.
innodb-flush-log-at-trx-commit=2
Resources
Conclusion
MariaDB’s flexibility with multiple storage engines makes it ideal for diverse applications. From web platforms to analytics, and from IoT time-series workloads to multi-tenant SaaS, MariaDB provides robust solutions for modern data requirements.
With this comprehensive use case guide, you now have the knowledge to implement MariaDB in various production scenarios effectively.
Comments