📚 Cloud Platform Documentation

Complete guide to managing your cloud infrastructure with step-by-step examples

📊 Database Operations

💾 Storage Usage

🚀 Deployments

⚡ Quick Start Guide

1

Create an Account

Register for a free account to get started with 10 credits

curl -X POST http://localhost:5000/api/register \
  -H "Content-Type: application/json" \
  -d '{"username":"demo","email":"demo@example.com","password":"Demo@123"}'
2

Get Your API Key

Your API key is provided upon registration or can be found in your profile

demo-api-key-12345
3

Make Your First API Call

Test your API key with a simple status check

curl -H "X-API-Key: your-api-key" http://localhost:5000/api/system/status

🗄️ Create Database

📌 Note: We support multiple database types including SQLite, PostgreSQL, MongoDB, and MySQL

SQLite Database (Default)

Our platform automatically creates a SQLite database for each user

import sqlite3

# Open the platform database file (created automatically on first use).
connection = sqlite3.connect('cloud_platform.db')
cur = connection.cursor()

# Define a user table; IF NOT EXISTS makes re-running this snippet safe.
cur.execute('''
    CREATE TABLE IF NOT EXISTS custom_data (
        id INTEGER PRIMARY KEY,
        name TEXT NOT NULL,
        value TEXT,
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
    )
''')
connection.commit()
connection.close()

PostgreSQL Database

Connect to PostgreSQL for advanced relational database needs

import psycopg2

# Connect to PostgreSQL
conn = psycopg2.connect(
    host="localhost",
    database="cloudplatform",
    user="postgres",
    password="postgres"
)
# CREATE DATABASE cannot run inside a transaction block, and psycopg2
# starts one implicitly on the first execute — enable autocommit first,
# otherwise this snippet raises ActiveSqlTransaction.
conn.autocommit = True
cursor = conn.cursor()

# Create database (executed immediately thanks to autocommit)
cursor.execute("CREATE DATABASE myapp")
conn.close()

MongoDB Database

Use MongoDB for flexible, document-based storage

from pymongo import MongoClient

# Connect to MongoDB
client = MongoClient('mongodb://localhost:27017/')
db = client['cloudplatform']

# Create a new database
# NOTE: MongoDB creates databases and collections lazily — these names
# only materialize on the server after the first document is written.
new_db = client['myapp']
collection = new_db['users']

📊 Create Tables

Example: Creating a Users Table

-- Platform account table: one row per registered user.
CREATE TABLE IF NOT EXISTS users (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    username TEXT UNIQUE NOT NULL,
    email TEXT UNIQUE NOT NULL,
    password_hash TEXT NOT NULL,        -- store a hash, never the plaintext password
    api_key TEXT UNIQUE,
    plan TEXT DEFAULT 'free',           -- e.g. 'free', 'premium', 'enterprise'
    credits REAL DEFAULT 10.0,          -- new accounts start with 10 credits
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    last_login TIMESTAMP,
    is_active BOOLEAN DEFAULT 1         -- SQLite stores booleans as 0/1
);

Example: Creating a Products Table

-- Product catalog table with pricing and stock levels.
CREATE TABLE IF NOT EXISTS products (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT NOT NULL,
    description TEXT,
    price REAL NOT NULL,
    quantity INTEGER DEFAULT 0,         -- stock on hand
    category TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP                -- not auto-maintained; set by application code
);
💡 Tip: Use appropriate data types and constraints to maintain data integrity

✏️ CRUD Operations (Create, Read, Update, Delete)

📝 Create (Insert) Data

# Insert a single record
# The "?" placeholders bind values safely and prevent SQL injection.
cursor.execute('''
    INSERT INTO users (username, email, password_hash, api_key)
    VALUES (?, ?, ?, ?)
''', ('john_doe', 'john@example.com', 'hashed_password', 'api_key_123'))
conn.commit()

# Insert multiple records
users = [
    ('jane_doe', 'jane@example.com', 'hash1', 'key1'),
    ('bob_smith', 'bob@example.com', 'hash2', 'key2')
]
# executemany() runs the same statement once per tuple in the list.
cursor.executemany('''
    INSERT INTO users (username, email, password_hash, api_key)
    VALUES (?, ?, ?, ?)
''', users)
conn.commit()

# rowcount reflects the rows affected by the most recent execute/executemany.
print(f"Inserted {cursor.rowcount} records")

📖 Read (Select) Data

# Select all records
cursor.execute("SELECT * FROM users")
all_users = cursor.fetchall()
for user in all_users:
    print(user)

# Select with conditions — "?" placeholders bind the values safely
cursor.execute("SELECT * FROM users WHERE plan = ? AND credits > ?", ('premium', 100))
premium_users = cursor.fetchall()

# Select specific columns
cursor.execute("SELECT username, email, credits FROM users WHERE is_active = 1")
active_users = cursor.fetchall()

# Select with JOIN
# LEFT JOIN keeps enterprise users even when they own no VMs (VM columns NULL)
cursor.execute('''
    SELECT u.username, v.name as vm_name, v.instance_type
    FROM users u
    LEFT JOIN virtual_machines v ON u.id = v.user_id
    WHERE u.plan = 'enterprise'
''')
results = cursor.fetchall()

✏️ Update Data

# Update single record
cursor.execute('''
    UPDATE users 
    SET credits = ?, last_login = CURRENT_TIMESTAMP 
    WHERE username = ?
''', (50.0, 'john_doe'))
conn.commit()
print(f"Updated {cursor.rowcount} record(s)")

# Deactivate accounts with no login in the last 30 days.
# Fix: flip the is_active flag rather than overwriting the billing plan —
# 'inactive' is not a plan value ('free'/'premium'/'enterprise'), and the
# message below reports a deactivation count.
cursor.execute('''
    UPDATE users 
    SET is_active = 0 
    WHERE last_login < date('now', '-30 days') AND is_active = 1
''')
conn.commit()
print(f"Deactivated {cursor.rowcount} inactive users")

🗑️ Delete Data

# Delete single record — note the trailing comma: parameters must be a tuple
cursor.execute("DELETE FROM users WHERE username = ?", ('inactive_user',))
conn.commit()

# Delete multiple records (accounts idle for over a year)
cursor.execute("DELETE FROM users WHERE last_login < date('now', '-1 year')")
conn.commit()
print(f"Deleted {cursor.rowcount} old records")

# Delete all records (be careful!)
# cursor.execute("DELETE FROM users")
# conn.commit()

📦 Create Storage Bucket

1

Create Bucket via API

curl -X POST http://localhost:5000/api/buckets \
  -H "Content-Type: application/json" \
  -H "X-API-Key: your-api-key" \
  -d '{
    "name": "my-documents",
    "region": "us-east-1",
    "versioning": true,
    "encryption": true
  }'
2

Create Bucket via Python

import requests

# Desired bucket configuration to submit to the platform API.
bucket_spec = {
    'name': 'my-documents',
    'region': 'us-east-1',
    'versioning': True,
    'encryption': True
}

response = requests.post(
    'http://localhost:5000/api/buckets',
    headers={'X-API-Key': 'your-api-key'},
    json=bucket_spec
)

# 201 Created signals success; any other status carries an error payload.
if response.status_code != 201:
    print(f"Error: {response.json()}")
else:
    print(f"Bucket created: {response.json()}")
📌 Bucket Naming Rules:
  • Must be between 3 and 63 characters
  • Can only contain lowercase letters, numbers, dots, and hyphens
  • Must start and end with a letter or number

📤 Upload Files

Upload via cURL

curl -X POST http://localhost:5000/api/buckets/my-documents/upload \
  -H "X-API-Key: your-api-key" \
  -F "file=@/path/to/your/document.pdf"

Upload via Python

import requests

# Open the file in a context manager so the handle is closed even if the
# request raises — the original left it open (resource leak).
with open('document.pdf', 'rb') as fh:
    response = requests.post(
        'http://localhost:5000/api/buckets/my-documents/upload',
        headers={'X-API-Key': 'your-api-key'},
        files={'file': fh}
    )

if response.status_code == 200:
    result = response.json()
    print(f"File uploaded: {result}")
    print(f"Object ID: {result['object_id']}")
    print(f"ETag: {result['etag']}")

Upload with Metadata

import requests

# Extra form fields travel alongside the file part; 'metadata' is a
# JSON-encoded string for the server to parse.
data = {
    'metadata': '{"author":"John Doe", "project":"docs"}',
    'storage_class': 'STANDARD'
}

# Context manager closes the file handle the original snippet leaked.
with open('image.jpg', 'rb') as fh:
    response = requests.post(
        'http://localhost:5000/api/buckets/my-images/upload',
        headers={'X-API-Key': 'your-api-key'},
        data=data,
        files={'file': fh}
    )

📥 Download Files

Download via Browser

http://localhost:5000/api/storage/obj-abc123/download

Download via Python

import requests

object_id = 'obj-abc123'
response = requests.get(
    f'http://localhost:5000/api/storage/{object_id}/download',
    headers={'X-API-Key': 'your-api-key'},
    stream=True  # stream so large files are not buffered entirely in memory
)

if response.status_code == 200:
    # Derive a filename from the Content-Disposition header. The original
    # crashed (AttributeError) when the header was absent and kept any
    # surrounding quotes; fall back to the object id instead.
    disposition = response.headers.get('Content-Disposition', '')
    if 'filename=' in disposition:
        filename = disposition.split('filename=')[1].strip('"; ')
    else:
        filename = object_id

    # Save the body to disk in 8 KiB chunks
    with open(filename, 'wb') as f:
        for chunk in response.iter_content(chunk_size=8192):
            f.write(chunk)

    # Fixed: the original printed a garbled "(unknown)" placeholder here
    print(f"File downloaded as {filename}")

Get CDN URL for Fast Delivery

# Get CDN URL for faster downloads
# (uses `requests` and `object_id` from the previous snippet)
response = requests.get(
    f'http://localhost:5000/api/storage/{object_id}/cdn-url',
    headers={'X-API-Key': 'your-api-key'}
)

# NOTE(review): assumes a successful response containing a 'cdn_url' field —
# consider checking response.status_code before indexing.
cdn_url = response.json()['cdn_url']
print(f"Download from CDN: {cdn_url}")

📋 List Files in Bucket

curl -X GET http://localhost:5000/api/buckets/my-documents/files \
  -H "X-API-Key: your-api-key"
import requests

# List every object in the 'my-documents' bucket
response = requests.get(
    'http://localhost:5000/api/buckets/my-documents/files',
    headers={'X-API-Key': 'your-api-key'}
)

# The response body is a JSON array of object descriptors (see Sample Output)
files = response.json()
for file in files:
    print(f"📄 {file['name']} - {file['size_formatted']} - {file['created_at']}")

Sample Output:

[
    {
        "id": "obj-abc123",
        "name": "document.pdf",
        "size": 1048576,
        "size_formatted": "1.00 MB",
        "mime_type": "application/pdf",
        "created_at": "2024-01-15 10:30:00",
        "last_accessed": "2024-01-16 14:20:00"
    },
    {
        "id": "obj-def456",
        "name": "image.jpg",
        "size": 512000,
        "size_formatted": "500.00 KB",
        "mime_type": "image/jpeg",
        "created_at": "2024-01-14 09:15:00",
        "last_accessed": null
    }
]

❌ Delete Files

# Delete single file
curl -X DELETE http://localhost:5000/api/storage/obj-abc123 \
  -H "X-API-Key: your-api-key"
import requests

object_id = 'obj-abc123'
response = requests.delete(
    f'http://localhost:5000/api/storage/{object_id}',
    headers={'X-API-Key': 'your-api-key'}
)

if response.status_code == 200:
    print("File deleted successfully")

# Delete with confirmation
def delete_file(object_id, confirm=False, api_key='your-api-key'):
    """Delete a stored object, optionally prompting on stdin first.

    Args:
        object_id: Identifier of the object to delete (e.g. 'obj-abc123').
        confirm: When True, skip the interactive y/n prompt.
        api_key: Value sent in the X-API-Key header. New optional parameter;
            the default preserves the previous hardcoded behavior.

    Returns:
        True if the API reported a successful deletion, False otherwise.
    """
    if confirm or input(f"Delete {object_id}? (y/n): ").lower() == 'y':
        response = requests.delete(
            f'http://localhost:5000/api/storage/{object_id}',
            headers={'X-API-Key': api_key}
        )
        return response.status_code == 200
    return False

🔄 File Versioning

💡 Versioning-enabled buckets automatically keep all versions of your files

Upload New Version

# Upload same filename multiple times
curl -X POST http://localhost:5000/api/buckets/my-documents/upload \
  -H "X-API-Key: your-api-key" \
  -F "file=@document_v2.pdf"

List All Versions

curl -X GET http://localhost:5000/api/buckets/my-documents/files?include_versions=true \
  -H "X-API-Key: your-api-key"

Download Specific Version

# Download specific version by ID
curl -X GET http://localhost:5000/api/storage/obj-abc123/download?version_id=ver-456 \
  -H "X-API-Key: your-api-key" -o document_v1.pdf

🖥️ Deploy Virtual Machine

Available Instance Types

| Type       | vCPU | RAM   | Storage | Price/Hour | Best For           |
|------------|------|-------|---------|------------|--------------------|
| t2.micro   | 1    | 1 GB  | 30 GB   | $0.0116    | Free tier, testing |
| t2.small   | 1    | 2 GB  | 50 GB   | $0.023     | Small web apps     |
| t2.medium  | 2    | 4 GB  | 100 GB  | $0.0464    | Web servers        |
| c5.large   | 2    | 4 GB  | 200 GB  | $0.085     | Compute optimized  |
| c5.xlarge  | 4    | 8 GB  | 400 GB  | $0.17      | High performance   |
| r5.large   | 2    | 16 GB | 500 GB  | $0.126     | Memory optimized   |
| p3.2xlarge | 8    | 64 GB | 1 TB    | $3.06      | GPU instances      |

Create VM via API

curl -X POST http://localhost:5000/api/vms \
  -H "Content-Type: application/json" \
  -H "X-API-Key: your-api-key" \
  -d '{
    "name": "my-web-server",
    "instance_type": "t2.medium",
    "region": "us-east-1",
    "tags": "web,production"
  }'

Create VM via Python

import requests

# Desired VM configuration (see the instance-type table above for sizes)
vm_config = {
    'name': 'my-web-server',
    'instance_type': 't2.medium',
    'region': 'us-east-1',
    'tags': 'web,production'
}

response = requests.post(
    'http://localhost:5000/api/vms',
    headers={'X-API-Key': 'your-api-key'},
    json=vm_config
)

if response.status_code == 201:
    # 201 Created: the body carries the new VM's id and IP address
    vm = response.json()
    print(f"VM created: {vm['vm_id']}")
    print(f"IP Address: {vm['ip_address']}")
    
    # Start the VM — creation alone does not boot it
    start_response = requests.post(
        f"http://localhost:5000/api/vms/{vm['vm_id']}/start",
        headers={'X-API-Key': 'your-api-key'}
    )
    print(f"VM starting: {start_response.json()}")
else:
    print(f"Error: {response.json()}")

Manage VM Lifecycle

# List all VMs
response = requests.get(
    'http://localhost:5000/api/vms',
    headers={'X-API-Key': 'your-api-key'}
)

# Stop VM (`vm_id` comes from the create/list responses above)
response = requests.post(
    f'http://localhost:5000/api/vms/{vm_id}/stop',
    headers={'X-API-Key': 'your-api-key'}
)

# Delete VM (must be stopped first)
response = requests.delete(
    f'http://localhost:5000/api/vms/{vm_id}',
    headers={'X-API-Key': 'your-api-key'}
)

# Get VM metrics
response = requests.get(
    f'http://localhost:5000/api/vms/{vm_id}/metrics',
    headers={'X-API-Key': 'your-api-key'}
)
metrics = response.json()

📱 Deploy Application

1

Prepare Your Application

Create a simple Flask application

# app.py
# Minimal Flask application used as the deployment example.
from flask import Flask
app = Flask(__name__)

@app.route('/')
def hello():
    """Root endpoint: return a plain-text greeting."""
    return "Hello from Cloud Platform!"

if __name__ == '__main__':
    # 0.0.0.0 binds all interfaces so the app is reachable from outside the VM
    app.run(host='0.0.0.0', port=8080)
2

Create requirements.txt

flask==2.3.3
gunicorn==21.2.0
3

Create VM and Deploy

# First create VM
# NOTE(review): create_vm/start_vm/get_vm are illustrative wrappers around
# the REST endpoints shown earlier; they are not defined in this snippet.
vm_id = create_vm('app-server', 't2.medium')
start_vm(vm_id)

# Get VM IP
vm_info = get_vm(vm_id)
ip_address = vm_info['ip_address']

# Deploy via SSH (simplified)
print(f"Deploy to {ip_address} via SSH")

# On the VM:
# scp app.py ubuntu@{ip}:~/
# scp requirements.txt ubuntu@{ip}:~/
# ssh ubuntu@{ip} "pip install -r requirements.txt && gunicorn --bind 0.0.0.0:8080 app:app &"

🐳 Docker Deployment

Create Dockerfile

# Slim Python base image keeps the final image small.
FROM python:3.11-slim

WORKDIR /app

# Copy requirements first so the pip layer is cached across source-only rebuilds.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

# Serve the Flask app with gunicorn on port 8080.
CMD ["gunicorn", "--bind", "0.0.0.0:8080", "app:app"]

Build and Deploy

# Build Docker image from the Dockerfile in the current directory
docker build -t myapp:latest .

# Save image as a compressed tarball for transfer to the VM
docker save myapp:latest | gzip > myapp.tar.gz

# Upload to VM
# scp myapp.tar.gz ubuntu@{ip}:~/

# On VM: Load and run (detached, host port 8080 -> container port 8080)
# docker load < myapp.tar.gz
# docker run -d -p 8080:8080 --name myapp myapp:latest

📤 Upload and Process Data

Upload CSV Data

import csv
import requests
import json

# Create CSV data: header row followed by three data rows
data = [
    ['name', 'age', 'city'],
    ['John', '30', 'New York'],
    ['Jane', '25', 'Los Angeles'],
    ['Bob', '35', 'Chicago']
]

# Save to a temporary file (newline='' avoids blank rows on Windows)
with open('data.csv', 'w', newline='') as f:
    writer = csv.writer(f)
    writer.writerows(data)

# Upload to bucket — open inside `with` so the handle is closed even if
# the request raises (the original leaked it).
with open('data.csv', 'rb') as fh:
    response = requests.post(
        'http://localhost:5000/api/buckets/my-data/upload',
        headers={'X-API-Key': 'your-api-key'},
        files={'file': fh}
    )

if response.status_code == 200:
    result = response.json()
    object_id = result['object_id']
    print(f"Data uploaded with ID: {object_id}")

Upload JSON Data

import json

# Create JSON data
json_data = {
    'users': [
        {'id': 1, 'name': 'John', 'email': 'john@example.com'},
        {'id': 2, 'name': 'Jane', 'email': 'jane@example.com'}
    ],
    'total': 2,
    'timestamp': '2024-01-15T10:30:00'
}

# Save to file
with open('users.json', 'w') as f:
    json.dump(json_data, f, indent=2)

# Upload — the context manager closes the handle the original leaked
# (`requests` comes from the earlier snippets in this guide)
with open('users.json', 'rb') as fh:
    response = requests.post(
        'http://localhost:5000/api/buckets/my-data/upload',
        headers={'X-API-Key': 'your-api-key'},
        files={'file': fh}
    )

📥 Retrieve and Process Data

Download and Parse CSV

import csv
import requests
from io import StringIO

# Download file
object_id = 'obj-abc123'
response = requests.get(
    f'http://localhost:5000/api/storage/{object_id}/download',
    headers={'X-API-Key': 'your-api-key'}
)

# Parse CSV from the in-memory response body
csv_data = response.content.decode('utf-8')
reader = csv.reader(StringIO(csv_data))

# Build one dict per row keyed by the header. The original hard-coded
# exactly three columns (header[0..2]); zip() works for any column count.
header = next(reader)
rows = []
for row in reader:
    record = dict(zip(header, row))
    record['age'] = int(record['age'])  # numeric field used in the stats below
    rows.append(record)

# Calculate statistics (guard the division against an empty file)
average_age = sum(row['age'] for row in rows) / len(rows) if rows else 0
print(f"Average age: {average_age}")

# Group by city
city_count = {}
for row in rows:
    city = row['city']
    city_count[city] = city_count.get(city, 0) + 1

print(f"People by city: {city_count}")

Process JSON Data

import json

# Download JSON file (`requests` and `object_id` come from the snippet above)
response = requests.get(
    f'http://localhost:5000/api/storage/{object_id}/download',
    headers={'X-API-Key': 'your-api-key'}
)

# Parse the JSON body into a dict
data = response.json()

# Process data — expects the {'users': [...], ...} shape uploaded earlier
for user in data['users']:
    print(f"User: {user['name']} - {user['email']}")

# Transform data
email_list = [user['email'] for user in data['users']]
print(f"Emails: {email_list}")

💿 Backup & Restore

⚠️ Important: Regular backups protect your data from accidental loss

Create Backup

# Admin only: Create recovery point
curl -X POST http://localhost:5000/api/disaster-recovery/points \
  -H "X-API-Key: admin-api-key"

Manual Backup Script

import shutil
import datetime

def create_backup():
    """Snapshot the SQLite database and zip the user-file directory.

    Returns:
        The timestamped backup database filename (backup_<ts>.db).
    """
    # Backup database
    timestamp = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
    backup_file = f"backup_{timestamp}.db"

    # Copy SQLite database (copy2 also preserves file metadata)
    shutil.copy2('cloud_platform.db', backup_file)

    # Backup user files: writes files_backup_<ts>.zip from cloud_storage/
    shutil.make_archive(
        f"files_backup_{timestamp}",
        'zip',
        'cloud_storage'
    )

    print(f"Backup created: {backup_file}")
    return backup_file

# Schedule automatic backups
# NOTE(review): `schedule` is a third-party package (pip install schedule)
import schedule
import time

# Backup every day at 2 AM
schedule.every().day.at("02:00").do(create_backup)

# This loop blocks forever — run it in a dedicated process or service
while True:
    schedule.run_pending()
    time.sleep(60)

Restore from Backup

def restore_from_backup(backup_file):
    """Restore the database and user files from a timestamped backup.

    Args:
        backup_file: Path like 'backup_<timestamp>.db' as produced by
            create_backup(); the matching 'files_backup_<timestamp>.zip'
            archive must sit alongside it.
    """
    # Stop services
    # os.system("systemctl stop cloud-platform")

    # Restore database (relies on `shutil` imported by the backup snippet)
    shutil.copy2(backup_file, 'cloud_platform.db')

    # Derive the companion archive name: backup_X.db -> files_backup_X.zip
    backup_zip = backup_file.replace('.db', '.zip').replace('backup_', 'files_backup_')
    shutil.unpack_archive(backup_zip, 'cloud_storage')

    # Restart services
    # os.system("systemctl start cloud-platform")

    print(f"Restored from {backup_file}")

📈 System Metrics

Get System Status

curl http://localhost:5000/api/system/status

Get VM Metrics

curl -X GET http://localhost:5000/api/vms/{vm_id}/metrics \
  -H "X-API-Key: your-api-key"

Generate Usage Charts

import matplotlib.pyplot as plt
import requests

# Get metrics (no API key header is sent — matches the curl example above)
response = requests.get(
    'http://localhost:5000/api/system/status'
)
data = response.json()

# Create chart from the reported cpu/memory/disk usage percentages
labels = ['CPU', 'Memory', 'Disk']
values = [
    data['metrics']['cpu'],
    data['metrics']['memory'],
    data['metrics']['disk']
]

plt.figure(figsize=(10, 6))
plt.bar(labels, values, color=['#4299e1', '#48bb78', '#ed8936'])
plt.title('System Metrics')
plt.ylabel('Usage %')

# Add value labels: place each percentage just above its bar, centered
for i, v in enumerate(values):
    plt.text(i, v + 1, f"{v}%", ha='center')

plt.savefig('system_metrics.png')
plt.close()

💰 Billing Analytics

Get Usage Report

curl -X GET http://localhost:5000/api/billing/usage \
  -H "X-API-Key: your-api-key"

Generate Cost Breakdown Chart

import matplotlib.pyplot as plt

# Sample cost data: one slice per service category (USD)
costs = {
    'Compute': 45.67,
    'Storage': 12.34,
    'Network': 5.89,
    'Database': 8.45
}

# Render the breakdown as a pie chart and write it to disk
palette = ['#4299e1', '#48bb78', '#ed8936', '#9f7aea']
plt.figure(figsize=(8, 8))
plt.pie(
    costs.values(),
    labels=costs.keys(),
    colors=palette,
    autopct='%1.1f%%',
    startangle=90
)
plt.title('Monthly Cost Breakdown')
plt.axis('equal')  # equal aspect ratio keeps the pie circular
plt.savefig('cost_breakdown.png')
plt.close()