Recommended patterns and practices for building reliable Streamline automations.

## Project Structure

Organize your automation with a clear, predictable layout:
```
my-automation/
├── main.py              # Entry point
├── config/
│   ├── settings.json    # Configuration
│   └── secrets.env      # Environment variables
├── src/
│   ├── handlers/        # Event handlers
│   ├── utils/           # Utility functions
│   └── models/          # Data models
├── tests/
│   └── test_main.py     # Unit tests
└── requirements.txt     # Dependencies
```

## Entry Point

Always define a clear entry point for your automation:
```python
# main.py
def main(params=None):
    """Main automation entry point."""
    params = params or {}
    # Your automation logic
    result = {"success": True}
    return result

if __name__ == "__main__":
    main()
```

## Error Handling

Catch expected errors explicitly and let unexpected ones propagate:

```python
# Inside your handler function:
try:
    result = process_data(input_data)
except ValueError as e:
    logger.error(f"Invalid input: {e}")
    return {"success": False, "error": str(e)}
except Exception as e:
    logger.error(f"Unexpected error: {e}")
    raise
```

## Logging

Use structured logging so log entries can be filtered and searched:

```python
import logging
logger = logging.getLogger(__name__)
logger.info("Processing started", extra={
"automation_id": automation_id,
"input_size": len(data)
})❌ Bad:
API_KEY = "sk_live_abc123"✅ Good:
import os
API_KEY = os.environ["FREDDY_API_KEY"]def process_user_input(data):
    if not isinstance(data, dict):
        raise ValueError("Input must be a dictionary")
    required_fields = ["name", "email"]
    for field in required_fields:
        if field not in data:
            raise ValueError(f"Missing required field: {field}")
    return sanitize_data(data)
```
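`sanitize_data` is left to the project; here is a minimal sketch of such a helper, assuming the goal is simply to trim strings and drop unexpected fields (the allowed-field set is illustrative):

```python
ALLOWED_FIELDS = {"name", "email"}

def sanitize_data(data):
    """Strip whitespace from string values and drop fields outside the allowed set."""
    return {
        key: value.strip() if isinstance(value, str) else value
        for key, value in data.items()
        if key in ALLOWED_FIELDS
    }
```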
Store sensitive configuration in environment variables:

```bash
# .env
FREDDY_API_KEY=your-api-key
DATABASE_URL=postgresql://...
WEBHOOK_SECRET=your-webhook-secret
```
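For local runs you need these variables in your process environment; one common approach, assuming the `python-dotenv` package is installed:

```python
# Sketch: load .env into os.environ for local development.
import os

from dotenv import load_dotenv

load_dotenv()  # reads .env from the current directory
API_KEY = os.environ["FREDDY_API_KEY"]
```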
## Performance

- Cache frequently accessed data
- Use batch operations when possible
- Avoid unnecessary API calls
- Implement pagination for large datasets (see the sketch after the caching example below)
```python
# Cache expensive operations
from functools import lru_cache

@lru_cache(maxsize=100)
def get_user_data(user_id):
    return api.fetch_user(user_id)
```
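The pagination pattern from the list above can be wrapped in a generator. A sketch assuming a hypothetical list endpoint that accepts `page` and `page_size` query parameters and returns an `items` array (adjust to the actual endpoint's scheme):

```python
import requests

def iter_items(url, api_key, page_size=100):
    """Yield items one page at a time from a paginated list endpoint."""
    page = 1
    while True:
        response = requests.get(
            url,
            headers={"X-API-Key": api_key},
            params={"page": page, "page_size": page_size},
        )
        response.raise_for_status()
        items = response.json().get("items", [])
        if not items:
            return  # no more pages
        yield from items
        page += 1
```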
## Rate Limiting

Back off and retry when the API returns HTTP 429:

```python
import time

import requests
from requests.exceptions import HTTPError

def api_call_with_retry(url, max_retries=3):
    for attempt in range(max_retries):
        try:
            response = requests.get(url)
            response.raise_for_status()
            return response.json()
        except HTTPError as e:
            if e.response.status_code == 429:
                wait_time = 2 ** attempt  # exponential backoff
                logger.warning(f"Rate limited, waiting {wait_time}s")
                time.sleep(wait_time)
            else:
                raise
    raise Exception("Max retries exceeded")
```

## Testing

Cover both valid and invalid input:

```python
# tests/test_main.py
import pytest

from main import process_data

def test_process_data_valid_input():
    result = process_data({"key": "value"})
    assert result["success"] is True

def test_process_data_invalid_input():
    with pytest.raises(ValueError):
        process_data(None)
```

Always test your automation locally before deploying:
```bash
streamline run main.py --params '{"test": true}'
```

Share test setup through pytest fixtures:

```python
# tests/conftest.py
import pytest

@pytest.fixture
def mock_api_response():
    return {
        "id": "test_123",
        "status": "success",
        "data": {"result": "test"}
    }
```
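pytest injects a fixture into any test that names it as a parameter; an illustrative test using the fixture above:

```python
# tests/test_api.py (illustrative)
def test_mock_response_shape(mock_api_response):
    # pytest matches the parameter name to the fixture in conftest.py
    assert mock_api_response["status"] == "success"
    assert mock_api_response["data"]["result"] == "test"
```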
git commit -m "Fix: Resolve pagination issue in list endpoint"main- Production-ready codedevelop- Development branchfeature/*- Feature brancheshotfix/*- Emergency fixes
Never commit secrets:

```
# .gitignore
.env
*.env
secrets/
config/local.json
```

## Monitoring

Emit structured events at run start and completion:

```python
logger.info("Automation started", extra={
"automation_id": automation_id,
"trigger": "webhook"
})
logger.info("Processing complete", extra={
"records_processed": count,
"duration_ms": duration
})import time

start_time = time.time()
result = process_data(input_data)
duration = (time.time() - start_time) * 1000
logger.info(f"Processing took {duration:.2f}ms")
```

Regularly check sync logs for errors:
```bash
curl -H "X-API-Key: $FREDDY_API_KEY" \
  "https://api.aitronos.com/api/v1/streamline/automations/sauto_abc123/sync-logs?status=error"
```

## Type Hints

Annotate signatures so parameters and return types are explicit:

```python
from typing import Dict, List, Optional

def process_items(
    items: List[Dict],
    filter_key: Optional[str] = None
) -> Dict:
    """Process a list of items with optional filtering."""
    if filter_key is not None:
        # Keep only items that define the filter key
        items = [item for item in items if filter_key in item]
    return {"items": items, "count": len(items)}
```

## Parameters

Give every parameter a sensible default:

```python
def main(params: Optional[Dict] = None):
    params = params or {}
    # Set defaults
    batch_size = params.get("batch_size", 100)
    timeout = params.get("timeout", 30)
    # Your logic here
```

Validate parameters before using them:

```python
def validate_params(params: Dict) -> None:
"""Validate automation parameters"""
if "api_key" not in params:
raise ValueError("api_key is required")
if params.get("batch_size", 0) < 1:
raise ValueError("batch_size must be positive")def process_webhook(payload: Dict) -> Dict:
"""
Process incoming webhook payload.
Args:
payload: Webhook data containing event information
Returns:
Dict with processing results and status
Raises:
ValueError: If payload is invalid
HTTPError: If API call fails
"""
pass# My Automation
## Description
Brief description of what this automation does.

## Setup
1. Install dependencies: `pip install -r requirements.txt`
2. Configure environment: `cp .env.example .env`
3. Run locally: `streamline run`

## Parameters
- `batch_size` (int): Number of items to process per batch
- `timeout` (int): Request timeout in seconds

## Deployment
Deploy with: `streamline deploy`
```

## Common Pitfalls

❌ Hardcoding configuration values
❌ Ignoring error handling
❌ Not testing locally before deploying
❌ Committing secrets to Git
❌ Not validating input parameters
❌ Ignoring rate limits
❌ Not logging important events

✅ Use environment variables
✅ Implement comprehensive error handling
✅ Test thoroughly before deployment
✅ Use .gitignore for secrets
✅ Validate all inputs
✅ Implement retry logic
✅ Log all significant operations
## Next Steps

- Getting Started - Create your first automation
- Project Structure - Organize your code
- Parameters - Configure automation parameters
- GitHub Deployment - Deploy from GitHub
- API Reference - Complete API documentation