Skip to content
This repository was archived by the owner on Mar 2, 2026. It is now read-only.
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
{
"name": "MDPS Dev Container",
"image": "mcr.microsoft.com/devcontainers/python:3.11",
"postCreateCommand": "python -m pip install --upgrade pip && python -m pip install -r requirements.txt -r requirements-api.txt && python -m pip install -e . && python -m ipykernel install --user --name mdps-dev --display-name 'MDPS (devcontainer)'",
"customizations": {
"vscode": {
"extensions": [
"ms-python.python",
"ms-python.vscode-pylance"
]
}
}
}
44 changes: 44 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# Virtual environments
venv/
ENV/
env/
.venv

# IDE
.vscode/
.idea/
*.swp
*.swo
*~

# MDPS specific
.quant_runs/
logs/
*.log
.env

# OS
.DS_Store
Thumbs.db
104 changes: 104 additions & 0 deletions app/main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
import os
import json
import uuid
import traceback
import importlib
from fastapi import FastAPI, BackgroundTasks, HTTPException
from pydantic import BaseModel
from typing import Any, Dict

# FastAPI application exposing job trigger/status endpoints for MDPS.
app = FastAPI(title="MDPS Trigger API")

# Repository root (parent of this file's directory); per-job status JSON
# documents are persisted under <root>/.quant_runs.
WORKDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
OUT_DIR = os.path.join(WORKDIR, ".quant_runs")
os.makedirs(OUT_DIR, exist_ok=True)

# MDPS_ENTRYPOINT format: "module.path:callable"
ENTRYPOINT = os.environ.get("MDPS_ENTRYPOINT", "src.api:call")

# tasks.yml mapping file (optional): task name -> "module.path:callable" spec.
TASKS_FILE = os.path.join(WORKDIR, "tasks.yml")  # repo root
TASKS = {}
if os.path.exists(TASKS_FILE):
    try:
        import yaml
        with open(TASKS_FILE, "r", encoding="utf-8") as f:
            TASKS = yaml.safe_load(f) or {}
    except Exception:
        # Best-effort: a missing or unparsable tasks file (or missing PyYAML)
        # just disables named tasks instead of breaking startup.
        TASKS = {}

class RunRequest(BaseModel):
    # Free-form job parameters, forwarded verbatim to the entrypoint callable.
    # (The mutable {} default is safe here: pydantic v2 — pinned in
    # requirements-api.txt — copies defaults per instance.)
    params: Dict[str, Any] = {}

def _load_entrypoint(entrypoint: str):
    """Resolve a ``module.path:callable`` spec to the callable it names.

    Raises:
        ValueError: the spec has no ``:`` separator.
        TypeError: the named attribute exists but is not callable.
    """
    if ":" not in entrypoint:
        raise ValueError("ENTRYPOINT must be in module.path:callable format")
    module_name, fn_name = entrypoint.split(":", 1)
    target = getattr(importlib.import_module(module_name), fn_name)
    if not callable(target):
        raise TypeError(f"{fn_name} in {module_name} is not callable")
    return target

def _write_status(job_id: str, payload: dict):
    """Persist the job's status document as ``<OUT_DIR>/<job_id>.json``."""
    status_path = os.path.join(OUT_DIR, f"{job_id}.json")
    with open(status_path, "w", encoding="utf-8") as fh:
        json.dump(payload, fh, indent=2)

def _safe_call_entrypoint(fn, params: dict, job_id: str):
    """Run ``fn(params)`` in a background task, recording status transitions.

    Writes "running" before the call and "completed"/"failed" after; a job
    that returns None is assumed to have persisted its own output.
    """
    try:
        _write_status(job_id, {"job_id": job_id, "status": "running"})
        outcome = fn(params)
        # None means the job wrote its own artifacts elsewhere.
        final = "written_by_job" if outcome is None else outcome
        _write_status(job_id, {"job_id": job_id, "status": "completed", "result": final})
    except Exception as e:
        _write_status(
            job_id,
            {
                "job_id": job_id,
                "status": "failed",
                "error": str(e),
                "traceback": traceback.format_exc(),
            },
        )

@app.post("/run")
def run_quant(req: RunRequest, background_tasks: BackgroundTasks):
job_id = str(uuid.uuid4())
_write_status(job_id, {"job_id": job_id, "status": "queued"})
try:
fn = _load_entrypoint(ENTRYPOINT)
except Exception as e:
_write_status(job_id, {"job_id": job_id, "status": "failed", "error": f"entrypoint load error: {e}"})
raise HTTPException(status_code=500, detail=f"cannot load entrypoint: {e}")
background_tasks.add_task(_safe_call_entrypoint, fn, req.params, job_id)
return {"job_id": job_id, "status_url": f"/status/{job_id}"}

@app.post("/run/{task_name}")
def run_task(task_name: str, req: RunRequest, background_tasks: BackgroundTasks):
if not TASKS:
try:
import yaml
with open(TASKS_FILE, "r", encoding="utf-8") as f:
mapping = yaml.safe_load(f) or {}
except Exception:
mapping = {}
else:
mapping = TASKS

if task_name not in mapping:
raise HTTPException(status_code=404, detail="task not found")
entry = mapping[task_name]
job_id = str(uuid.uuid4())
_write_status(job_id, {"job_id": job_id, "status": "queued", "task": task_name})
try:
fn = _load_entrypoint(entry)
except Exception as e:
_write_status(job_id, {"job_id": job_id, "status": "failed", "error": f"entrypoint load error: {e}"})
raise HTTPException(status_code=500, detail=f"cannot load entrypoint: {e}")
background_tasks.add_task(_safe_call_entrypoint, fn, req.params, job_id)
return {"job_id": job_id, "status_url": f"/status/{job_id}"}

@app.get("/status/{job_id}")
def job_status(job_id: str):
status_file = os.path.join(OUT_DIR, f"{job_id}.json")
if not os.path.exists(status_file):
raise HTTPException(status_code=404, detail="job not found")
with open(status_file, "r", encoding="utf-8") as f:
data = json.load(f)
return data
5 changes: 5 additions & 0 deletions requirements-api.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# API/Web Framework Requirements
fastapi==0.103.1
uvicorn[standard]==0.23.2
pydantic==2.3.0
pyyaml==6.0.1
104 changes: 9 additions & 95 deletions setup.py
Original file line number Diff line number Diff line change
@@ -1,95 +1,9 @@
"""
MDPS Setup Script
Initializes the environment and checks dependencies
"""

import os
import sys
import subprocess
import shutil
from pathlib import Path

def check_dependencies():
    """Verify the core third-party packages import cleanly; exit(1) otherwise."""
    try:
        import MetaTrader5
        import pandas
        import numpy
        import tensorflow
        import torch
        print("✅ Core dependencies found")
    except ImportError as missing:
        print(f"❌ Missing dependency: {missing}")
        print("Please run: pip install -r requirements.txt")
        sys.exit(1)

def setup_directories():
    """Create the data/log/model/config directory tree (idempotent)."""
    for dir_path in (
        "data/raw",
        "data/processed",
        "data/features",
        "data/models",
        "logs",
        "models",
        "configs",
    ):
        # parents+exist_ok makes re-running setup harmless.
        Path(dir_path).mkdir(parents=True, exist_ok=True)
        print(f"✅ Created directory: {dir_path}")

def check_mt5_installation():
    """Probe the MetaTrader 5 terminal; return True only on a live connection."""
    try:
        import MetaTrader5 as mt5
        if not mt5.initialize():
            print("❌ MetaTrader 5 initialization failed")
            print("Please check MT5 installation and credentials in .env file")
            return False
        # Connection works; release the terminal handle again.
        mt5.shutdown()
    except Exception as exc:
        # Import failure or any terminal error is reported, not raised.
        print(f"❌ MetaTrader 5 error: {exc}")
        return False
    print("✅ MetaTrader 5 connection successful")
    return True

def setup_environment():
    """Load credentials from a local .env file; return False when unavailable."""
    if not os.path.exists('.env'):
        print("❌ .env file not found")
        print("Please create .env file with necessary credentials")
        return False

    try:
        from dotenv import load_dotenv
        load_dotenv()
    except Exception as exc:
        # Covers both a missing python-dotenv package and load failures.
        print(f"❌ Environment setup error: {exc}")
        return False
    print("✅ Environment variables loaded")
    return True

def main():
    """Run every MDPS setup step in order and print follow-up instructions."""
    print("Starting MDPS Setup...")

    # Refuse to continue on an interpreter older than the project supports.
    if sys.version_info < (3, 8):
        print("❌ Python 3.8 or higher is required")
        sys.exit(1)

    check_dependencies()
    setup_directories()
    setup_environment()
    check_mt5_installation()

    print("\nSetup Complete! You can now run MDPS.")
    print("\nTo start the system:")
    print("1. Configure your .env file with your credentials")
    print("2. Run 'python -m mdps' to start the system")
    print(" or 'python run_mdps.py' for a single-cycle run")


if __name__ == "__main__":
    main()
from setuptools import setup, find_packages

# Minimal packaging configuration using the "src layout": importable packages
# live under src/ (matching package_dir below). Runtime dependencies are
# managed via requirements*.txt, so install_requires is intentionally empty.
setup(
    name="mdps",
    version="0.0.1",
    packages=find_packages(where="src"),
    package_dir={"": "src"},
    install_requires=[],
)
14 changes: 14 additions & 0 deletions src/indicators/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# MDPS Plugin Structure

This directory contains indicator plugins for the MDPS system.

## Structure

- `vortex/` - Vortex indicator plugin
- `clime/` - CLIME algorithm integration plugin

## Usage

Each plugin exposes a `*_run(params: dict) -> dict` function that can be called via the FastAPI endpoint `/run/{task_name}`.

Tasks are mapped in the root `tasks.yml` file.
Empty file added src/indicators/__init__.py
Empty file.
3 changes: 3 additions & 0 deletions src/indicators/clime/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
from .clime import clime_run

__all__ = ["clime_run"]
10 changes: 10 additions & 0 deletions src/indicators/clime/clime.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
"""CLIME integration placeholder.
Replace with the actual CLIME algorithm and interface.
"""

def clime_run(params: dict) -> dict:
    """Run the CLIME placeholder and return a JSON-serializable summary.

    Args:
        params: job parameters; only "symbol" is currently read.
    """
    # placeholder until the real CLIME algorithm is wired in
    return {
        "job": "clime",
        "symbol": params.get("symbol", "UNKNOWN"),
        "status": "ok",
    }
3 changes: 3 additions & 0 deletions src/indicators/vortex/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
from .vortex import vortex_run

__all__ = ["vortex_run"]
18 changes: 18 additions & 0 deletions src/indicators/vortex/vortex.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
"""Simple placeholder Vortex indicator module.
Replace the implementation with your production Vortex code.
"""

def vortex_run(params: dict) -> dict:
    """Run the placeholder Vortex indicator logic.

    params example: {"symbol": "BTCUSDT", "window": 14}
    Returns a JSON-serializable dict with signals and metrics.
    """
    symbol = params.get("symbol", "UNKNOWN")
    # int() accepts numeric strings coming from query/JSON params.
    window = int(params.get("window", 14))

    # placeholder computation until the production Vortex code lands
    return {
        "job": "vortex",
        "symbol": symbol,
        "signals": [{"timestamp": 0, "signal": "hold"}],
        "metrics": {"window": window, "symbol": symbol},
    }
2 changes: 2 additions & 0 deletions tasks.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
vortex: indicators.vortex:vortex_run
clime: indicators.clime:clime_run