diff --git a/scripts/db-debug.sh b/scripts/db-debug.sh index dcab08236b..0c608bded4 100755 --- a/scripts/db-debug.sh +++ b/scripts/db-debug.sh @@ -5,6 +5,9 @@ # Usage: # ./scripts/db-debug.sh # Default query (assets) # ./scripts/db-debug.sh "SELECT TOP 10 id FROM asset" # Custom SQL query +# ./scripts/db-debug.sh --anomalies # Show invalid FinancialDataLog entries +# ./scripts/db-debug.sh --balance # Show recent balance history +# ./scripts/db-debug.sh --asset-history Yapeal/EUR 10 # Show asset balance history # # Environment: # Copy .env.db-debug.sample to .env.db-debug and fill in your credentials @@ -15,10 +18,92 @@ set -e +# --- Help (before auth) --- +if [ "${1:-}" = "-h" ] || [ "${1:-}" = "--help" ]; then + echo "DFX API Debug Database Access Script" + echo "" + echo "Usage:" + echo " ./scripts/db-debug.sh [OPTIONS] [SQL_QUERY]" + echo "" + echo "Options:" + echo " -h, --help Show this help" + echo " -a, --anomalies [N] Show invalid FinancialDataLog entries (default: 20)" + echo " -b, --balance [N] Show recent total balance history (default: 20)" + echo " -s, --stats Show log statistics by system/subsystem" + echo " -A, --asset-history [N]" + echo " Show balance history for asset (default: 10)" + echo "" + echo "Examples:" + echo " ./scripts/db-debug.sh --anomalies 50" + echo " ./scripts/db-debug.sh --balance 10" + echo " ./scripts/db-debug.sh --asset-history 405 20" + echo " ./scripts/db-debug.sh --asset-history Yapeal/EUR 20" + echo " ./scripts/db-debug.sh --asset-history MaerkiBaumann/CHF 10" + echo " ./scripts/db-debug.sh \"SELECT TOP 10 * FROM asset\"" + exit 0 +fi + +# --- Predefined Queries --- +query_anomalies() { + local limit="${1:-20}" + echo "SELECT TOP $limit id, created, JSON_VALUE(message, '\$.balancesTotal.totalBalanceChf') as totalBalanceChf, JSON_VALUE(message, '\$.balancesTotal.plusBalanceChf') as plusBalanceChf, JSON_VALUE(message, '\$.balancesTotal.minusBalanceChf') as minusBalanceChf, valid FROM log WHERE subsystem = 'FinancialDataLog' 
AND valid = 0 ORDER BY id DESC" +} + +query_stats() { + echo "SELECT system, subsystem, severity, COUNT(*) as count FROM log GROUP BY system, subsystem, severity ORDER BY count DESC" +} + +query_balance() { + local limit="${1:-20}" + echo "SELECT TOP $limit id, created, JSON_VALUE(message, '\$.balancesTotal.totalBalanceChf') as totalBalanceChf, JSON_VALUE(message, '\$.balancesTotal.plusBalanceChf') as plusBalanceChf, JSON_VALUE(message, '\$.balancesTotal.minusBalanceChf') as minusBalanceChf, valid FROM log WHERE subsystem = 'FinancialDataLog' ORDER BY id DESC" +} + +query_asset_raw() { + local limit="${1:-10}" + echo "SELECT TOP $limit id, created, message FROM log WHERE subsystem = 'FinancialDataLog' ORDER BY id DESC" +} + +# --- Parse arguments FIRST --- +SQL="" +ASSET_HISTORY_MODE="" +ASSET_ID="" +ASSET_INPUT="" +ASSET_LIMIT="10" + +case "${1:-}" in + -a|--anomalies) + SQL=$(query_anomalies "${2:-20}") + ;; + -s|--stats) + SQL=$(query_stats) + ;; + -b|--balance) + SQL=$(query_balance "${2:-20}") + ;; + -A|--asset-history) + if [ -z "${2:-}" ]; then + echo "Error: --asset-history requires an asset ID or name" + echo "Usage: ./scripts/db-debug.sh --asset-history [LIMIT]" + echo "" + echo "Examples:" + echo " ./scripts/db-debug.sh --asset-history 405 20" + echo " ./scripts/db-debug.sh --asset-history Yapeal/EUR 20" + echo " ./scripts/db-debug.sh --asset-history MaerkiBaumann/CHF 10" + exit 1 + fi + ASSET_HISTORY_MODE="1" + ASSET_INPUT="$2" + ASSET_LIMIT="${3:-10}" + ;; + *) + SQL="${1:-SELECT TOP 5 id, name, blockchain FROM asset ORDER BY id DESC}" + ;; +esac + +# --- Load environment --- SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" ENV_FILE="$SCRIPT_DIR/.env.db-debug" -# Load environment variables if [ -f "$ENV_FILE" ]; then source "$ENV_FILE" else @@ -27,7 +112,6 @@ else exit 1 fi -# Validate required variables if [ -z "$DEBUG_ADDRESS" ] || [ -z "$DEBUG_SIGNATURE" ]; then echo "Error: DEBUG_ADDRESS and DEBUG_SIGNATURE must be set in $ENV_FILE" exit 
1 @@ -35,7 +119,7 @@ fi API_URL="${DEBUG_API_URL:-https://api.dfx.swiss/v1}" -# Get JWT Token +# --- Authenticate --- echo "=== Authenticating to $API_URL ===" TOKEN_RESPONSE=$(curl -s -X POST "$API_URL/auth/signIn" \ -H "Content-Type: application/json" \ @@ -49,27 +133,74 @@ if [ "$TOKEN" == "null" ] || [ -z "$TOKEN" ]; then exit 1 fi -# Decode and show role ROLE=$(echo "$TOKEN" | cut -d'.' -f2 | base64 -d 2>/dev/null | jq -r '.role' 2>/dev/null || echo "unknown") echo "Authenticated with role: $ROLE" echo "" -# Default SQL query if none provided -SQL="${1:-SELECT TOP 5 id, name, blockchain FROM asset ORDER BY id DESC}" +# --- Resolve asset ID if needed --- +if [ -n "$ASSET_HISTORY_MODE" ]; then + if [[ "$ASSET_INPUT" =~ ^[0-9]+$ ]]; then + ASSET_ID="$ASSET_INPUT" + else + # Parse Blockchain/Name format + BLOCKCHAIN=$(echo "$ASSET_INPUT" | cut -d'/' -f1) + ASSET_NAME=$(echo "$ASSET_INPUT" | cut -d'/' -f2) + + echo "=== Resolving Asset: $BLOCKCHAIN/$ASSET_NAME ===" + ASSET_QUERY="SELECT id, name, blockchain FROM asset WHERE blockchain = '$BLOCKCHAIN' AND name = '$ASSET_NAME'" + ASSET_RESULT=$(curl -s -X POST "$API_URL/gs/debug" \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d "{\"sql\":\"$ASSET_QUERY\"}") + ASSET_ID=$(echo "$ASSET_RESULT" | jq -r '.[0].id // empty' 2>/dev/null) + + if [ -z "$ASSET_ID" ]; then + echo "Error: Asset '$ASSET_INPUT' not found" + echo "$ASSET_RESULT" | jq . 2>/dev/null + exit 1 + fi + echo "Found: Asset ID $ASSET_ID" + echo "" + fi + SQL=$(query_asset_raw "$ASSET_LIMIT") +fi + +# --- Execute query --- echo "=== Executing SQL Query ===" echo "Query: $SQL" echo "" -# Execute debug query RESULT=$(curl -s -X POST "$API_URL/gs/debug" \ -H "Authorization: Bearer $TOKEN" \ -H "Content-Type: application/json" \ -d "{\"sql\":\"$SQL\"}") echo "=== Result ===" -if command -v jq &> /dev/null; then - echo "$RESULT" | jq . 
+ +# Special handling for asset history mode (client-side JSON parsing) +if [ -n "$ASSET_HISTORY_MODE" ]; then + if ! command -v jq &> /dev/null; then + echo "Error: jq is required for --asset-history" + exit 1 + fi + + echo "Asset ID: $ASSET_ID" + echo "" + echo "$RESULT" | jq -r --arg aid "$ASSET_ID" ' + .[] | + (.message | fromjson) as $msg | + $msg.assets[$aid] as $asset | + if $asset then + "[\(.id)] \(.created | split("T") | .[0]) \(.created | split("T") | .[1] | split(".") | .[0]) plus: \($asset.plusBalance.total // 0 | tostring | .[0:12]) minus: \($asset.minusBalance.total // 0 | tostring | .[0:12]) price: \($asset.priceChf // 0 | tostring | .[0:10])" + else + "[\(.id)] Asset \($aid) not found in this log entry" + end + ' 2>/dev/null || echo "$RESULT" | jq . else - echo "$RESULT" + if command -v jq &> /dev/null; then + echo "$RESULT" | jq . + else + echo "$RESULT" + fi fi diff --git a/scripts/kyc/dummy-files/additional_document.pdf b/scripts/kyc/dummy-files/additional_document.pdf new file mode 100644 index 0000000000..47cd93b2ef Binary files /dev/null and b/scripts/kyc/dummy-files/additional_document.pdf differ diff --git a/scripts/kyc/dummy-files/bank_statement.pdf b/scripts/kyc/dummy-files/bank_statement.pdf new file mode 100644 index 0000000000..8370cc1c85 Binary files /dev/null and b/scripts/kyc/dummy-files/bank_statement.pdf differ diff --git a/scripts/kyc/dummy-files/commercial_register.pdf b/scripts/kyc/dummy-files/commercial_register.pdf new file mode 100644 index 0000000000..4710a3239d Binary files /dev/null and b/scripts/kyc/dummy-files/commercial_register.pdf differ diff --git a/scripts/kyc/dummy-files/id_back.png b/scripts/kyc/dummy-files/id_back.png new file mode 100644 index 0000000000..a27a2d07f0 Binary files /dev/null and b/scripts/kyc/dummy-files/id_back.png differ diff --git a/scripts/kyc/dummy-files/id_front.png b/scripts/kyc/dummy-files/id_front.png new file mode 100644 index 0000000000..a27a2d07f0 Binary files /dev/null and 
b/scripts/kyc/dummy-files/id_front.png differ diff --git a/scripts/kyc/dummy-files/passport.png b/scripts/kyc/dummy-files/passport.png new file mode 100644 index 0000000000..a27a2d07f0 Binary files /dev/null and b/scripts/kyc/dummy-files/passport.png differ diff --git a/scripts/kyc/dummy-files/proof_of_address.pdf b/scripts/kyc/dummy-files/proof_of_address.pdf new file mode 100644 index 0000000000..b56ef3653e Binary files /dev/null and b/scripts/kyc/dummy-files/proof_of_address.pdf differ diff --git a/scripts/kyc/dummy-files/residence_permit.png b/scripts/kyc/dummy-files/residence_permit.png new file mode 100644 index 0000000000..a27a2d07f0 Binary files /dev/null and b/scripts/kyc/dummy-files/residence_permit.png differ diff --git a/scripts/kyc/dummy-files/selfie.jpg b/scripts/kyc/dummy-files/selfie.jpg new file mode 100644 index 0000000000..6ec79cc158 Binary files /dev/null and b/scripts/kyc/dummy-files/selfie.jpg differ diff --git a/scripts/kyc/dummy-files/source_of_funds.pdf b/scripts/kyc/dummy-files/source_of_funds.pdf new file mode 100644 index 0000000000..8e04791f57 Binary files /dev/null and b/scripts/kyc/dummy-files/source_of_funds.pdf differ diff --git a/scripts/kyc/kyc-storage.js b/scripts/kyc/kyc-storage.js new file mode 100644 index 0000000000..ba8fa2280a --- /dev/null +++ b/scripts/kyc/kyc-storage.js @@ -0,0 +1,63 @@ +/** + * This script shows KYC files stored in the database. + * In local development mode, KYC files are loaded from scripts/kyc/dummy-files/. 
+ * + * Usage: node scripts/kyc/kyc-storage.js + */ + +const mssql = require('mssql'); + +const dbConfig = { + user: process.env.SQL_USERNAME || 'sa', + password: process.env.SQL_PASSWORD || 'LocalDev2026@SQL', + server: 'localhost', + port: parseInt(process.env.SQL_PORT) || 1433, + database: process.env.SQL_DB || 'dfx', + options: { encrypt: false, trustServerCertificate: true } +}; + +async function main() { + console.log('KYC Storage Seeder'); + console.log('==================\n'); + + console.log('Note: In local development mode, KYC files are stored in-memory.'); + console.log('They need to be uploaded via the KYC flow or manually inserted.\n'); + + // Get KYC file info from database + const pool = await mssql.connect(dbConfig); + + const files = await pool.request().query(` + SELECT kf.id, kf.uid, kf.name, kf.type, kf.userDataId, ud.mail + FROM kyc_file kf + JOIN user_data ud ON kf.userDataId = ud.id + ORDER BY kf.id + `); + + console.log('KYC Files in database:'); + console.log('======================'); + + for (const file of files.recordset) { + const ext = file.name.split('.').pop().toLowerCase(); + const contentType = ext === 'pdf' ? 'application/pdf' : ext === 'jpg' || ext === 'jpeg' ? 
'image/jpeg' : 'image/png'; + + console.log(` ${file.name}`); + console.log(` UID: ${file.uid}`); + console.log(` Type: ${file.type}`); + console.log(` User: ${file.mail}`); + console.log(` Content-Type: ${contentType}`); + console.log(''); + } + + console.log('\n========================================'); + console.log('Note:'); + console.log('========================================'); + console.log(''); + console.log('In local development mode, KYC files are automatically'); + console.log('loaded from scripts/kyc/dummy-files/ by the azure-storage'); + console.log('service when the requested file is not in memory storage.'); + console.log(''); + + await pool.close(); +} + +main().catch(console.error); diff --git a/scripts/kyc/kyc-testdata.js b/scripts/kyc/kyc-testdata.js new file mode 100644 index 0000000000..e5fc7de398 --- /dev/null +++ b/scripts/kyc/kyc-testdata.js @@ -0,0 +1,330 @@ +const mssql = require('mssql'); +const crypto = require('crypto'); +const fs = require('fs'); +const path = require('path'); + +// Safety check - only local +const dbHost = process.env.SQL_HOST || 'localhost'; +if (!['localhost', '127.0.0.1'].includes(dbHost)) { + console.error('This script only runs on localhost!'); + process.exit(1); +} + +const config = { + user: process.env.SQL_USERNAME || 'sa', + password: process.env.SQL_PASSWORD || 'LocalDev2026@SQL', + server: 'localhost', + port: parseInt(process.env.SQL_PORT) || 1433, + database: process.env.SQL_DB || 'dfx', + options: { encrypt: false, trustServerCertificate: true } +}; + +function uuid() { + return crypto.randomUUID().toUpperCase(); +} + +// Create dummy files directory +const dummyDir = path.join(__dirname, 'dummy-files'); +if (!fs.existsSync(dummyDir)) { + fs.mkdirSync(dummyDir, { recursive: true }); +} + +// Create a minimal valid PNG (1x1 pixel, red) +function createDummyPng(filename) { + const pngData = Buffer.from([ + 0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, // PNG signature + 0x00, 0x00, 0x00, 0x0D, 0x49, 
0x48, 0x44, 0x52, // IHDR chunk + 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, // 1x1 pixel + 0x08, 0x02, 0x00, 0x00, 0x00, 0x90, 0x77, 0x53, + 0xDE, 0x00, 0x00, 0x00, 0x0C, 0x49, 0x44, 0x41, + 0x54, 0x08, 0xD7, 0x63, 0xF8, 0xCF, 0xC0, 0x00, + 0x00, 0x00, 0x03, 0x00, 0x01, 0x00, 0x18, 0xDD, + 0x8D, 0xB4, 0x00, 0x00, 0x00, 0x00, 0x49, 0x45, + 0x4E, 0x44, 0xAE, 0x42, 0x60, 0x82 + ]); + const filepath = path.join(dummyDir, filename); + fs.writeFileSync(filepath, pngData); + return filepath; +} + +// Create a minimal PDF +function createDummyPdf(filename, title) { + const pdfContent = `%PDF-1.4 +1 0 obj +<< /Type /Catalog /Pages 2 0 R >> +endobj +2 0 obj +<< /Type /Pages /Kids [3 0 R] /Count 1 >> +endobj +3 0 obj +<< /Type /Page /Parent 2 0 R /MediaBox [0 0 612 792] /Contents 4 0 R /Resources << /Font << /F1 5 0 R >> >> >> +endobj +4 0 obj +<< /Length 120 >> +stream +BT +/F1 24 Tf +100 700 Td +(${title}) Tj +/F1 12 Tf +0 -30 Td +(Test Document for KYC Verification) Tj +0 -20 Td +(Generated: ${new Date().toISOString()}) Tj +ET +endstream +endobj +5 0 obj +<< /Type /Font /Subtype /Type1 /BaseFont /Helvetica >> +endobj +xref +0 6 +0000000000 65535 f +0000000009 00000 n +0000000058 00000 n +0000000115 00000 n +0000000266 00000 n +0000000436 00000 n +trailer +<< /Size 6 /Root 1 0 R >> +startxref +513 +%%EOF`; + const filepath = path.join(dummyDir, filename); + fs.writeFileSync(filepath, pdfContent); + return filepath; +} + +// Create a JPEG-like file (minimal valid structure) +function createDummyJpg(filename) { + // Minimal JPEG: SOI + APP0 + minimal data + EOI + const jpgData = Buffer.from([ + 0xFF, 0xD8, // SOI + 0xFF, 0xE0, 0x00, 0x10, 0x4A, 0x46, 0x49, 0x46, 0x00, 0x01, 0x01, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x00, // APP0 + 0xFF, 0xDB, 0x00, 0x43, 0x00, // DQT + 0x08, 0x06, 0x06, 0x07, 0x06, 0x05, 0x08, 0x07, 0x07, 0x07, 0x09, 0x09, 0x08, 0x0A, 0x0C, 0x14, + 0x0D, 0x0C, 0x0B, 0x0B, 0x0C, 0x19, 0x12, 0x13, 0x0F, 0x14, 0x1D, 0x1A, 0x1F, 0x1E, 0x1D, 0x1A, + 0x1C, 
0x1C, 0x20, 0x24, 0x2E, 0x27, 0x20, 0x22, 0x2C, 0x23, 0x1C, 0x1C, 0x28, 0x37, 0x29, 0x2C, + 0x30, 0x31, 0x34, 0x34, 0x34, 0x1F, 0x27, 0x39, 0x3D, 0x38, 0x32, 0x3C, 0x2E, 0x33, 0x34, 0x32, + 0xFF, 0xC0, 0x00, 0x0B, 0x08, 0x00, 0x01, 0x00, 0x01, 0x01, 0x01, 0x11, 0x00, // SOF0 + 0xFF, 0xC4, 0x00, 0x1F, 0x00, 0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, // DHT + 0xFF, 0xDA, 0x00, 0x08, 0x01, 0x01, 0x00, 0x00, 0x3F, 0x00, 0x7F, 0xFF, // SOS + minimal scan data + 0xFF, 0xD9 // EOI + ]); + const filepath = path.join(dummyDir, filename); + fs.writeFileSync(filepath, jpgData); + return filepath; +} + +async function main() { + console.log('Creating dummy files...'); + + // Create dummy files + const files = { + idFront: createDummyPng('id_front.png'), + idBack: createDummyPng('id_back.png'), + selfie: createDummyJpg('selfie.jpg'), + passport: createDummyPng('passport.png'), + proofOfAddress: createDummyPdf('proof_of_address.pdf', 'Proof of Address'), + commercialRegister: createDummyPdf('commercial_register.pdf', 'Commercial Register Extract'), + bankStatement: createDummyPdf('bank_statement.pdf', 'Bank Statement'), + residencePermit: createDummyPng('residence_permit.png'), + sourceOfFunds: createDummyPdf('source_of_funds.pdf', 'Source of Funds Declaration'), + additionalDoc: createDummyPdf('additional_document.pdf', 'Additional Document'), + }; + + console.log(' Created dummy files in:', dummyDir); + Object.entries(files).forEach(([name, filepath]) => { + console.log(` - ${name}: ${path.basename(filepath)}`); + }); + + console.log('\nConnecting to database...'); + const pool = await mssql.connect(config); + + // Get user_data entries + const userDataResult = await pool.request().query(` + SELECT id, mail, kycLevel, firstname, surname + FROM user_data + WHERE mail LIKE '%@test.local' OR mail = 'bernd@dfx.swiss' + ORDER BY id + `); + + if 
(userDataResult.recordset.length === 0) { + console.log('No test user_data found. Please run scripts/testdata.js first.'); + await pool.close(); + return; + } + + console.log(`\nFound ${userDataResult.recordset.length} user_data entries for KYC test data.\n`); + + // KYC Step configurations for different KYC levels + const kycStepConfigs = { + // KYC Level 10: Contact + Personal data + 10: [ + { name: 'ContactData', status: 'Completed', result: JSON.stringify({ email: 'test@example.com', phone: '+41791234567' }) }, + { name: 'PersonalData', status: 'Completed', result: JSON.stringify({ address: 'Teststrasse 1', city: 'Zurich', zip: '8000' }) }, + ], + // KYC Level 20: + Nationality + 20: [ + { name: 'ContactData', status: 'Completed', result: JSON.stringify({ email: 'test@example.com', phone: '+41791234567' }) }, + { name: 'PersonalData', status: 'Completed', result: JSON.stringify({ address: 'Teststrasse 1', city: 'Zurich', zip: '8000' }) }, + { name: 'NationalityData', status: 'Completed', result: JSON.stringify({ nationality: 'CH' }) }, + ], + // KYC Level 30: + Ident + 30: [ + { name: 'ContactData', status: 'Completed', result: JSON.stringify({ email: 'test@example.com', phone: '+41791234567' }) }, + { name: 'PersonalData', status: 'Completed', result: JSON.stringify({ address: 'Teststrasse 1', city: 'Zurich', zip: '8000' }) }, + { name: 'NationalityData', status: 'Completed', result: JSON.stringify({ nationality: 'CH' }) }, + { name: 'Ident', type: 'Manual', status: 'Completed', result: JSON.stringify({ firstName: 'Max', lastName: 'Mueller', birthday: '1978-11-30', nationality: { symbol: 'CH' }, documentType: 'Passport', documentNumber: 'X1234567' }) }, + ], + // KYC Level 50: Full KYC + Financial + 50: [ + { name: 'ContactData', status: 'Completed', result: JSON.stringify({ email: 'test@example.com', phone: '+41791234567' }) }, + { name: 'PersonalData', status: 'Completed', result: JSON.stringify({ address: 'Teststrasse 1', city: 'Zurich', zip: '8000' }) }, + 
{ name: 'NationalityData', status: 'Completed', result: JSON.stringify({ nationality: 'CH' }) }, + { name: 'Ident', type: 'Manual', status: 'Completed', result: JSON.stringify({ firstName: 'Lisa', lastName: 'Weber', birthday: '1982-05-10', nationality: { symbol: 'CH' }, documentType: 'IdCard', documentNumber: 'C9876543' }) }, + { name: 'FinancialData', status: 'Completed', result: JSON.stringify({ annualIncome: '100000-200000', sourceOfFunds: 'Salary', occupation: 'Engineer' }) }, + { name: 'DfxApproval', status: 'Completed', result: JSON.stringify({ approved: true, approvedBy: 'system' }) }, + ], + }; + + // File configurations for different KYC steps + const fileConfigs = { + 'Ident': [ + { name: 'id_front.png', type: 'Identification', subType: 'IdentificationForm', protected: true }, + { name: 'id_back.png', type: 'Identification', subType: null, protected: true }, + { name: 'selfie.jpg', type: 'Identification', subType: null, protected: true }, + ], + 'FinancialData': [ + { name: 'source_of_funds.pdf', type: 'UserInformation', subType: 'RiskProfile', protected: false }, + { name: 'bank_statement.pdf', type: 'UserInformation', subType: 'BankTransactionVerification', protected: false }, + ], + 'DfxApproval': [ + { name: 'additional_document.pdf', type: 'AdditionalDocuments', subType: null, protected: false }, + ], + }; + + console.log('Creating KYC Steps and Files...\n'); + + for (const userData of userDataResult.recordset) { + const kycLevel = userData.kycLevel || 0; + const steps = kycStepConfigs[kycLevel]; + + if (!steps) { + console.log(` UserData ${userData.id} (${userData.mail}): KYC Level ${kycLevel} - no steps defined`); + continue; + } + + console.log(` UserData ${userData.id} (${userData.mail}): KYC Level ${kycLevel}`); + + for (let i = 0; i < steps.length; i++) { + const step = steps[i]; + + // Check if step already exists + const existingStep = await pool.request() + .input('userDataId', mssql.Int, userData.id) + .input('name', mssql.NVarChar, 
step.name) + .input('seqNum', mssql.Int, i + 1) + .query('SELECT id FROM kyc_step WHERE userDataId = @userDataId AND name = @name AND sequenceNumber = @seqNum'); + + let stepId; + if (existingStep.recordset.length > 0) { + stepId = existingStep.recordset[0].id; + console.log(` - Step ${step.name} already exists (id=${stepId})`); + } else { + const stepResult = await pool.request() + .input('userDataId', mssql.Int, userData.id) + .input('name', mssql.NVarChar, step.name) + .input('type', mssql.NVarChar, step.type || null) + .input('status', mssql.NVarChar, step.status) + .input('sequenceNumber', mssql.Int, i + 1) + .input('result', mssql.NVarChar, step.result || null) + .input('sessionId', mssql.NVarChar, uuid()) + .query(` + INSERT INTO kyc_step (userDataId, name, type, status, sequenceNumber, result, sessionId, created, updated) + OUTPUT INSERTED.id + VALUES (@userDataId, @name, @type, @status, @sequenceNumber, @result, @sessionId, GETUTCDATE(), GETUTCDATE()) + `); + stepId = stepResult.recordset[0].id; + console.log(` - Created Step ${step.name} (id=${stepId})`); + } + + // Create files for this step if configured + const fileConfigsForStep = fileConfigs[step.name]; + if (fileConfigsForStep) { + for (const fileConfig of fileConfigsForStep) { + const fileUid = uuid(); + + const existingFile = await pool.request() + .input('userDataId', mssql.Int, userData.id) + .input('name', mssql.NVarChar, fileConfig.name) + .input('kycStepId', mssql.Int, stepId) + .query('SELECT id FROM kyc_file WHERE userDataId = @userDataId AND name = @name AND kycStepId = @kycStepId'); + + if (existingFile.recordset.length > 0) { + console.log(` - File ${fileConfig.name} already exists`); + } else { + await pool.request() + .input('name', mssql.NVarChar, fileConfig.name) + .input('type', mssql.NVarChar, fileConfig.type) + .input('subType', mssql.NVarChar, fileConfig.subType) + .input('protected', mssql.Bit, fileConfig.protected) + .input('valid', mssql.Bit, true) + .input('uid', 
mssql.NVarChar, fileUid) + .input('userDataId', mssql.Int, userData.id) + .input('kycStepId', mssql.Int, stepId) + .query(` + INSERT INTO kyc_file (name, type, subType, protected, valid, uid, userDataId, kycStepId, created, updated) + VALUES (@name, @type, @subType, @protected, @valid, @uid, @userDataId, @kycStepId, GETUTCDATE(), GETUTCDATE()) + `); + console.log(` - Created File ${fileConfig.name} (uid=${fileUid.substring(0, 8)}...)`); + } + } + } + } + } + + // Create KYC Log entries + console.log('\nCreating KYC Log entries...'); + + const kycSteps = await pool.request().query('SELECT id, userDataId, name, status FROM kyc_step'); + + for (const step of kycSteps.recordset.slice(0, 5)) { + const existingLog = await pool.request() + .input('kycStepId', mssql.Int, step.id) + .query('SELECT id FROM kyc_log WHERE kycStepId = @kycStepId'); + + if (existingLog.recordset.length === 0) { + await pool.request() + .input('kycStepId', mssql.Int, step.id) + .input('status', mssql.NVarChar, step.status) + .input('result', mssql.NVarChar, 'System: KYC step processed') + .query(` + INSERT INTO kyc_log (kycStepId, status, result, created, updated) + VALUES (@kycStepId, @status, @result, GETUTCDATE(), GETUTCDATE()) + `); + console.log(` - Created log for step ${step.id} (${step.name})`); + } + } + + // Summary + console.log('\n========================================'); + console.log('KYC Test Data Creation Complete!'); + console.log('========================================\n'); + + const stepCount = await pool.request().query('SELECT COUNT(*) as c FROM kyc_step'); + const fileCount = await pool.request().query('SELECT COUNT(*) as c FROM kyc_file'); + const logCount = await pool.request().query('SELECT COUNT(*) as c FROM kyc_log'); + + console.log(` kyc_step: ${stepCount.recordset[0].c} rows`); + console.log(` kyc_file: ${fileCount.recordset[0].c} rows`); + console.log(` kyc_log: ${logCount.recordset[0].c} rows`); + console.log(`\n Dummy files location: ${dummyDir}`); + + await 
pool.close(); +} + +main().catch(e => { + console.error('Error:', e.message); + process.exit(1); +}); diff --git a/scripts/kyc/upload-kyc-files.js b/scripts/kyc/upload-kyc-files.js new file mode 100644 index 0000000000..3ad2d1910b --- /dev/null +++ b/scripts/kyc/upload-kyc-files.js @@ -0,0 +1,142 @@ +const axios = require('axios'); + +const API_URL = 'http://localhost:3000'; + +// Dummy file data (base64 encoded minimal files) +const DUMMY_FILES = { + // Minimal 1x1 red PNG + 'png': 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8DwHwAFBQIAX8jx0gAAAABJRU5ErkJggg==', + // Minimal PDF + 'pdf': 'JVBERi0xLjQKMSAwIG9iago8PCAvVHlwZSAvQ2F0YWxvZyAvUGFnZXMgMiAwIFIgPj4KZW5kb2JqCjIgMCBvYmoKPDwgL1R5cGUgL1BhZ2VzIC9LaWRzIFszIDAgUl0gL0NvdW50IDEgPj4KZW5kb2JqCjMgMCBvYmoKPDwgL1R5cGUgL1BhZ2UgL1BhcmVudCAyIDAgUiAvTWVkaWFCb3ggWzAgMCA2MTIgNzkyXSA+PgplbmRvYmoKeHJlZgowIDQKMDAwMDAwMDAwMCA2NTUzNSBmCjAwMDAwMDAwMDkgMDAwMDAgbgowMDAwMDAwMDU4IDAwMDAwIG4KMDAwMDAwMDExNSAwMDAwMCBuCnRyYWlsZXIKPDwgL1NpemUgNCAvUm9vdCAxIDAgUiA+PgpzdGFydHhyZWYKMTk1CiUlRU9G', + // Minimal JPEG (1x1 red pixel) + 'jpg': '/9j/4AAQSkZJRgABAQEASABIAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAABAAEDASIAAhEBAxEB/8QAFQABAQAAAAAAAAAAAAAAAAAAAAn/xAAUEAEAAAAAAAAAAAAAAAAAAAAA/8QAFQEBAQAAAAAAAAAAAAAAAAAAAAX/xAAUEQEAAAAAAAAAAAAAAAAAAAAA/9oADAMBEQCEAwEPwAB//9k=' +}; + +async function getAdminToken() { + const { ethers } = require('ethers'); + const ADMIN_SEED = 'ignore dish destroy upgrade stem pulse lucky tomato yard baby obvious cool'; + const wallet = ethers.Wallet.fromMnemonic(ADMIN_SEED); + + // Get sign message + const signMsgRes = await axios.get(`${API_URL}/v1/auth/signMessage?address=${wallet.address}`); + const message = signMsgRes.data.message; + + // Sign message + const signature = await wallet.signMessage(message); + + // Authenticate + const authRes = await 
axios.post(`${API_URL}/v1/auth`, { + address: wallet.address, + signature: signature + }); + + return authRes.data.accessToken; +} + +async function getKycCodes() { + const mssql = require('mssql'); + const config = { + user: process.env.SQL_USERNAME || 'sa', + password: process.env.SQL_PASSWORD || 'LocalDev2026@SQL', + server: 'localhost', + port: parseInt(process.env.SQL_PORT) || 1433, + database: process.env.SQL_DB || 'dfx', + options: { encrypt: false, trustServerCertificate: true } + }; + + const pool = await mssql.connect(config); + + const result = await pool.request().query(` + SELECT ud.id, ud.kycHash, ud.mail, ud.kycLevel, + ks.id as stepId, ks.name as stepName + FROM user_data ud + LEFT JOIN kyc_step ks ON ks.userDataId = ud.id AND ks.name = 'Ident' + WHERE ud.kycLevel >= 30 + ORDER BY ud.id + `); + + await pool.close(); + return result.recordset; +} + +async function uploadFileViaAPI(token, kycCode, stepId, fileData, fileName, fileType) { + try { + // The manual ident endpoint accepts file data + const response = await axios.put( + `${API_URL}/v2/kyc/ident/manual/${stepId}`, + { + firstName: 'Test', + lastName: 'User', + birthday: '1990-01-01', + nationality: { id: 1 }, + documentType: 'Passport', + documentNumber: 'X1234567', + documentFront: `data:image/${fileType};base64,${fileData}`, + documentBack: `data:image/${fileType};base64,${fileData}`, + selfie: `data:image/jpg;base64,${DUMMY_FILES.jpg}` + }, + { + headers: { + 'x-kyc-code': kycCode, + 'Authorization': `Bearer ${token}` + } + } + ); + return response.data; + } catch (e) { + console.log(` Error: ${e.response?.data?.message || e.message}`); + return null; + } +} + +async function main() { + console.log('KYC File Upload Script'); + console.log('======================\n'); + + try { + // Get admin token + console.log('Getting admin token...'); + const token = await getAdminToken(); + console.log(' Token obtained.\n'); + + // Get KYC codes for users with level >= 30 + console.log('Getting KYC 
codes...'); + const kycData = await getKycCodes(); + console.log(` Found ${kycData.length} entries.\n`); + + for (const entry of kycData) { + if (!entry.stepId) { + console.log(`Skipping ${entry.mail} - no Ident step`); + continue; + } + + console.log(`Processing ${entry.mail} (KYC Level ${entry.kycLevel}):`); + console.log(` KYC Code: ${entry.kycHash}`); + console.log(` Step ID: ${entry.stepId}`); + + // Try to upload via manual ident + const result = await uploadFileViaAPI( + token, + entry.kycHash, + entry.stepId, + DUMMY_FILES.png, + 'id_document.png', + 'png' + ); + + if (result) { + console.log(` Upload successful!`); + } + } + + console.log('\n========================================'); + console.log('Note: Files are stored in memory and will'); + console.log('be lost when the API restarts.'); + console.log('========================================\n'); + + } catch (e) { + console.error('Error:', e.message); + } +} + +main(); diff --git a/scripts/kyc/upload-kyc-files.sh b/scripts/kyc/upload-kyc-files.sh new file mode 100755 index 0000000000..ba9470a466 --- /dev/null +++ b/scripts/kyc/upload-kyc-files.sh @@ -0,0 +1,47 @@ +#!/bin/bash +set -e + +echo "๐Ÿ“ DFX KYC File Uploader" +echo "========================" +echo "" + +API_URL="${API_URL:-http://localhost:3000}" + +# Check if API is running +echo "๐Ÿ” Checking if API is running at $API_URL..." +if ! curl -s "$API_URL/v1/health" > /dev/null 2>&1; then + echo "โŒ API is not running at $API_URL" + echo "" + echo "Please start the API first with:" + echo " npm start" + echo "" + echo "Then run this script again." + exit 1 +fi +echo "โœ… API is running" +echo "" + +# Run kyc-storage.js +echo "๐Ÿ—„๏ธ Running kyc-storage.js..." +if [ -f "scripts/kyc/kyc-storage.js" ]; then + node scripts/kyc/kyc-storage.js + echo "" +else + echo "โš ๏ธ kyc-storage.js not found, skipping" +fi + +# Run upload-kyc-files.js +echo "๐Ÿ“ค Running upload-kyc-files.js..." 
+if [ -f "scripts/kyc/upload-kyc-files.js" ]; then + node scripts/kyc/upload-kyc-files.js + echo "" +else + echo "โš ๏ธ upload-kyc-files.js not found, skipping" +fi + +echo "" +echo "โœ… KYC file upload complete!" +echo "" +echo "โš ๏ธ Note: Files are stored in memory and will be lost when the API restarts." +echo " The API will return dummy images for missing files in local dev mode." +echo "" diff --git a/scripts/setup.sh b/scripts/setup.sh index ec4ef3147c..08bf88e798 100755 --- a/scripts/setup.sh +++ b/scripts/setup.sh @@ -94,6 +94,25 @@ for i in {1..30}; do echo -n "." done +# Seed test data +echo "" +echo "๐ŸŒฑ Seeding test data..." +if [ -f "scripts/testdata.js" ]; then + node scripts/testdata.js + echo "โœ… Test data seeded" +else + echo "โš ๏ธ testdata.js not found, skipping" +fi + +echo "" +echo "๐Ÿ” Seeding KYC test data..." +if [ -f "scripts/kyc/kyc-testdata.js" ]; then + node scripts/kyc/kyc-testdata.js + echo "โœ… KYC test data seeded" +else + echo "โš ๏ธ kyc-testdata.js not found, skipping" +fi + echo "" echo "โœ… Setup complete!" 
echo "" @@ -103,3 +122,6 @@ echo "" echo "๐Ÿ“ The server will be available at: http://localhost:3000" echo "๐Ÿ“ All external services are automatically mocked in local mode" echo "" +echo "๐Ÿ“ To upload KYC files (after API is running), run:" +echo " ./scripts/kyc/upload-kyc-files.sh" +echo "" diff --git a/scripts/testdata.js b/scripts/testdata.js new file mode 100644 index 0000000000..78a3baabe6 --- /dev/null +++ b/scripts/testdata.js @@ -0,0 +1,408 @@ +const mssql = require('mssql'); +const crypto = require('crypto'); + +// Safety check - only local +const dbHost = process.env.SQL_HOST || 'localhost'; +if (!['localhost', '127.0.0.1'].includes(dbHost)) { + console.error('This script only runs on localhost!'); + process.exit(1); +} + +const config = { + user: process.env.SQL_USERNAME || 'sa', + password: process.env.SQL_PASSWORD || 'LocalDev2026@SQL', + server: 'localhost', + port: parseInt(process.env.SQL_PORT) || 1433, + database: process.env.SQL_DB || 'dfx', + options: { encrypt: false, trustServerCertificate: true } +}; + +// Test addresses (not real wallets) +const TEST_ADDRESSES = { + EVM: [ + '0xTestUser2000000000000000000000000000002', + '0xTestUser3000000000000000000000000000003', + '0xTestUser4000000000000000000000000000004', + '0xTestUser5000000000000000000000000000005', + ], + BITCOIN: [ + 'bc1qTestBtcUser2000000000000000000002', + 'bc1qTestBtcUser3000000000000000000003', + ] +}; + +function uuid() { + return crypto.randomUUID().toUpperCase(); +} + +function bankUsage() { + const chars = 'ABCDEF0123456789'; + let result = ''; + for (let i = 0; i < 12; i++) { + if (i === 4 || i === 8) result += '-'; + result += chars[crypto.randomInt(chars.length)]; + } + return result; +} + +async function main() { + console.log('Connecting to database...'); + const pool = await mssql.connect(config); + + console.log('Creating test data...\n'); + + // Get existing IDs for foreign keys + const walletResult = await pool.request().query('SELECT TOP 1 id FROM wallet'); + 
const walletId = walletResult.recordset[0]?.id || 1; + + const langResult = await pool.request().query("SELECT id FROM language WHERE symbol = 'EN'"); + const languageId = langResult.recordset[0]?.id || 1; + + const chfResult = await pool.request().query("SELECT id FROM fiat WHERE name = 'CHF'"); + const chfId = chfResult.recordset[0]?.id || 1; + + const eurResult = await pool.request().query("SELECT id FROM fiat WHERE name = 'EUR'"); + const eurId = eurResult.recordset[0]?.id || 2; + + const countryResult = await pool.request().query("SELECT id FROM country WHERE symbol = 'CH'"); + const countryId = countryResult.recordset[0]?.id || 1; + + const deCountryResult = await pool.request().query("SELECT id FROM country WHERE symbol = 'DE'"); + const deCountryId = deCountryResult.recordset[0]?.id || 2; + + // Get some assets + const btcResult = await pool.request().query("SELECT id FROM asset WHERE name = 'BTC' AND blockchain = 'Bitcoin'"); + const btcId = btcResult.recordset[0]?.id; + + const ethResult = await pool.request().query("SELECT id FROM asset WHERE name = 'ETH' AND blockchain = 'Ethereum'"); + const ethId = ethResult.recordset[0]?.id; + + const usdtResult = await pool.request().query("SELECT id FROM asset WHERE name = 'USDT' AND blockchain = 'Ethereum'"); + const usdtId = usdtResult.recordset[0]?.id; + + console.log(`Using: walletId=${walletId}, langId=${languageId}, chfId=${chfId}, eurId=${eurId}`); + console.log(`Assets: BTC=${btcId}, ETH=${ethId}, USDT=${usdtId}\n`); + + // ============================================================ + // Create UserData entries + // ============================================================ + console.log('Creating UserData entries...'); + + const userDataConfigs = [ + { mail: 'kyc0@test.local', kycLevel: 0, kycStatus: 'NA', status: 'Active', firstname: 'Test', surname: 'NoKYC', countryId }, + { mail: 'kyc10@test.local', kycLevel: 10, kycStatus: 'NA', status: 'Active', firstname: 'Hans', surname: 'Muster', countryId, 
birthday: '1985-03-15', street: 'Bahnhofstrasse', houseNumber: '12', zip: '8001', location: 'Zürich' }, + { mail: 'kyc20@test.local', kycLevel: 20, kycStatus: 'NA', status: 'Active', firstname: 'Anna', surname: 'Schmidt', countryId: deCountryId, birthday: '1990-07-22', street: 'Hauptstrasse', houseNumber: '45a', zip: '10115', location: 'Berlin' }, + { mail: 'kyc30@test.local', kycLevel: 30, kycStatus: 'Completed', status: 'Active', firstname: 'Max', surname: 'Mueller', countryId, birthday: '1978-11-30', accountType: 'Personal', street: 'Limmatquai', houseNumber: '78', zip: '8001', location: 'Zürich' }, + { mail: 'kyc50@test.local', kycLevel: 50, kycStatus: 'Completed', status: 'Active', firstname: 'Lisa', surname: 'Weber', countryId, birthday: '1982-05-10', accountType: 'Personal', street: 'Paradeplatz', houseNumber: '1', zip: '8001', location: 'Zürich' }, + ]; + + const userDataIds = []; + for (const ud of userDataConfigs) { + const existing = await pool.request() + .input('mail', mssql.NVarChar, ud.mail) + .query('SELECT id FROM user_data WHERE mail = @mail'); + + if (existing.recordset.length > 0) { + userDataIds.push(existing.recordset[0].id); + console.log(` UserData ${ud.mail} already exists (id=${existing.recordset[0].id})`); + continue; + } + + const kycHash = uuid(); + const result = await pool.request() + .input('mail', mssql.NVarChar, ud.mail) + .input('firstname', mssql.NVarChar, ud.firstname) + .input('surname', mssql.NVarChar, ud.surname) + .input('street', mssql.NVarChar, ud.street || null) + .input('houseNumber', mssql.NVarChar, ud.houseNumber || null) + .input('zip', mssql.NVarChar, ud.zip || null) + .input('location', mssql.NVarChar, ud.location || null) + .input('kycHash', mssql.NVarChar, kycHash) + .input('kycLevel', mssql.Int, ud.kycLevel) + .input('kycStatus', mssql.NVarChar, ud.kycStatus) + .input('kycType', mssql.NVarChar, 'DFX') + .input('status', mssql.NVarChar, ud.status) + .input('riskStatus', mssql.NVarChar, 'NA') + 
.input('countryId', mssql.Int, ud.countryId || null) + .input('nationalityId', mssql.Int, ud.countryId || null) + .input('languageId', mssql.Int, languageId) + .input('currencyId', mssql.Int, chfId) + .input('walletId', mssql.Int, walletId) + .input('accountType', mssql.NVarChar, ud.accountType || null) + .input('birthday', mssql.Date, ud.birthday || null) + .query(` + INSERT INTO user_data (mail, firstname, surname, street, houseNumber, zip, location, kycHash, kycLevel, kycStatus, kycType, status, riskStatus, + countryId, nationalityId, languageId, currencyId, walletId, accountType, birthday, created, updated) + OUTPUT INSERTED.id + VALUES (@mail, @firstname, @surname, @street, @houseNumber, @zip, @location, @kycHash, @kycLevel, @kycStatus, @kycType, @status, @riskStatus, + @countryId, @nationalityId, @languageId, @currencyId, @walletId, @accountType, @birthday, GETUTCDATE(), GETUTCDATE()) + `); + + userDataIds.push(result.recordset[0].id); + console.log(` Created UserData: ${ud.mail} (id=${result.recordset[0].id}, kycLevel=${ud.kycLevel})`); + } + + // ============================================================ + // Create Users + // ============================================================ + console.log('\nCreating Users...'); + + const userConfigs = [ + { address: TEST_ADDRESSES.EVM[0], addressType: 'EVM', role: 'User', userDataIdx: 0 }, + { address: TEST_ADDRESSES.EVM[1], addressType: 'EVM', role: 'User', userDataIdx: 1 }, + { address: TEST_ADDRESSES.EVM[2], addressType: 'EVM', role: 'User', userDataIdx: 2 }, + { address: TEST_ADDRESSES.EVM[3], addressType: 'EVM', role: 'VIP', userDataIdx: 3 }, + { address: TEST_ADDRESSES.BITCOIN[0], addressType: 'Bitcoin', role: 'User', userDataIdx: 4 }, + ]; + + const userIds = []; + for (const u of userConfigs) { + const existing = await pool.request() + .input('address', mssql.NVarChar, u.address) + .query('SELECT id FROM [user] WHERE address = @address'); + + if (existing.recordset.length > 0) { + 
userIds.push(existing.recordset[0].id); + console.log(` User ${u.address.substring(0, 20)}... already exists (id=${existing.recordset[0].id})`); + continue; + } + + const result = await pool.request() + .input('address', mssql.NVarChar, u.address) + .input('addressType', mssql.NVarChar, u.addressType) + .input('role', mssql.NVarChar, u.role) + .input('status', mssql.NVarChar, 'Active') + .input('usedRef', mssql.NVarChar, '000-000') + .input('walletId', mssql.Int, walletId) + .input('userDataId', mssql.Int, userDataIds[u.userDataIdx]) + .input('refFeePercent', mssql.Float, 0.25) + .query(` + INSERT INTO [user] (address, addressType, role, status, usedRef, walletId, userDataId, refFeePercent, + buyVolume, annualBuyVolume, monthlyBuyVolume, sellVolume, annualSellVolume, monthlySellVolume, + cryptoVolume, annualCryptoVolume, monthlyCryptoVolume, refVolume, refCredit, paidRefCredit, created, updated) + OUTPUT INSERTED.id + VALUES (@address, @addressType, @role, @status, @usedRef, @walletId, @userDataId, @refFeePercent, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, GETUTCDATE(), GETUTCDATE()) + `); + + userIds.push(result.recordset[0].id); + console.log(` Created User: ${u.address.substring(0, 20)}... 
(id=${result.recordset[0].id}, role=${u.role})`); + } + + // ============================================================ + // Create Routes (for Buy entities) + // ============================================================ + console.log('\nCreating Routes...'); + + const routeIds = []; + for (let i = 0; i < 4; i++) { + const result = await pool.request() + .input('label', mssql.NVarChar, `TestRoute${i + 2}`) + .query(` + INSERT INTO route (label, created, updated) + OUTPUT INSERTED.id + VALUES (@label, GETUTCDATE(), GETUTCDATE()) + `); + routeIds.push(result.recordset[0].id); + } + console.log(` Created ${routeIds.length} routes`); + + // ============================================================ + // Create Buy routes + // ============================================================ + console.log('\nCreating Buy routes...'); + + const buyConfigs = [ + { userId: userIds[0], assetId: btcId }, + { userId: userIds[0], assetId: ethId }, + { userId: userIds[1], assetId: btcId }, + { userId: userIds[2], assetId: usdtId }, + ]; + + for (let i = 0; i < buyConfigs.length; i++) { + const b = buyConfigs[i]; + if (!b.assetId) continue; + + const usage = bankUsage(); + const existing = await pool.request() + .input('userId', mssql.Int, b.userId) + .input('assetId', mssql.Int, b.assetId) + .query('SELECT id FROM buy WHERE userId = @userId AND assetId = @assetId'); + + if (existing.recordset.length > 0) { + console.log(` Buy route for user ${b.userId}, asset ${b.assetId} already exists`); + continue; + } + + await pool.request() + .input('bankUsage', mssql.NVarChar, usage) + .input('userId', mssql.Int, b.userId) + .input('assetId', mssql.Int, b.assetId) + .input('routeId', mssql.Int, routeIds[i]) + .input('active', mssql.Bit, true) + .query(` + INSERT INTO buy (bankUsage, userId, assetId, routeId, active, volume, annualVolume, monthlyVolume, created, updated) + VALUES (@bankUsage, @userId, @assetId, @routeId, @active, 0, 0, 0, GETUTCDATE(), GETUTCDATE()) + `); + + 
console.log(` Created Buy route: user=${b.userId}, asset=${b.assetId}, usage=${usage}`); + } + + // ============================================================ + // Create BankData entries + // ============================================================ + console.log('\nCreating BankData entries...'); + + const bankDataConfigs = [ + { userDataId: userDataIds[2], iban: 'CH93 0076 2011 6238 5295 7', name: 'Anna Schmidt' }, + { userDataId: userDataIds[3], iban: 'DE89 3704 0044 0532 0130 00', name: 'Max Mueller' }, + { userDataId: userDataIds[4], iban: 'CH56 0483 5012 3456 7800 9', name: 'Lisa Weber' }, + ]; + + const bankDataIds = []; + for (const bd of bankDataConfigs) { + const cleanIban = bd.iban.replace(/\s/g, ''); + const existing = await pool.request() + .input('iban', mssql.NVarChar, cleanIban) + .query('SELECT id FROM bank_data WHERE iban = @iban'); + + if (existing.recordset.length > 0) { + bankDataIds.push(existing.recordset[0].id); + console.log(` BankData ${cleanIban} already exists`); + continue; + } + + const result = await pool.request() + .input('iban', mssql.NVarChar, cleanIban) + .input('name', mssql.NVarChar, bd.name) + .input('userDataId', mssql.Int, bd.userDataId) + .input('approved', mssql.Bit, true) + .query(` + INSERT INTO bank_data (iban, name, userDataId, approved, created, updated) + OUTPUT INSERTED.id + VALUES (@iban, @name, @userDataId, @approved, GETUTCDATE(), GETUTCDATE()) + `); + + bankDataIds.push(result.recordset[0].id); + console.log(` Created BankData: ${cleanIban}`); + } + + // ============================================================ + // Create Deposits (crypto addresses) + // ============================================================ + console.log('\nCreating Deposit addresses...'); + + const depositConfigs = [ + { address: '0xDeposit000000000000000000000000000001', blockchains: 'Ethereum;Arbitrum;Optimism;Polygon;Base' }, + { address: '0xDeposit000000000000000000000000000002', blockchains: 
'Ethereum;Arbitrum;Optimism;Polygon;Base' }, + { address: 'bc1qdeposit0000000000000000000001', blockchains: 'Bitcoin' }, + ]; + + for (const d of depositConfigs) { + const existing = await pool.request() + .input('address', mssql.NVarChar, d.address) + .query('SELECT id FROM deposit WHERE address = @address'); + + if (existing.recordset.length > 0) { + console.log(` Deposit ${d.address.substring(0, 25)}... already exists`); + continue; + } + + await pool.request() + .input('address', mssql.NVarChar, d.address) + .input('blockchains', mssql.NVarChar, d.blockchains) + .query(` + INSERT INTO deposit (address, blockchains, created, updated) + VALUES (@address, @blockchains, GETUTCDATE(), GETUTCDATE()) + `); + + console.log(` Created Deposit: ${d.address.substring(0, 25)}...`); + } + + // ============================================================ + // Create Transactions + // ============================================================ + console.log('\nCreating Transaction entries...'); + + const txConfigs = [ + { userId: userIds[0], userDataId: userDataIds[0], sourceType: 'BuyCrypto', amountInChf: 500, amlCheck: 'Pass' }, + { userId: userIds[0], userDataId: userDataIds[0], sourceType: 'BuyCrypto', amountInChf: 1200, amlCheck: 'Pass' }, + { userId: userIds[1], userDataId: userDataIds[1], sourceType: 'BuyCrypto', amountInChf: 2500, amlCheck: 'Pass' }, + { userId: userIds[2], userDataId: userDataIds[2], sourceType: 'BuyFiat', amountInChf: 800, amlCheck: 'Pass' }, + { userId: userIds[3], userDataId: userDataIds[3], sourceType: 'BuyCrypto', amountInChf: 5000, amlCheck: 'Pass' }, + { userId: userIds[3], userDataId: userDataIds[3], sourceType: 'BuyFiat', amountInChf: 3500, amlCheck: 'Pass' }, + ]; + + for (const tx of txConfigs) { + const uid = uuid(); + + await pool.request() + .input('uid', mssql.NVarChar, uid) + .input('sourceType', mssql.NVarChar, tx.sourceType) + .input('userId', mssql.Int, tx.userId) + .input('userDataId', mssql.Int, tx.userDataId) + 
.input('amountInChf', mssql.Float, tx.amountInChf) + .input('amlCheck', mssql.NVarChar, tx.amlCheck) + .input('eventDate', mssql.DateTime2, new Date()) + .query(` + INSERT INTO [transaction] (uid, sourceType, userId, userDataId, amountInChf, amlCheck, eventDate, created, updated) + VALUES (@uid, @sourceType, @userId, @userDataId, @amountInChf, @amlCheck, @eventDate, GETUTCDATE(), GETUTCDATE()) + `); + + console.log(` Created Transaction: ${tx.sourceType}, CHF ${tx.amountInChf}, user=${tx.userId}`); + } + + // ============================================================ + // Create BankTx entries (incoming bank transfers) + // ============================================================ + console.log('\nCreating BankTx entries...'); + + const bankResult = await pool.request().query("SELECT TOP 1 id FROM bank WHERE receive = 1"); + const bankId = bankResult.recordset[0]?.id; + + if (bankId) { + const bankTxConfigs = [ + { accountIban: 'CH9300762011623852957', name: 'Anna Schmidt', amount: 500, currency: 'CHF', type: 'BuyCrypto' }, + { accountIban: 'DE89370400440532013000', name: 'Max Mueller', amount: 1000, currency: 'EUR', type: 'BuyCrypto' }, + { accountIban: 'CH5604835012345678009', name: 'Lisa Weber', amount: 2000, currency: 'CHF', type: 'BuyCrypto' }, + ]; + + for (const btx of bankTxConfigs) { + await pool.request() + .input('bankId', mssql.Int, bankId) + .input('accountIban', mssql.NVarChar, btx.accountIban) + .input('name', mssql.NVarChar, btx.name) + .input('amount', mssql.Float, btx.amount) + .input('currency', mssql.NVarChar, btx.currency) + .input('type', mssql.NVarChar, btx.type) + .input('creditDebitIndicator', mssql.NVarChar, 'CRDT') + .query(` + INSERT INTO bank_tx (bankId, accountIban, name, amount, currency, type, creditDebitIndicator, created, updated) + VALUES (@bankId, @accountIban, @name, @amount, @currency, @type, @creditDebitIndicator, GETUTCDATE(), GETUTCDATE()) + `); + + console.log(` Created BankTx: ${btx.name}, ${btx.currency} 
${btx.amount}`); + } + } + + // ============================================================ + // Summary + // ============================================================ + console.log('\n========================================'); + console.log('Test data creation complete!'); + console.log('========================================\n'); + + // Show counts + const tables = ['user_data', '[user]', 'buy', 'bank_data', 'deposit', '[transaction]', 'bank_tx']; + for (const t of tables) { + const count = await pool.request().query(`SELECT COUNT(*) as c FROM ${t}`); + console.log(` ${t.replace('[', '').replace(']', '')}: ${count.recordset[0].c} rows`); + } + + await pool.close(); +} + +main().catch(e => { + console.error('Error:', e.message); + process.exit(1); +}); diff --git a/src/integration/exchange/services/exchange-tx.service.ts b/src/integration/exchange/services/exchange-tx.service.ts index 70fa6a1aa4..8c8a581f71 100644 --- a/src/integration/exchange/services/exchange-tx.service.ts +++ b/src/integration/exchange/services/exchange-tx.service.ts @@ -44,9 +44,8 @@ export class ExchangeTxService { async syncExchanges(from?: Date, exchange?: ExchangeName) { const syncs = ExchangeSyncs.filter((s) => !exchange || s.exchange === exchange); - const since = from ?? 
Util.minutesBefore(Config.exchangeTxSyncLimit); - const transactions = await Promise.all(syncs.map((s) => this.getTransactionsFor(s, since))).then((tx) => tx.flat()); + const transactions = await Promise.all(syncs.map((s) => this.getTransactionsFor(s, from))).then((tx) => tx.flat()); // sort by date transactions.sort((a, b) => a.externalCreated.getTime() - b.externalCreated.getTime()); @@ -120,8 +119,25 @@ export class ExchangeTxService { }); } - private async getTransactionsFor(sync: ExchangeSync, since: Date): Promise { + private async getSyncSinceDate(exchange: ExchangeName): Promise { + const defaultSince = Util.minutesBefore(Config.exchangeTxSyncLimit); + + const oldestPending = await this.exchangeTxRepo.findOne({ + where: { exchange, status: 'pending' }, + order: { externalCreated: 'ASC' }, + }); + + if (!oldestPending?.externalCreated) return defaultSince; + + // Add 1 hour buffer to account for timing differences + const pendingSince = Util.hoursBefore(1, oldestPending.externalCreated); + + return pendingSince < defaultSince ? pendingSince : defaultSince; + } + + private async getTransactionsFor(sync: ExchangeSync, from?: Date): Promise { try { + const since = from ?? 
(await this.getSyncSinceDate(sync.exchange)); const exchangeService = this.registryService.getExchange(sync.exchange); // Scrypt special case diff --git a/src/integration/infrastructure/azure-service.ts b/src/integration/infrastructure/azure-service.ts deleted file mode 100644 index 03ee1e2d75..0000000000 --- a/src/integration/infrastructure/azure-service.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { Injectable } from '@nestjs/common'; -import { Method } from 'axios'; -import { Config } from 'src/config/config'; -import { HttpService } from 'src/shared/services/http.service'; - -@Injectable() -export class AzureService { - private readonly baseUrl = `https://management.azure.com`; - private readonly apiVersion = '2022-03-01'; - - private accessToken = 'access-token-will-be-updated'; - - constructor(private readonly http: HttpService) {} - - public async restartWebApp(name: string, slot?: string) { - const appName = `app-dfx-${name}-${Config.environment}${slot ? `/slots/${slot}` : ''}`; - const resourceId = this.resourceId('Microsoft.Web/sites', appName); - return this.callApi(`${resourceId}/restart`, 'POST'); - } - - // --- HELPER METHODS --- // - private resourceId(provider: string, name: string): string { - return `subscriptions/${Config.azure.subscriptionId}/resourceGroups/rg-dfx-api-${Config.environment}/providers/${provider}/${name}`; - } - - private async callApi(url: string, method: Method = 'GET', data?: any): Promise { - return this.request(url, method, data); - } - - private async request(url: string, method: Method, data?: any, nthTry = 3, getNewAccessToken = false): Promise { - try { - if (getNewAccessToken) this.accessToken = await this.getAccessToken(); - - return await this.http.request({ - url: `${this.baseUrl}/${url}?api-version=${this.apiVersion}`, - method: method, - data: method !== 'GET' ? data : undefined, - params: method === 'GET' ? 
data : undefined, - headers: { - Authorization: `Bearer ${this.accessToken}`, - }, - }); - } catch (e) { - if (nthTry > 1 && e.response?.status == 401) { - return this.request(url, method, data, nthTry - 1, true); - } - throw e; - } - } - - private async getAccessToken(): Promise { - const { access_token } = await this.http.post<{ access_token: string }>( - `https://login.microsoftonline.com/${Config.azure.tenantId}/oauth2/token`, - new URLSearchParams({ - grant_type: 'client_credentials', - client_id: Config.azure.clientId, - client_secret: Config.azure.clientSecret, - resource: 'https://management.azure.com', - }), - ); - return access_token; - } -} diff --git a/src/integration/infrastructure/azure-storage.service.ts b/src/integration/infrastructure/azure-storage.service.ts index cf12862aaf..9d17cced65 100644 --- a/src/integration/infrastructure/azure-storage.service.ts +++ b/src/integration/infrastructure/azure-storage.service.ts @@ -1,4 +1,6 @@ import { BlobGetPropertiesResponse, BlobServiceClient, ContainerClient } from '@azure/storage-blob'; +import * as fs from 'fs'; +import * as path from 'path'; import { Config, Environment, GetConfig } from 'src/config/config'; import { DfxLogger } from 'src/shared/services/dfx-logger'; @@ -21,6 +23,15 @@ export interface BlobContent extends BlobMetaData { // In-memory storage for local development const mockStorage = new Map }>(); +// Dummy files directory for local development +const DUMMY_FILES_DIR = path.join(process.cwd(), 'scripts', 'kyc', 'dummy-files'); + +// Load dummy file from disk +function loadDummyFile(filename: string): Buffer { + const filePath = path.join(DUMMY_FILES_DIR, filename); + return fs.readFileSync(filePath); +} + export class AzureStorageService { private readonly logger = new DfxLogger(AzureStorageService); private readonly client: ContainerClient; @@ -86,12 +97,56 @@ export class AzureStorageService { if (this.isMockMode) { const key = `${this.container}/${name}`; const stored = 
mockStorage.get(key); + + // Return stored data if available, otherwise return dummy test data based on file extension + if (stored) { + return { + data: stored.data, + contentType: stored.type, + created: new Date(), + updated: new Date(), + metadata: stored.metadata ?? {}, + }; + } + + // Provide dummy data for missing files in local dev mode + const ext = name.split('.').pop()?.toLowerCase(); + const filename = name.split('/').pop() ?? name; + + // Map common KYC file names to dummy files + const dummyFileMap: Record = { + 'id_front.png': { file: 'id_front.png', type: 'image/png' }, + 'id_back.png': { file: 'id_back.png', type: 'image/png' }, + 'selfie.jpg': { file: 'selfie.jpg', type: 'image/jpeg' }, + 'passport.png': { file: 'passport.png', type: 'image/png' }, + 'residence_permit.png': { file: 'residence_permit.png', type: 'image/png' }, + 'proof_of_address.pdf': { file: 'proof_of_address.pdf', type: 'application/pdf' }, + 'bank_statement.pdf': { file: 'bank_statement.pdf', type: 'application/pdf' }, + 'source_of_funds.pdf': { file: 'source_of_funds.pdf', type: 'application/pdf' }, + 'commercial_register.pdf': { file: 'commercial_register.pdf', type: 'application/pdf' }, + 'additional_document.pdf': { file: 'additional_document.pdf', type: 'application/pdf' }, + }; + + const mapping = dummyFileMap[filename]; + if (mapping) { + return { + data: loadDummyFile(mapping.file), + contentType: mapping.type, + created: new Date(), + updated: new Date(), + metadata: {}, + }; + } + + // Fallback based on extension + const isJpg = ext === 'jpg' || ext === 'jpeg'; + const isPdf = ext === 'pdf'; return { - data: stored?.data ?? Buffer.from(''), - contentType: stored?.type ?? 'application/octet-stream', + data: loadDummyFile(isPdf ? 'proof_of_address.pdf' : isJpg ? 'selfie.jpg' : 'id_front.png'), + contentType: isPdf ? 'application/pdf' : isJpg ? 'image/jpeg' : 'image/png', created: new Date(), updated: new Date(), - metadata: stored?.metadata ?? 
{}, + metadata: {}, }; } diff --git a/src/integration/integration.module.ts b/src/integration/integration.module.ts index 549bdb7957..551d89f7eb 100644 --- a/src/integration/integration.module.ts +++ b/src/integration/integration.module.ts @@ -6,7 +6,6 @@ import { CheckoutModule } from './checkout/checkout.module'; import { ExchangeModule } from './exchange/exchange.module'; import { IknaModule } from './ikna/ikna.module'; import { AppInsightsQueryService } from './infrastructure/app-insights-query.service'; -import { AzureService } from './infrastructure/azure-service'; import { LetterModule } from './letter/letter.module'; import { SiftModule } from './sift/sift.module'; @@ -22,7 +21,7 @@ import { SiftModule } from './sift/sift.module'; SiftModule, ], controllers: [], - providers: [AzureService, AppInsightsQueryService], + providers: [AppInsightsQueryService], exports: [ BankIntegrationModule, BlockchainModule, @@ -30,7 +29,6 @@ import { SiftModule } from './sift/sift.module'; LetterModule, IknaModule, CheckoutModule, - AzureService, AppInsightsQueryService, SiftModule, ], diff --git a/src/subdomains/core/liquidity-management/adapters/actions/scrypt.adapter.ts b/src/subdomains/core/liquidity-management/adapters/actions/scrypt.adapter.ts index dcc8d903e9..88e8831dfb 100644 --- a/src/subdomains/core/liquidity-management/adapters/actions/scrypt.adapter.ts +++ b/src/subdomains/core/liquidity-management/adapters/actions/scrypt.adapter.ts @@ -1,6 +1,6 @@ import { Injectable } from '@nestjs/common'; import { Blockchain } from 'src/integration/blockchain/shared/enums/blockchain.enum'; -import { ScryptOrderSide, ScryptTransactionStatus } from 'src/integration/exchange/dto/scrypt.dto'; +import { ScryptOrderInfo, ScryptOrderSide, ScryptTransactionStatus } from 'src/integration/exchange/dto/scrypt.dto'; import { TradeChangedException } from 'src/integration/exchange/exceptions/trade-changed.exception'; import { ScryptService } from 
'src/integration/exchange/services/scrypt.service'; import { Asset } from 'src/shared/models/asset/asset.entity'; @@ -260,14 +260,19 @@ export class ScryptAdapter extends LiquidityActionAdapter { throw new OrderFailedException(`Failed to fetch any orders for order ${order.id}`); } - // For SELL: output is the proceeds (filledQuantity * avgPrice) - return orders.reduce((sum, o) => { - if (o.filledQuantity > 0) { - const output = o.avgPrice ? o.filledQuantity * o.avgPrice : o.filledQuantity; - return sum + output; - } - return sum; - }, 0); + return orders.reduce((sum, o) => sum + this.calculateOrderOutput(o), 0); + } + + private calculateOrderOutput(order: ScryptOrderInfo): number { + if (order.filledQuantity <= 0) return 0; + + if (order.side === ScryptOrderSide.BUY) { + // BUY: output is base currency = filledQuantity + return order.filledQuantity; + } else { + // SELL: output is quote currency = filledQuantity * avgPrice + return order.avgPrice ? order.filledQuantity * order.avgPrice : order.filledQuantity; + } } // --- PARAM VALIDATION --- // diff --git a/src/subdomains/core/sell-crypto/route/sell.service.ts b/src/subdomains/core/sell-crypto/route/sell.service.ts index b615cec990..4372443e26 100644 --- a/src/subdomains/core/sell-crypto/route/sell.service.ts +++ b/src/subdomains/core/sell-crypto/route/sell.service.ts @@ -132,6 +132,13 @@ export class SellService { return sells.filter((s) => s.deposit.blockchainList.some((b) => sellableBlockchains.includes(b))); } + async getSellsByUserDataId(userDataId: number): Promise { + return this.sellRepo.find({ + where: { user: { userData: { id: userDataId } } }, + relations: { fiat: true, user: true }, + }); + } + async getSellWithoutRoute(): Promise { return this.sellRepo.findBy({ route: { id: IsNull() } }); } diff --git a/src/subdomains/generic/kyc/dto/kyc-error.enum.ts b/src/subdomains/generic/kyc/dto/kyc-error.enum.ts index af8b7bb8cf..d77a5f076f 100644 --- a/src/subdomains/generic/kyc/dto/kyc-error.enum.ts +++ 
b/src/subdomains/generic/kyc/dto/kyc-error.enum.ts @@ -34,8 +34,9 @@ export enum KycError { RECOMMENDER_BLOCKED = 'RecommenderBlocked', // FinancialData errors - MISSING_RESPONSE = 'MissingResponse', + MISSING_INFO = 'MissingInfo', RISKY_BUSINESS = 'RiskyBusiness', + INCORRECT_INFO = 'IncorrectInfo', // NationalityData errors NATIONALITY_NOT_MATCHING = 'NationalityNotMatching', @@ -79,12 +80,13 @@ export const KycErrorMap: Record = { [KycError.USER_DATA_DEACTIVATED]: 'Account deactivated', [KycError.IP_COUNTRY_MISMATCH]: 'Regulatory requirements not met', [KycError.COUNTRY_IP_COUNTRY_MISMATCH]: 'Regulatory requirements not met', - [KycError.MISSING_RESPONSE]: 'Missing data', + [KycError.MISSING_INFO]: 'Missing data', [KycError.RISKY_BUSINESS]: 'Your business is involved in risky business', [KycError.DENIED_RECOMMENDATION]: 'Your recommendation request was denied', [KycError.EXPIRED_RECOMMENDATION]: 'Your recommendation request is expired', [KycError.RECOMMENDER_BLOCKED]: 'Unknown error', [KycError.BANK_RECALL_FEE_NOT_PAID]: 'Recall fee not paid', + [KycError.INCORRECT_INFO]: 'Incorrect response', }; export const KycReasonMap: { [e in KycError]?: KycStepReason } = { diff --git a/src/subdomains/generic/kyc/services/kyc.service.ts b/src/subdomains/generic/kyc/services/kyc.service.ts index 6006338b92..e8cadda9be 100644 --- a/src/subdomains/generic/kyc/services/kyc.service.ts +++ b/src/subdomains/generic/kyc/services/kyc.service.ts @@ -297,7 +297,7 @@ export class KycService { if (errors.some((e) => KycStepIgnoringErrors.includes(e))) { await this.kycStepRepo.update(...entity.ignored(comment)); - } else if (errors.includes(KycError.MISSING_RESPONSE)) { + } else if (errors.includes(KycError.MISSING_INFO)) { await this.kycStepRepo.update(...entity.inProgress()); await this.kycNotificationService.kycStepMissingData( entity.userData, @@ -1228,6 +1228,13 @@ export class KycService { return this.kycStepRepo.findOne({ where: { id }, relations: { userData: true } }); } + async 
getStepsByUserData(userDataId: number): Promise { + return this.kycStepRepo.find({ + where: { userData: { id: userDataId } }, + order: { sequenceNumber: 'ASC' }, + }); + } + async saveKycStepUpdate(updateResult: UpdateResult): Promise { await this.kycStepRepo.update(...updateResult); } @@ -1348,7 +1355,7 @@ export class KycService { const financialStepResult = entity.getResult(); if (!FinancialService.isComplete(financialStepResult, entity.userData.accountType)) - errors.push(KycError.MISSING_RESPONSE); + errors.push(KycError.MISSING_INFO); if (!financialStepResult.some((f) => f.key === 'risky_business' && f.value.includes('no'))) errors.push(KycError.RISKY_BUSINESS); diff --git a/src/subdomains/generic/support/dto/user-data-support.dto.ts b/src/subdomains/generic/support/dto/user-data-support.dto.ts index f8c2e0417c..69c97f46e3 100644 --- a/src/subdomains/generic/support/dto/user-data-support.dto.ts +++ b/src/subdomains/generic/support/dto/user-data-support.dto.ts @@ -30,9 +30,65 @@ export class BankTxSupportInfo { iban?: string; } +export class UserSupportInfo { + id: number; + address: string; + role: string; + status: string; + created: Date; +} + +export class TransactionSupportInfo { + id: number; + uid: string; + type?: string; + sourceType: string; + amountInChf?: number; + amlCheck?: string; + created: Date; +} + +export class KycStepSupportInfo { + id: number; + name: string; + type?: string; + status: string; + sequenceNumber: number; + created: Date; +} + +export class BankDataSupportInfo { + id: number; + iban: string; + name: string; + approved: boolean; +} + +export class BuySupportInfo { + id: number; + bankUsage: string; + assetName: string; + blockchain: string; + volume: number; + active: boolean; +} + +export class SellSupportInfo { + id: number; + iban: string; + fiatName?: string; + volume: number; +} + export class UserDataSupportInfoDetails { userData: UserData; kycFiles: KycFile[]; + kycSteps: KycStepSupportInfo[]; + transactions: 
TransactionSupportInfo[]; + users: UserSupportInfo[]; + bankDatas: BankDataSupportInfo[]; + buyRoutes: BuySupportInfo[]; + sellRoutes: SellSupportInfo[]; } export class UserDataSupportQuery { diff --git a/src/subdomains/generic/support/support.service.ts b/src/subdomains/generic/support/support.service.ts index b167df9b09..87938cc2d2 100644 --- a/src/subdomains/generic/support/support.service.ts +++ b/src/subdomains/generic/support/support.service.ts @@ -4,11 +4,13 @@ import * as IbanTools from 'ibantools'; import { Config } from 'src/config/config'; import { Util } from 'src/shared/utils/util'; import { BuyCryptoService } from 'src/subdomains/core/buy-crypto/process/services/buy-crypto.service'; +import { Buy } from 'src/subdomains/core/buy-crypto/routes/buy/buy.entity'; import { BuyService } from 'src/subdomains/core/buy-crypto/routes/buy/buy.service'; import { SwapService } from 'src/subdomains/core/buy-crypto/routes/swap/swap.service'; import { RefundDataDto } from 'src/subdomains/core/history/dto/refund-data.dto'; import { BankRefundDto } from 'src/subdomains/core/history/dto/transaction-refund.dto'; import { BuyFiatService } from 'src/subdomains/core/sell-crypto/process/services/buy-fiat.service'; +import { Sell } from 'src/subdomains/core/sell-crypto/route/sell.entity'; import { SellService } from 'src/subdomains/core/sell-crypto/route/sell.service'; import { BankTxReturnService } from 'src/subdomains/supporting/bank-tx/bank-tx-return/bank-tx-return.service'; import { @@ -20,20 +22,31 @@ import { BankTxService } from 'src/subdomains/supporting/bank-tx/bank-tx/service import { BankService } from 'src/subdomains/supporting/bank/bank/bank.service'; import { VirtualIbanService } from 'src/subdomains/supporting/bank/virtual-iban/virtual-iban.service'; import { PayInService } from 'src/subdomains/supporting/payin/services/payin.service'; +import { Transaction } from 'src/subdomains/supporting/payment/entities/transaction.entity'; import { TransactionHelper } from 
'src/subdomains/supporting/payment/services/transaction-helper'; import { TransactionService } from 'src/subdomains/supporting/payment/services/transaction.service'; +import { KycStep } from '../kyc/entities/kyc-step.entity'; import { KycFileService } from '../kyc/services/kyc-file.service'; +import { KycService } from '../kyc/services/kyc.service'; +import { BankData } from '../user/models/bank-data/bank-data.entity'; import { BankDataService } from '../user/models/bank-data/bank-data.service'; import { UserData } from '../user/models/user-data/user-data.entity'; import { UserDataService } from '../user/models/user-data/user-data.service'; +import { User } from '../user/models/user/user.entity'; import { UserService } from '../user/models/user/user.service'; import { + BankDataSupportInfo, BankTxSupportInfo, + BuySupportInfo, ComplianceSearchType, + KycStepSupportInfo, + SellSupportInfo, + TransactionSupportInfo, UserDataSupportInfo, UserDataSupportInfoDetails, UserDataSupportInfoResult, UserDataSupportQuery, + UserSupportInfo, } from './dto/user-data-support.dto'; interface UserDataComplianceSearchTypePair { @@ -56,6 +69,7 @@ export class SupportService { private readonly bankTxService: BankTxService, private readonly payInService: PayInService, private readonly kycFileService: KycFileService, + private readonly kycService: KycService, private readonly bankDataService: BankDataService, private readonly bankTxReturnService: BankTxReturnService, private readonly transactionService: TransactionService, @@ -69,9 +83,91 @@ export class SupportService { const userData = await this.userDataService.getUserData(id, { wallet: true, bankDatas: true }); if (!userData) throw new NotFoundException(`User not found`); - const kycFiles = await this.kycFileService.getUserDataKycFiles(id); + // Load all related data in parallel + const [kycFiles, kycSteps, transactions, users, bankDatas, buyRoutes, sellRoutes] = await Promise.all([ + this.kycFileService.getUserDataKycFiles(id), + 
this.kycService.getStepsByUserData(id), + this.transactionService.getTransactionsByUserDataId(id), + this.userService.getAllUserDataUsers(id), + this.bankDataService.getBankDatasByUserData(id), + this.buyService.getUserDataBuys(id), + this.sellService.getSellsByUserDataId(id), + ]); - return { userData, kycFiles }; + return { + userData, + kycFiles, + kycSteps: kycSteps.map((s) => this.toKycStepSupportInfo(s)), + transactions: transactions.map((t) => this.toTransactionSupportInfo(t)), + users: users.map((u) => this.toUserSupportInfo(u)), + bankDatas: bankDatas.map((b) => this.toBankDataSupportInfo(b)), + buyRoutes: buyRoutes.map((b) => this.toBuySupportInfo(b)), + sellRoutes: sellRoutes.map((s) => this.toSellSupportInfo(s)), + }; + } + + // --- MAPPING METHODS --- // + + private toKycStepSupportInfo(step: KycStep): KycStepSupportInfo { + return { + id: step.id, + name: step.name, + type: step.type, + status: step.status, + sequenceNumber: step.sequenceNumber, + created: step.created, + }; + } + + private toTransactionSupportInfo(tx: Transaction): TransactionSupportInfo { + return { + id: tx.id, + uid: tx.uid, + type: tx.type, + sourceType: tx.sourceType, + amountInChf: tx.amountInChf, + amlCheck: tx.amlCheck, + created: tx.created, + }; + } + + private toUserSupportInfo(user: User): UserSupportInfo { + return { + id: user.id, + address: user.address, + role: user.role, + status: user.status, + created: user.created, + }; + } + + private toBankDataSupportInfo(bankData: BankData): BankDataSupportInfo { + return { + id: bankData.id, + iban: bankData.iban, + name: bankData.name, + approved: bankData.approved, + }; + } + + private toBuySupportInfo(buy: Buy): BuySupportInfo { + return { + id: buy.id, + bankUsage: buy.bankUsage, + assetName: buy.asset?.name, + blockchain: buy.asset?.blockchain, + volume: buy.volume, + active: buy.active, + }; + } + + private toSellSupportInfo(sell: Sell): SellSupportInfo { + return { + id: sell.id, + iban: sell.iban, + fiatName: 
sell.fiat?.name, + volume: sell.annualVolume, + }; } async searchUserDataByKey(query: UserDataSupportQuery): Promise<UserDataSupportInfoResult> { diff --git a/src/subdomains/generic/user/models/bank-data/bank-data.service.ts b/src/subdomains/generic/user/models/bank-data/bank-data.service.ts index 620a026f83..cc391c82e4 100644 --- a/src/subdomains/generic/user/models/bank-data/bank-data.service.ts +++ b/src/subdomains/generic/user/models/bank-data/bank-data.service.ts @@ -301,6 +301,12 @@ export class BankDataService { }); } + async getBankDatasByUserData(userDataId: number): Promise<BankData[]> { + return this.bankDataRepo.find({ + where: { userData: { id: userDataId } }, + }); + } + async getVerifiedBankDataWithIban( iban: string, userDataId?: number, diff --git a/src/subdomains/supporting/payment/services/transaction.service.ts b/src/subdomains/supporting/payment/services/transaction.service.ts index 8b7ce181be..f4ff249ffe 100644 --- a/src/subdomains/supporting/payment/services/transaction.service.ts +++ b/src/subdomains/supporting/payment/services/transaction.service.ts @@ -121,6 +121,14 @@ export class TransactionService { return this.repo.findBy({ uid: IsNull(), created: LessThanOrEqual(filterDate) }); } + async getTransactionsByUserDataId(userDataId: number): Promise<Transaction[]> { + return this.repo.find({ + where: { userData: { id: userDataId } }, + order: { created: 'DESC' }, + take: 100, + }); + } + async getTransactionsForAccount(userDataId: number, from = new Date(0), to = new Date()): Promise<Transaction[]> { return this.repo.find({ where: { userData: { id: userDataId }, type: Not(IsNull()), created: Between(from, to) },