From 0420230ea86e35f31e38f48df3803a53ca3ad2b1 Mon Sep 17 00:00:00 2001 From: ChanukaUOJ Date: Mon, 9 Mar 2026 15:43:16 +0530 Subject: [PATCH 01/10] functionality and tests added to the data handler and read api service --- opengin/core-api/cmd/server/service.go | 84 +++- .../db/repository/postgres/data_handler.go | 59 +++ .../repository/postgres/data_handler_test.go | 425 ++++++++++++++++++ 3 files changed, 562 insertions(+), 6 deletions(-) diff --git a/opengin/core-api/cmd/server/service.go b/opengin/core-api/cmd/server/service.go index 0236da7e..1e0d7693 100644 --- a/opengin/core-api/cmd/server/service.go +++ b/opengin/core-api/cmd/server/service.go @@ -182,11 +182,21 @@ func (s *Server) ReadEntity(ctx context.Context, req *pb.ReadEntityRequest) (*pb // Use the EntityAttributeProcessor to read and process attributes processor := engine.NewEntityAttributeProcessor() - // Extract fields from the request attributes based on storage type - fields := extractFieldsFromAttributes(req.Entity.Attributes) + // Extract fields and record filters from the request attributes based on storage type + fields, recordFilters := extractFieldsFromAttributes(req.Entity.Attributes) log.Printf("Extracted fields from attributes: %v", fields) + log.Printf("Extracted record filters from attributes: %v", recordFilters) - readOptions := engine.NewReadOptions(make(map[string]interface{}), fields...) + // Convert record filters to the filters map for ReadOptions + filtersMap := make(map[string]interface{}) + if len(recordFilters) > 0 { + filtersMap["record_filters"] = recordFilters + } + + fmt.Printf("Filters map: %v\n", filtersMap) + fmt.Printf("Fields: %v\n", fields) + + readOptions := engine.NewReadOptions(filtersMap, fields...) 
// Process the entity with attributes to get the results map attributeResults := processor.ProcessEntityAttributes(ctx, req.Entity, "read", readOptions) @@ -399,8 +409,9 @@ func (s *Server) ReadEntities(ctx context.Context, req *pb.ReadEntityRequest) (* // extractFieldsFromAttributes extracts field names from entity attributes based on storage type // TODO: Limitation in multi-value attribute reads. // FIXME: https://github.com/LDFLK/nexoan/issues/285 -func extractFieldsFromAttributes(attributes map[string]*pb.TimeBasedValueList) []string { +func extractFieldsFromAttributes(attributes map[string]*pb.TimeBasedValueList) ([]string, []postgres.RecordFilter) { var fields []string + var records []postgres.RecordFilter for attrName, attrValueList := range attributes { if attrValueList == nil || len(attrValueList.Values) == 0 { @@ -428,6 +439,12 @@ func extractFieldsFromAttributes(attributes map[string]*pb.TimeBasedValueList) [ } else { log.Printf("Warning: could not extract columns from tabular attribute %s: %v", attrName, err) } + + if rows, err := extractRowsFromTabularAttributes(value.Value); err == nil { + records = append(records, rows...) 
+ } else { + log.Printf("Warning: could not extract rows from tabular attribute %s: %v", attrName, err) + } case "graph": // TODO: Handle graph data fields log.Printf("Graph data fields extraction not implemented yet for attribute %s", attrName) @@ -438,8 +455,9 @@ func extractFieldsFromAttributes(attributes map[string]*pb.TimeBasedValueList) [ log.Printf("Unknown storage type %s for attribute %s", storageType, attrName) } } - - return fields + fmt.Printf("final fields: %v\n", fields) + fmt.Printf("final records: %v\n", records) + return fields, records } // determineStorageTypeFromValue determines the storage type from a protobuf Any value @@ -509,6 +527,60 @@ func extractColumnsFromTabularAttribute(anyValue *anypb.Any) ([]string, error) { return columns, nil } +// extractRowsFromTabularAttributes extracts row filters from a tabular attribute value +func extractRowsFromTabularAttributes(anyValue *anypb.Any) ([]postgres.RecordFilter, error) { + message, err := anyValue.UnmarshalNew() + if err != nil { + return nil, fmt.Errorf("failed to unmarshal Any value: %v", err) + } + + structValue, ok := message.(*structpb.Struct) + if !ok { + return nil, fmt.Errorf("expected struct value") + } + + rowsField, exists := structValue.Fields["rows"] + if !exists { + return nil, fmt.Errorf("no rows found") + } + + fmt.Printf("row fields : %v\n", rowsField) + + listValue := rowsField.GetListValue() + + fmt.Printf("list value : %v\n", listValue) + if listValue == nil { + return nil, fmt.Errorf("rows field is not a list") + } + + var filters []postgres.RecordFilter + for _, val := range listValue.Values { + datum := val.GetStructValue() + if datum == nil { + continue + } + + filter := postgres.RecordFilter{} + if fieldName, ok := datum.Fields["field_name"]; ok { + filter.FieldName = fieldName.GetStringValue() + } + + if operator, ok := datum.Fields["operator"]; ok { + filter.Operator = operator.GetStringValue() + } + + if value, ok := datum.Fields["value"]; ok { + filter.Value = 
value.GetStringValue() + } + + filters = append(filters, filter) + } + + fmt.Println("Rows extracted: ", filters) + + return filters, nil +} + // Start the gRPC server func main() { // Initialize MongoDB config diff --git a/opengin/core-api/db/repository/postgres/data_handler.go b/opengin/core-api/db/repository/postgres/data_handler.go index e9f2477b..da5ba72c 100644 --- a/opengin/core-api/db/repository/postgres/data_handler.go +++ b/opengin/core-api/db/repository/postgres/data_handler.go @@ -638,6 +638,13 @@ type TabularData struct { Rows [][]interface{} `json:"rows"` } +// RecordFilter represents a single row level filter +type RecordFilter struct { + FieldName string + Operator string + Value string +} + // GetData retrieves data from a table with optional field selection and filters, returns it as pb.Any with JSON-formatted tabular data. func (repo *PostgresRepository) GetData(ctx context.Context, tableName string, filters map[string]interface{}, fields ...string) (*anypb.Any, error) { log.Printf("DEBUG: GetData: tableName=%s, \t\nfilters=%v, \t\nfields=%v", tableName, filters, fields) @@ -668,6 +675,52 @@ func (repo *PostgresRepository) GetData(ctx context.Context, tableName string, f // Add filters to the query for key, value := range filters { + // Handle RecordFilter slices (operator-based row filtering) + if key == "record_filters" { + if recordFilters, ok := value.([]RecordFilter); ok { + for _, rf := range recordFilters { + sqlOp := "=" + switch rf.Operator { + case "eq": + log.Printf("DEBUG: [DataHandler.GetData] rf.Operator: %s", rf.Operator) + sqlOp = "=" + case "neq": + log.Printf("DEBUG: [DataHandler.GetData] rf.Operator: %s", rf.Operator) + sqlOp = "!=" + case "gt": + log.Printf("DEBUG: [DataHandler.GetData] rf.Operator: %s", rf.Operator) + sqlOp = ">" + case "lt": + log.Printf("DEBUG: [DataHandler.GetData] rf.Operator: %s", rf.Operator) + sqlOp = "<" + case "lte": + log.Printf("DEBUG: [DataHandler.GetData] rf.Operator: %s", rf.Operator) + sqlOp = 
"<=" + case "gte": + log.Printf("DEBUG: [DataHandler.GetData] rf.Operator: %s", rf.Operator) + sqlOp = ">=" + case "contains": + log.Printf("DEBUG: [DataHandler.GetData] rf.Operator: %s", rf.Operator) + sqlOp = "ILIKE" + case "notcontains": + log.Printf("DEBUG: [DataHandler.GetData] rf.Operator: %s", rf.Operator) + sqlOp = "NOT ILIKE" + } + + if rf.Operator == "contains" || rf.Operator == "notcontains" { + whereClauses = append(whereClauses, fmt.Sprintf("%s %s $%d", commons.SanitizeIdentifier(rf.FieldName), sqlOp, argCount)) + args = append(args, "%"+rf.Value+"%") + } else { + whereClauses = append(whereClauses, fmt.Sprintf("%s %s $%d", commons.SanitizeIdentifier(rf.FieldName), sqlOp, argCount)) + args = append(args, rf.Value) + } + argCount++ + } + } + continue + } + + // Default: simple equality filter whereClauses = append(whereClauses, fmt.Sprintf("%s = $%d", commons.SanitizeIdentifier(key), argCount)) args = append(args, value) argCount++ @@ -677,6 +730,9 @@ func (repo *PostgresRepository) GetData(ctx context.Context, tableName string, f query += " WHERE " + strings.Join(whereClauses, " AND ") } + fmt.Printf("DEBUG: [DataHandler.GetData] query: %s", query) + fmt.Printf("DEBUG: [DataHandler.GetData] args: %v", args) + // Execute the query rows, err := repo.DB().QueryContext(ctx, query, args...) 
if err != nil { @@ -723,9 +779,12 @@ func (repo *PostgresRepository) GetData(ctx context.Context, tableName string, f } var tabularRows [][]interface{} + fmt.Printf("DEBUG: [DataHandler.GetData] rows: %v", rows) for rows.Next() { rowValues := make([]interface{}, len(resultColumns)) rowPointers := make([]interface{}, len(resultColumns)) + fmt.Printf("DEBUG: [DataHandler.GetData] rowValues: %v", rowValues) + fmt.Printf("DEBUG: [DataHandler.GetData] rowPointers: %v", rowPointers) for i := range rowValues { rowPointers[i] = &rowValues[i] } diff --git a/opengin/core-api/db/repository/postgres/data_handler_test.go b/opengin/core-api/db/repository/postgres/data_handler_test.go index 5d0ea7f4..02a32c34 100644 --- a/opengin/core-api/db/repository/postgres/data_handler_test.go +++ b/opengin/core-api/db/repository/postgres/data_handler_test.go @@ -649,3 +649,428 @@ func TestGetDataTabularFormat(t *testing.T) { assert.Len(t, filteredRows, 1) assert.Equal(t, expectedRows[0], filteredRows[0].([]interface{})) } + +func TestGetDataWithRowFiltering(t *testing.T) { + repo := setupTestDB(t) + // Do not defer repo.Close() here - let cleanup handle it + + // Use unique table name for this test + tableName := fmt.Sprintf("test_data_table_%d", time.Now().UnixNano()) + + // Create a dummy table and insert data + _, err := repo.DB().Exec(fmt.Sprintf(` + CREATE TABLE %s ( + id SERIAL PRIMARY KEY, + col1 TEXT, + col2 INTEGER, + col3 FLOAT, + col4 BOOLEAN + ) + `, tableName)) + assert.NoError(t, err) + + _, err = repo.DB().Exec(fmt.Sprintf(` + INSERT INTO %s (col1, col2, col3, col4) VALUES + ('val1', 10, 10.5, true), + ('val2', 20, 20.5, false), + ('val3', 30, 30.5, true), + ('val4', 40, 40.5, false), + ('val5', 50, 50.5, true) + `, tableName)) + + assert.NoError(t, err) + + t.Run("Test 1: Record filtering equality (single)", func(t *testing.T) { + row_filter := map[string]interface{}{ + "record_filters": []RecordFilter{ + {FieldName: "col4", Operator: "eq", Value: "true"}, + }, + } + + 
fields := []string{"col1", "col2", "col4"} + anyData, err := repo.GetData(context.Background(), tableName, row_filter, fields...) + assert.NoError(t, err) + assert.NotNil(t, anyData) + + // Unmarshal the Any data to get the JSON string + var structValue structpb.Struct + err = anyData.UnmarshalTo(&structValue) + assert.NoError(t, err) + + jsonStr := structValue.Fields["data"].GetStringValue() + assert.NotEmpty(t, jsonStr) + + // Parse the JSON to verify the structure + var tabularData map[string]interface{} + err = json.Unmarshal([]byte(jsonStr), &tabularData) + assert.NoError(t, err) + assert.NotNil(t, tabularData) + + // Add safety checks for the map keys + firstColumnsInterface, hasFirstColumns := tabularData["columns"] + assert.True(t, hasFirstColumns, "columns key should exist") + assert.NotNil(t, firstColumnsInterface, "columns should not be nil") + + firstRowsInterface, hasFirstRows := tabularData["rows"] + assert.True(t, hasFirstRows, "rows key should exist") + assert.NotNil(t, firstRowsInterface, "rows should not be nil") + + columns := firstColumnsInterface.([]interface{}) + rows := firstRowsInterface.([]interface{}) + + assert.Equal(t, "col1", columns[0]) + assert.Equal(t, "col2", columns[1]) + assert.Equal(t, "col4", columns[2]) + assert.Len(t, rows, 3) + }) + + t.Run("Test 2: Record filtering lte (single)", func(t *testing.T) { + row_filter := map[string]interface{}{ + "record_filters": []RecordFilter{ + {FieldName: "col2", Operator: "lte", Value: "20"}, + }, + } + + anyData, err := repo.GetData(context.Background(), tableName, row_filter) + assert.NoError(t, err) + assert.NotNil(t, anyData) + + // Unmarshal the Any data to get the JSON string + var structValue structpb.Struct + err = anyData.UnmarshalTo(&structValue) + assert.NoError(t, err) + + jsonStr := structValue.Fields["data"].GetStringValue() + assert.NotEmpty(t, jsonStr) + + // Parse the JSON to verify the structure + var tabularData map[string]interface{} + err = 
json.Unmarshal([]byte(jsonStr), &tabularData) + assert.NoError(t, err) + assert.NotNil(t, tabularData) + + // Add safety checks for the map keys + firstColumnsInterface, hasFirstColumns := tabularData["columns"] + assert.True(t, hasFirstColumns, "columns key should exist") + assert.NotNil(t, firstColumnsInterface, "columns should not be nil") + + firstRowsInterface, hasFirstRows := tabularData["rows"] + assert.True(t, hasFirstRows, "rows key should exist") + assert.NotNil(t, firstRowsInterface, "rows should not be nil") + + columns := firstColumnsInterface.([]interface{}) + rows := firstRowsInterface.([]interface{}) + + // since the fields are not specified, all the fields should be returned + assert.Equal(t, "id", columns[0]) + assert.Equal(t, "col1", columns[1]) + assert.Equal(t, "col2", columns[2]) + assert.Equal(t, "col3", columns[3]) + assert.Equal(t, "col4", columns[4]) + assert.Len(t, rows, 2) + }) + + t.Run("Test 3: Record filtering gte (single)", func(t *testing.T) { + row_filter := map[string]interface{}{ + "record_filters": []RecordFilter{ + {FieldName: "col2", Operator: "gte", Value: "20"}, + }, + } + + anyData, err := repo.GetData(context.Background(), tableName, row_filter) + assert.NoError(t, err) + assert.NotNil(t, anyData) + + // Unmarshal the Any data to get the JSON string + var structValue structpb.Struct + err = anyData.UnmarshalTo(&structValue) + assert.NoError(t, err) + + jsonStr := structValue.Fields["data"].GetStringValue() + assert.NotEmpty(t, jsonStr) + + // Parse the JSON to verify the structure + var tabularData map[string]interface{} + err = json.Unmarshal([]byte(jsonStr), &tabularData) + assert.NoError(t, err) + assert.NotNil(t, tabularData) + + // Add safety checks for the map keys + firstColumnsInterface, hasFirstColumns := tabularData["columns"] + assert.True(t, hasFirstColumns, "columns key should exist") + assert.NotNil(t, firstColumnsInterface, "columns should not be nil") + + firstRowsInterface, hasFirstRows := 
tabularData["rows"] + assert.True(t, hasFirstRows, "rows key should exist") + assert.NotNil(t, firstRowsInterface, "rows should not be nil") + + columns := firstColumnsInterface.([]interface{}) + rows := firstRowsInterface.([]interface{}) + + assert.Equal(t, "id", columns[0]) + assert.Equal(t, "col1", columns[1]) + assert.Equal(t, "col2", columns[2]) + assert.Equal(t, "col3", columns[3]) + assert.Equal(t, "col4", columns[4]) + assert.Len(t, rows, 4) + }) + + t.Run("Test 4: Record filtering lt (single)", func(t *testing.T) { + row_filter := map[string]interface{}{ + "record_filters": []RecordFilter{ + {FieldName: "col2", Operator: "lt", Value: "20"}, + }, + } + + anyData, err := repo.GetData(context.Background(), tableName, row_filter) + assert.NoError(t, err) + assert.NotNil(t, anyData) + + // Unmarshal the Any data to get the JSON string + var structValue structpb.Struct + err = anyData.UnmarshalTo(&structValue) + assert.NoError(t, err) + + jsonStr := structValue.Fields["data"].GetStringValue() + assert.NotEmpty(t, jsonStr) + + // Parse the JSON to verify the structure + var tabularData map[string]interface{} + err = json.Unmarshal([]byte(jsonStr), &tabularData) + assert.NoError(t, err) + assert.NotNil(t, tabularData) + + // Add safety checks for the map keys + firstColumnsInterface, hasFirstColumns := tabularData["columns"] + assert.True(t, hasFirstColumns, "columns key should exist") + assert.NotNil(t, firstColumnsInterface, "columns should not be nil") + + firstRowsInterface, hasFirstRows := tabularData["rows"] + assert.True(t, hasFirstRows, "rows key should exist") + assert.NotNil(t, firstRowsInterface, "rows should not be nil") + + columns := firstColumnsInterface.([]interface{}) + rows := firstRowsInterface.([]interface{}) + + assert.Equal(t, "id", columns[0]) + assert.Equal(t, "col1", columns[1]) + assert.Equal(t, "col2", columns[2]) + assert.Equal(t, "col3", columns[3]) + assert.Equal(t, "col4", columns[4]) + assert.Len(t, rows, 1) + }) + + t.Run("Test 5: 
Record filtering gt (single)", func(t *testing.T) { + row_filter := map[string]interface{}{ + "record_filters": []RecordFilter{ + {FieldName: "col3", Operator: "gt", Value: "20.5"}, + }, + } + + anyData, err := repo.GetData(context.Background(), tableName, row_filter) + assert.NoError(t, err) + assert.NotNil(t, anyData) + + // Unmarshal the Any data to get the JSON string + var structValue structpb.Struct + err = anyData.UnmarshalTo(&structValue) + assert.NoError(t, err) + + jsonStr := structValue.Fields["data"].GetStringValue() + assert.NotEmpty(t, jsonStr) + + // Parse the JSON to verify the structure + var tabularData map[string]interface{} + err = json.Unmarshal([]byte(jsonStr), &tabularData) + assert.NoError(t, err) + assert.NotNil(t, tabularData) + + // Add safety checks for the map keys + firstColumnsInterface, hasFirstColumns := tabularData["columns"] + assert.True(t, hasFirstColumns, "columns key should exist") + assert.NotNil(t, firstColumnsInterface, "columns should not be nil") + + firstRowsInterface, hasFirstRows := tabularData["rows"] + assert.True(t, hasFirstRows, "rows key should exist") + assert.NotNil(t, firstRowsInterface, "rows should not be nil") + + columns := firstColumnsInterface.([]interface{}) + rows := firstRowsInterface.([]interface{}) + + assert.Equal(t, "id", columns[0]) + assert.Equal(t, "col1", columns[1]) + assert.Equal(t, "col2", columns[2]) + assert.Equal(t, "col3", columns[3]) + assert.Equal(t, "col4", columns[4]) + assert.Len(t, rows, 3) + }) + + t.Run("Test 6: Record filtering neq (single)", func(t *testing.T) { + row_filter := map[string]interface{}{ + "record_filters": []RecordFilter{ + {FieldName: "col1", Operator: "neq", Value: "val5"}, + }, + } + + anyData, err := repo.GetData(context.Background(), tableName, row_filter) + assert.NoError(t, err) + assert.NotNil(t, anyData) + + // Unmarshal the Any data to get the JSON string + var structValue structpb.Struct + err = anyData.UnmarshalTo(&structValue) + assert.NoError(t, 
err) + + jsonStr := structValue.Fields["data"].GetStringValue() + assert.NotEmpty(t, jsonStr) + + // Parse the JSON to verify the structure + var tabularData map[string]interface{} + err = json.Unmarshal([]byte(jsonStr), &tabularData) + assert.NoError(t, err) + assert.NotNil(t, tabularData) + + // Add safety checks for the map keys + firstColumnsInterface, hasFirstColumns := tabularData["columns"] + assert.True(t, hasFirstColumns, "columns key should exist") + assert.NotNil(t, firstColumnsInterface, "columns should not be nil") + + firstRowsInterface, hasFirstRows := tabularData["rows"] + assert.True(t, hasFirstRows, "rows key should exist") + assert.NotNil(t, firstRowsInterface, "rows should not be nil") + + columns := firstColumnsInterface.([]interface{}) + rows := firstRowsInterface.([]interface{}) + + assert.Equal(t, "id", columns[0]) + assert.Equal(t, "col1", columns[1]) + assert.Equal(t, "col2", columns[2]) + assert.Equal(t, "col3", columns[3]) + assert.Equal(t, "col4", columns[4]) + assert.Len(t, rows, 4) + }) + + t.Run("Test 7: Record filtering contains (single)", func(t *testing.T) { + row_filter := map[string]interface{}{ + "record_filters": []RecordFilter{ + {FieldName: "col1", Operator: "contains", Value: "val"}, + }, + } + fields := []string{"col4"} + anyData, err := repo.GetData(context.Background(), tableName, row_filter, fields...) 
+ assert.NoError(t, err) + assert.NotNil(t, anyData) + + // Unmarshal the Any data to get the JSON string + var structValue structpb.Struct + err = anyData.UnmarshalTo(&structValue) + assert.NoError(t, err) + + jsonStr := structValue.Fields["data"].GetStringValue() + assert.NotEmpty(t, jsonStr) + + // Parse the JSON to verify the structure + var tabularData map[string]interface{} + err = json.Unmarshal([]byte(jsonStr), &tabularData) + assert.NoError(t, err) + assert.NotNil(t, tabularData) + + // Add safety checks for the map keys + firstColumnsInterface, hasFirstColumns := tabularData["columns"] + assert.True(t, hasFirstColumns, "columns key should exist") + assert.NotNil(t, firstColumnsInterface, "columns should not be nil") + + firstRowsInterface, hasFirstRows := tabularData["rows"] + assert.True(t, hasFirstRows, "rows key should exist") + assert.NotNil(t, firstRowsInterface, "rows should not be nil") + + columns := firstColumnsInterface.([]interface{}) + rows := firstRowsInterface.([]interface{}) + + assert.Equal(t, "col4", columns[0]) + assert.Len(t, rows, 5) + }) + + t.Run("Test 8: Record filtering not contains (single)", func(t *testing.T) { + row_filter := map[string]interface{}{ + "record_filters": []RecordFilter{ + {FieldName: "col1", Operator: "notcontains", Value: "val"}, + }, + } + fields := []string{"col4"} + anyData, err := repo.GetData(context.Background(), tableName, row_filter, fields...) 
+ assert.NoError(t, err) + assert.NotNil(t, anyData) + + // Unmarshal the Any data to get the JSON string + var structValue structpb.Struct + err = anyData.UnmarshalTo(&structValue) + assert.NoError(t, err) + + jsonStr := structValue.Fields["data"].GetStringValue() + assert.NotEmpty(t, jsonStr) + + // Parse the JSON to verify the structure + var tabularData map[string]interface{} + err = json.Unmarshal([]byte(jsonStr), &tabularData) + assert.NoError(t, err) + assert.NotNil(t, tabularData) + + // Add safety checks for the map keys + firstColumnsInterface, hasFirstColumns := tabularData["columns"] + assert.True(t, hasFirstColumns, "columns key should exist") + assert.NotNil(t, firstColumnsInterface, "columns should not be nil") + + firstRowsInterface, hasFirstRows := tabularData["rows"] + assert.True(t, hasFirstRows, "rows key should exist") + assert.Nil(t, firstRowsInterface, "rows should be nil when no rows match the filter") + + columns := firstColumnsInterface.([]interface{}) + assert.Equal(t, "col4", columns[0]) + }) + + t.Run("Test 9: Record filtering eq/contains (multiple)", func(t *testing.T) { + row_filter := map[string]interface{}{ + "record_filters": []RecordFilter{ + {FieldName: "col2", Operator: "eq", Value: "20"}, + {FieldName: "col1", Operator: "contains", Value: "val"}, + {FieldName: "col4", Operator: "neq", Value: "true"}, + }, + } + fields := []string{"col4"} + anyData, err := repo.GetData(context.Background(), tableName, row_filter, fields...) 
+ assert.NoError(t, err) + assert.NotNil(t, anyData) + + // Unmarshal the Any data to get the JSON string + var structValue structpb.Struct + err = anyData.UnmarshalTo(&structValue) + assert.NoError(t, err) + + jsonStr := structValue.Fields["data"].GetStringValue() + assert.NotEmpty(t, jsonStr) + + // Parse the JSON to verify the structure + var tabularData map[string]interface{} + err = json.Unmarshal([]byte(jsonStr), &tabularData) + assert.NoError(t, err) + assert.NotNil(t, tabularData) + + // Add safety checks for the map keys + firstColumnsInterface, hasFirstColumns := tabularData["columns"] + assert.True(t, hasFirstColumns, "columns key should exist") + assert.NotNil(t, firstColumnsInterface, "columns should not be nil") + + firstRowsInterface, hasFirstRows := tabularData["rows"] + assert.True(t, hasFirstRows, "rows key should exist") + assert.NotNil(t, firstRowsInterface, "rows should not be nil") + + columns := firstColumnsInterface.([]interface{}) + rows := firstRowsInterface.([]interface{}) + + assert.Equal(t, "col4", columns[0]) + assert.Len(t, rows, 1) + + }) +} + From ea95d8bde2135aa9faa5028a664a7c63014fd8e2 Mon Sep 17 00:00:00 2001 From: ChanukaUOJ Date: Mon, 9 Mar 2026 15:56:21 +0530 Subject: [PATCH 02/10] read api service files --- opengin/contracts/rest/read_api.yaml | 36 +- opengin/read-api/read_api_service.bal | 82 ++-- .../read-api/tests/read_api_service_test.bal | 442 +++++++++++++++--- opengin/read-api/types.bal | 22 +- 4 files changed, 462 insertions(+), 120 deletions(-) diff --git a/opengin/contracts/rest/read_api.yaml b/opengin/contracts/rest/read_api.yaml index efd7165f..b8787144 100644 --- a/opengin/contracts/rest/read_api.yaml +++ b/opengin/contracts/rest/read_api.yaml @@ -31,8 +31,7 @@ paths: /entities/search: post: summary: Get entities based on id or filter criteria - description: - This endpoint allows you to search for entities using either a specific `id` or a set of filter criteria. 
+ description: This endpoint allows you to search for entities using either a specific `id` or a set of filter criteria. requestBody: required: true @@ -125,7 +124,7 @@ paths: Structure may vary based on entity type. /entities/{entityId}/attributes/{attributeName}: - get: + post: summary: Get entity attribute parameters: - name: entityId @@ -159,6 +158,36 @@ paths: items: type: string default: ["*"] + requestBody: + required: false + content: + application/json: + schema: + type: object + properties: + records: + type: array + items: + type: object + properties: + field_name: + type: string + operator: + type: string + description: "The comparison operator to use." + enum: + - "eq" + - "neq" + - "gt" + - "lt" + - "gte" + - "lte" + - "contains" + - "notcontains" + default: "eq" + value: + type: string + description: "List of record filters to apply row base filtering" responses: 200: description: Attribute value(s) @@ -273,4 +302,3 @@ paths: format: date-time direction: type: string - \ No newline at end of file diff --git a/opengin/read-api/read_api_service.bal b/opengin/read-api/read_api_service.bal index 9aebefd5..e1875374 100644 --- a/opengin/read-api/read_api_service.bal +++ b/opengin/read-api/read_api_service.bal @@ -4,11 +4,11 @@ // AUTO-GENERATED FILE. // This file is auto-generated by the Ballerina OpenAPI tool. 
-import ballerina/http; -import ballerina/protobuf.types.'any; import ballerina/grpc; -import ballerina/lang.'int as langint; +import ballerina/http; import ballerina/io; +import ballerina/lang.'int as langint; +import ballerina/protobuf.types.'any; import ballerina/protobuf.types.'any as pbAny; // BAL_CONFIG_VAR_CORESERVICEURL @@ -36,15 +36,15 @@ COREServiceClient ep = check new (coreServiceUrl, grpcConfig); // Helper function to extract string representation based on typeUrl function extractValueAsString('any:Any anyValue) returns string { string typeUrl = anyValue.typeUrl; - + // Default to using the value's string representation string valueStr = anyValue.toString(); - + // Check if it's a known primitive type and handle accordingly if (typeUrl.endsWith("string")) { return anyValue.value.toString(); - } else if (typeUrl.endsWith("int32") || typeUrl.endsWith("int64") || - typeUrl.endsWith("float") || typeUrl.endsWith("double")) { + } else if (typeUrl.endsWith("int32") || typeUrl.endsWith("int64") || + typeUrl.endsWith("float") || typeUrl.endsWith("double")) { return anyValue.value.toString(); } else if (typeUrl.endsWith("bool")) { return anyValue.value.toString(); @@ -135,10 +135,12 @@ function makeFilter(json data) returns pbAny:Any|error { service /v1 on ep0 { # Get entity attribute # - # + return - Attribute value(s) - resource function get entities/[string entityId]/attributes/[string attributeName](string? startTime, string? endTime, string[]? fields) returns record {string 'start?; string end?; string value?;}|record {string 'start?; string end?; string value?;}[]|http:NotFound|error { + # + fields - List of field names to return. Defaults to ['*'] (all fields). + # + return - Attribute value(s) + resource function post entities/[string entityId]/attributes/[string attributeName](string? startTime, string? endTime, @http:Payload attributes_attributeName_body payload, string[]? 
fields) returns RecordStringStartStringendStringvalueRecordStringStartStringendStringvalueArrayOk|http:NotFound|error { // Set default fields value to ["*"] if not provided or if empty array string[] fieldsToUse = (fields == () || fields.length() == 0) ? [] : fields; + json recordsToUse = (payload.records ?: []).toJson(); // TODO: Pass fieldsToUse to the backend for field filtering // This requires updating the ReadEntityRequest protobuf definition @@ -147,7 +149,7 @@ service /v1 on ep0 { json attributeValueFilter = { "columns": fieldsToUse, - "rows": [[]] + "rows": recordsToUse }; pbAny:Any attributeValueFilterAny = check makeFilter(attributeValueFilter); @@ -186,25 +188,25 @@ service /v1 on ep0 { ], relationships: [] }; - + // Create ReadEntityRequest with output field set to attributes only ReadEntityRequest request = { entity: entityFilter, - output: ["attributes"] // Only request attributes field + output: ["attributes"] // Only request attributes field }; - + // Read the entity using the core service Entity entity = check ep->ReadEntity(request); - + // Process the filtered entity foreach var attrEntry in entity.attributes { if attrEntry.key == attributeName { TimeBasedValueList tbvList = attrEntry.value; - + // If we have time parameters, filter the results if tbvList.values.length() == 0 { // Return empty array instead of null - return []; + return {body: []}; } else if (tbvList.values.length() == 1) { // Return single value TimeBasedValue tbv = tbvList.values[0]; @@ -213,7 +215,7 @@ service /v1 on ep0 { end: tbv.endTime, value: extractValueAsString(tbv.value) }; - return response; + return {body: response}; } else { // Return multiple values record {string 'start?; string end?; string value?;}[] response = []; @@ -224,13 +226,13 @@ service /v1 on ep0 { value: extractValueAsString(tbv.value) }); } - return response; + return {body: response}; } } } - + // Return 404 with a message instead of null - return { + return { body: { "error": "Attribute not found", 
"attribute": attributeName, @@ -268,20 +270,20 @@ service /v1 on ep0 { // Create ReadEntityRequest with output field set to metadata only ReadEntityRequest request = { entity: entityFilter, - output: ["metadata"] // Only request metadata field + output: ["metadata"] // Only request metadata field }; - + // Read the entity using the core service Entity entity = check ep->ReadEntity(request); - + // Convert metadata to generic JSON object map metadataJson = {}; - + // Add all metadata fields foreach var entry in entity.metadata { metadataJson[entry.key] = extractValueAsString(entry.value); } - + return metadataJson; } @@ -300,16 +302,16 @@ service /v1 on ep0 { // Validate that startTime/endTime and activeAt are not used together boolean hasTimeRange = (payload.startTime is string && payload.startTime != "") || (payload.endTime is string && payload.endTime != ""); boolean hasActiveAt = payload.activeAt is string && payload.activeAt != ""; - + if (hasTimeRange && hasActiveAt) { - return { + return { body: { "error": "Invalid request parameters", "details": "Cannot use both time range (startTime/endTime) and activeAt parameters together. Use either time range or activeAt, but not both." 
} }; } - + // Create entity filter with embedded relationship criteria Entity entityFilter = { id: entityId, @@ -328,7 +330,7 @@ service /v1 on ep0 { attributes: [], relationships: [ { - key: payload.id ?: "", // Using id as the key + key: payload.id ?: "", // Using id as the key value: { relatedEntityId: payload.relatedEntityId ?: "", startTime: payload.startTime ?: "", @@ -340,23 +342,23 @@ service /v1 on ep0 { } ] }; - + // Create ReadEntityRequest with output field set to relationships only ReadEntityRequest request = { entity: entityFilter, - output: ["relationships"], // Only request relationships field + output: ["relationships"], // Only request relationships field activeAt: payload.activeAt ?: "" }; - + // Read the entity using the core service Entity entity = check ep->ReadEntity(request); - + // Process the relationships returned by the backend record {string id?; string relatedEntityId?; string name?; string startTime?; string endTime?; string direction?;}[] relationships = []; - + foreach var relEntry in entity.relationships { Relationship rel = relEntry.value; - + // Add to our result list relationships.push({ id: rel.id, @@ -367,7 +369,7 @@ service /v1 on ep0 { direction: rel.direction }); } - + return {body: relationships}; } @@ -377,7 +379,7 @@ service /v1 on ep0 { resource function post entities/search(@http:Payload entities_search_body payload) returns InlineResponse2001Ok|http:BadRequest|error { // Check if we have either ID or Kind.Major if payload.id == "" && (payload.kind is () || (payload.kind)?.major is () || (payload.kind)?.major == "") { - return { + return { body: { "error": "Invalid search criteria", "details": "Either id or Kind.Major is required for filtering entities" @@ -421,8 +423,8 @@ service /v1 on ep0 { EntityList|error entityList = ep->ReadEntities(request); if entityList is error { io:println(string `Error reading entities: ${entityList.message()}`); - - return { + + return { body: { "error": "Invalid search criteria", 
"details": entityList.message() @@ -441,7 +443,7 @@ service /v1 on ep0 { terminated: entity.terminated }); } - return { + return { body: { body: response } diff --git a/opengin/read-api/tests/read_api_service_test.bal b/opengin/read-api/tests/read_api_service_test.bal index e82f76ca..3ed3ad0d 100644 --- a/opengin/read-api/tests/read_api_service_test.bal +++ b/opengin/read-api/tests/read_api_service_test.bal @@ -2,9 +2,9 @@ // SPDX-License-Identifier: Apache-2.0 import ballerina/io; -import ballerina/test; -import ballerina/protobuf.types.'any as pbAny; import ballerina/os; +import ballerina/protobuf.types.'any as pbAny; +import ballerina/test; // Before Suite Function @test:BeforeSuite @@ -18,18 +18,18 @@ type JsonObject map; function getCoreServiceUrl() returns string|error { io:println("Getting CORE service URL"); string coreServiceUrl = os:getEnv("CORE_SERVICE_URL"); - + io:println("CORE_SERVICE_URL: " + coreServiceUrl); - + if coreServiceUrl == "" { return error("CORE_SERVICE_URL environment variable is not set"); } - + // Validate URL format if !coreServiceUrl.startsWith("http://") && !coreServiceUrl.startsWith("https://") { return error("CORE_SERVICE_URL must be a valid HTTP/HTTPS URL, got: " + coreServiceUrl); } - + io:println("Connecting to CORE service at: " + coreServiceUrl); return coreServiceUrl; } @@ -72,7 +72,7 @@ function convertJsonToAny(json data) returns pbAny:Any|error { // First, convert any decimal values to float for protobuf compatibility // FIXME: https://github.com/LDFLK/nexoan/issues/287 json convertedData = convertDecimalToFloat(data); - + if convertedData is int { // For integer values map structMap = { @@ -137,7 +137,7 @@ function testEntityAttributeRetrieval() returns error? { return coreUrl; } COREServiceClient ep = check new (coreUrl); - + // Test data setup string testId = "ABC Pvt Ltd"; string attributeName = "employee_data"; @@ -153,7 +153,7 @@ function testEntityAttributeRetrieval() returns error? 
{ }; pbAny:Any attributeValueAny = check convertJsonToAny(attributeValue); - + Entity createEntityRequest = { id: testId, kind: { @@ -189,18 +189,18 @@ function testEntityAttributeRetrieval() returns error? { ], relationships: [] }; - + // Create entity Entity createResponse = check ep->CreateEntity(createEntityRequest); io:println("Created entity with ID: " + createResponse.id); json attributeValueFilter = { "columns": ["emp_id", "name", "salary"], - "rows": [[]] + "rows": [] }; pbAny:Any attributeValueFilterAny = check convertJsonToAny(attributeValueFilter); - + // Now read the entity with the specific attribute filter ReadEntityRequest readEntityRequest = { entity: { @@ -232,9 +232,9 @@ function testEntityAttributeRetrieval() returns error? { }, output: ["attributes"] }; - + Entity readResponse = check ep->ReadEntity(readEntityRequest); - + test:assertEquals(readResponse.attributes.length(), 1, "Should return exactly one attribute"); test:assertEquals(readResponse.attributes[0].key, attributeName, "Attribute key should match"); @@ -262,12 +262,322 @@ function testEntityAttributeRetrieval() returns error? { io:println("Data JSON: " + actualValueJson.toString()); verifyTabularData(actualValueJson, expectedValueJson); - + + // Clean up + EntityId deleteRequest = {id: testId}; + Empty _ = check ep->DeleteEntity(deleteRequest); + io:println("Test entity deleted"); + + return; +} + +// Test entity attribute retrieval with single column filter +@test:Config { + groups: ["entity", "attribute"], + enable: true +} +function testEntityAttributeRetrievalWithSingleColumnFilter() returns error? 
{ + // TODO: Implement this test once the Data handling layer is written + // Initialize the client + io:println("[read_api_service_test.bal][testEntityAttributeRetrievalWithSingleColumnFilter]"); + string|error coreUrl = getCoreServiceUrl(); + if coreUrl is error { + return coreUrl; + } + COREServiceClient ep = check new (coreUrl); + + // Test data setup + string testId = "ABCD Pvt Ltd"; + string attributeName = "employee_data"; + json attributeValue = { + "columns": ["emp_id", "name", "salary", "join_date", "is_active"], + "rows": [ + [1001, "John Doe", 75000.50, "2024-01-15T09:00:00Z", true], + [1002, "Jane Smith", 82000.75, "2024-02-01T09:00:00Z", true], + [1003, "Bob Wilson", 65000.25, "2024-03-01T09:00:00Z", false], + [1004, "Alice Brown", 70000.25, "2024-04-01T09:00:00Z", true], + [1005, "Charlie Davis", 80000, "2024-05-01T09:00:00Z", true] + ] + }; + + pbAny:Any attributeValueAny = check convertJsonToAny(attributeValue); + + Entity createEntityRequest = { + id: testId, + kind: { + major: "Organization", + minor: "Private Limited" + }, + created: "2025-02-01T00:00:00Z", + terminated: "", + name: { + startTime: "2025-02-01T00:00:00Z", + endTime: "", + value: check pbAny:pack("ABC Pvt Ltd") + }, + metadata: [ + { + key: "abc_pvt_ltd_metadata", + value: check pbAny:pack("tabular_abc_pvt_ltd_test_value") + } + ], + attributes: [ + { + key: "employee_data", + value: { + values: [ + { + startTime: "2025-04-01T00:00:00Z", + endTime: "", + value: attributeValueAny + } + ] + } + } + ], + relationships: [] + }; + + // Create entity + Entity createResponse = check ep->CreateEntity(createEntityRequest); + io:println("Created entity with ID: " + createResponse.id); + + json attributeValueFilter = { + "columns": ["emp_id", "name", "salary"], + "rows": [ + { + "field_name": "emp_id", + "operator": "eq", + "value": "1005" + } + ] + }; + + pbAny:Any attributeValueFilterAny = check convertJsonToAny(attributeValueFilter); + + // Now read the entity with the specific attribute 
filter + ReadEntityRequest readEntityRequest = { + entity: { + id: testId, + kind: {}, + created: "", + terminated: "", + name: { + startTime: "", + endTime: "", + value: check pbAny:pack("") + }, + metadata: [], + attributes: [ + { + key: "employee_data", + value: { + values: [ + { + startTime: "", + endTime: "", + value: attributeValueFilterAny + } + ] + } + } + ], + relationships: [] + }, + output: ["attributes"] + }; + + Entity readResponse = check ep->ReadEntity(readEntityRequest); + + test:assertEquals(readResponse.attributes.length(), 1, "Should return exactly one attribute"); + test:assertEquals(readResponse.attributes[0].key, attributeName, "Attribute key should match"); + + var retrievedAttributeValue = readResponse.attributes[0].value.values[0].value; + JsonObject attributeValueJson = check pbAny:unpack(retrievedAttributeValue); + io:println("Retrieved attribute value JSON: " + attributeValueJson.toString()); + + json expectedValueJson = { + "columns": ["emp_id", "name", "salary"], + "rows": [ + [1005, "Charlie Davis", 80000] + ] + }; + + // Extract the nested data field + string dataJsonString = attributeValueJson["data"]; + io:println("Data JSON string: " + dataJsonString); + + // Parse the nested JSON string to get the actual tabular data + json actualValueJson = check dataJsonString.fromJsonString(); + io:println("Data JSON: " + actualValueJson.toString()); + + verifyTabularData(actualValueJson, expectedValueJson); + + // Clean up + EntityId deleteRequest = {id: testId}; + Empty _ = check ep->DeleteEntity(deleteRequest); + io:println("Test entity deleted"); + + return; +} + +// Test entity attribute retrieval with multiple column filter +@test:Config { + groups: ["entity", "attribute"], + enable: true +} +function testEntityAttributeRetrievalWithMultipleColumnFilter() returns error? 
{ + // TODO: Implement this test once the Data handling layer is written + // Initialize the client + io:println("[read_api_service_test.bal][testEntityAttributeRetrievalWithMultipleColumnFilter]"); + string|error coreUrl = getCoreServiceUrl(); + if coreUrl is error { + return coreUrl; + } + COREServiceClient ep = check new (coreUrl); + + // Test data setup + string testId = "ABCDE Pvt Ltd"; + string attributeName = "employee_data"; + json attributeValue = { + "columns": ["emp_id", "name", "salary", "join_date", "is_active"], + "rows": [ + [1001, "John Doe", 75000.50, "2024-01-15T09:00:00Z", true], + [1002, "Jane Smith", 82000.75, "2024-02-01T09:00:00Z", true], + [1003, "Bob Wilson", 65000.25, "2024-03-01T09:00:00Z", false], + [1004, "Alice Brown", 70000.25, "2024-04-01T09:00:00Z", true], + [1005, "Charlie Davis", 80000, "2024-05-01T09:00:00Z", true] + ] + }; + + pbAny:Any attributeValueAny = check convertJsonToAny(attributeValue); + + Entity createEntityRequest = { + id: testId, + kind: { + major: "Organization", + minor: "Private Limited" + }, + created: "2025-02-01T00:00:00Z", + terminated: "", + name: { + startTime: "2025-02-01T00:00:00Z", + endTime: "", + value: check pbAny:pack("ABC Pvt Ltd") + }, + metadata: [ + { + key: "abc_pvt_ltd_metadata", + value: check pbAny:pack("tabular_abc_pvt_ltd_test_value") + } + ], + attributes: [ + { + key: "employee_data", + value: { + values: [ + { + startTime: "2025-04-01T00:00:00Z", + endTime: "", + value: attributeValueAny + } + ] + } + } + ], + relationships: [] + }; + + // Create entity + Entity createResponse = check ep->CreateEntity(createEntityRequest); + io:println("Created entity with ID: " + createResponse.id); + + json attributeValueFilter = { + "columns": ["emp_id", "name", "salary", "join_date", "is_active"], + "rows": [ + { + "field_name": "emp_id", + "operator": "gte", + "value": "1002" + }, + { + "field_name": "salary", + "operator": "lte", + "value": "75000" + }, + { + "field_name": "is_active", + 
"operator": "neq", + "value": "true" + } + ] + }; + + pbAny:Any attributeValueFilterAny = check convertJsonToAny(attributeValueFilter); + + // Now read the entity with the specific attribute filter + ReadEntityRequest readEntityRequest = { + entity: { + id: testId, + kind: {}, + created: "", + terminated: "", + name: { + startTime: "", + endTime: "", + value: check pbAny:pack("") + }, + metadata: [], + attributes: [ + { + key: "employee_data", + value: { + values: [ + { + startTime: "", + endTime: "", + value: attributeValueFilterAny + } + ] + } + } + ], + relationships: [] + }, + output: ["attributes"] + }; + + Entity readResponse = check ep->ReadEntity(readEntityRequest); + + test:assertEquals(readResponse.attributes.length(), 1, "Should return exactly one attribute"); + test:assertEquals(readResponse.attributes[0].key, attributeName, "Attribute key should match"); + + var retrievedAttributeValue = readResponse.attributes[0].value.values[0].value; + JsonObject attributeValueJson = check pbAny:unpack(retrievedAttributeValue); + io:println("Retrieved attribute value JSON: " + attributeValueJson.toString()); + + json expectedValueJson = { + "columns": ["emp_id", "name", "salary", "join_date", "is_active"], + "rows": [ + [1003, "Bob Wilson", 65000.25, "2024-03-01T09:00:00Z", false] + ] + }; + + // Extract the nested data field + string dataJsonString = attributeValueJson["data"]; + io:println("Data JSON string: " + dataJsonString); + + // Parse the nested JSON string to get the actual tabular data + json actualValueJson = check dataJsonString.fromJsonString(); + io:println("Data JSON: " + actualValueJson.toString()); + + verifyTabularData(actualValueJson, expectedValueJson); + // Clean up EntityId deleteRequest = {id: testId}; Empty _ = check ep->DeleteEntity(deleteRequest); io:println("Test entity deleted"); - + return; } @@ -276,19 +586,19 @@ function testEntityAttributeRetrieval() returns error? { function testEntityMetadataRetrieval() returns error? 
{ // Test disabled due to gRPC connectivity issues // To enable, ensure the CORE service is running and all entity fields are properly populated - + // Initialize the client string|error coreUrl = getCoreServiceUrl(); if coreUrl is error { return coreUrl; } COREServiceClient ep = check new (coreUrl); - + // Test data setup string testId = "test-entity-metadata"; - + // Create the metadata array - record {| string key; pbAny:Any value; |}[] metadataArray = []; + record {|string key; pbAny:Any value;|}[] metadataArray = []; pbAny:Any packedValue1 = check pbAny:pack("Example Corp"); pbAny:Any packedValue2 = check pbAny:pack("Sensor X1"); metadataArray.push({key: "manufacturer", value: packedValue1}); @@ -323,7 +633,7 @@ function testEntityMetadataRetrieval() returns error? { Entity createEntityResponse = check ep->CreateEntity(createEntityRequest); io:println("Debug - Create entity response:"); io:println(createEntityResponse.toString()); - + // Read entity with metadata filter Entity metadataFilter = { id: testId, @@ -338,33 +648,33 @@ function testEntityMetadataRetrieval() returns error? 
{ endTime: "", value: check pbAny:pack("") }, - metadata: [], // Empty metadata array to indicate we want metadata + metadata: [], // Empty metadata array to indicate we want metadata relationships: [], attributes: [] }; - + ReadEntityRequest readRequest = { entity: metadataFilter, output: ["metadata"] }; - + io:println("Debug - Read request details:"); io:println(" id: " + readRequest.entity.id); io:println(" output field length: " + readRequest.output.length().toString()); io:println(" output contents: " + readRequest.output.toString()); - + io:println("Debug - Read request:"); io:println(readRequest.toString()); - + Entity|error readResponse = ep->ReadEntity(readRequest); - + if readResponse is error { io:println("[DEBUG] gRPC error: " + readResponse.toString()); return; } - + io:println("Received read response: " + readResponse.toString()); - + // Verify metadata values map actualValues = {}; foreach var item in readResponse.metadata { @@ -375,16 +685,16 @@ function testEntityMetadataRetrieval() returns error? { test:assertFail("Failed to unpack metadata value for key: " + item.key); } } - + // Assert the values match test:assertEquals(actualValues["manufacturer"], "Example Corp", "Metadata value for manufacturer doesn't match"); test:assertEquals(actualValues["model"], "Sensor X1", "Metadata value for model doesn't match"); - + // Clean up EntityId deleteEntityRequest = {id: testId}; Empty _ = check ep->DeleteEntity(deleteEntityRequest); io:println("Test entity deleted"); - + return; } @@ -436,7 +746,6 @@ function testEntityRelationshipsRetrieval() returns error? { test:assertEquals(respAll.relationships.length(), 3, "Should return all relationships"); io:println("[OUTPUT] Retrieving all relationships: " + respAll.toString()); - // 2. Filter by name Entity relFilterName = { id: entityId, @@ -507,7 +816,6 @@ function testEntityRelationshipsRetrieval() returns error? 
{ test:assertEquals(rel.value.name, "linked", "name should match"); io:println("[OUTPUT] Retrieving relationships by activeAt and name: " + respMulti.toString()); - // // 7. Filter by non-existent name Entity relFilterNone = {id: entityId, name: {value: check pbAny:pack("")}, relationships: [{key: "", value: {name: "nonexistent"}}]}; ReadEntityRequest reqNone = {entity: relFilterNone, output: ["relationships"]}; @@ -532,24 +840,24 @@ function testEntityRelationshipsRetrieval() returns error? { function testEntitySearch() returns error? { // Test disabled due to gRPC connectivity issues // To enable, ensure the CORE service is running and all entity fields are properly populated - + // Initialize clients string|error coreUrl = getCoreServiceUrl(); if coreUrl is error { return coreUrl; } COREServiceClient coreClient = check new (coreUrl); - + // Create several test entities with different attributes string[] testIds = []; - + // First entity string entity1Id = "test-search-entity-1"; testIds.push(entity1Id); - - record {| string key; pbAny:Any value; |}[] metadata1 = []; + + record {|string key; pbAny:Any value;|}[] metadata1 = []; metadata1.push({key: "manufacturer", value: check pbAny:pack("Example Corp")}); - + Entity entity1 = { id: entity1Id, kind: { @@ -567,17 +875,17 @@ function testEntitySearch() returns error? { relationships: [], attributes: [] }; - + Entity createResponse1 = check coreClient->CreateEntity(entity1); io:println("Created search test entity 1: " + createResponse1.id); - + // Second entity string entity2Id = "test-search-entity-2"; testIds.push(entity2Id); - - record {| string key; pbAny:Any value; |}[] metadata2 = []; + + record {|string key; pbAny:Any value;|}[] metadata2 = []; metadata2.push({key: "manufacturer", value: check pbAny:pack("Other Corp")}); - + Entity entity2 = { id: entity2Id, kind: { @@ -595,17 +903,17 @@ function testEntitySearch() returns error? 
{ relationships: [], attributes: [] }; - + Entity createResponse2 = check coreClient->CreateEntity(entity2); io:println("Created search test entity 2: " + createResponse2.id); - + // Third entity string entity3Id = "test-search-entity-3"; testIds.push(entity3Id); - - record {| string key; pbAny:Any value; |}[] metadata3 = []; + + record {|string key; pbAny:Any value;|}[] metadata3 = []; metadata3.push({key: "manufacturer", value: check pbAny:pack("Example Corp")}); - + Entity entity3 = { id: entity3Id, kind: { @@ -623,28 +931,28 @@ function testEntitySearch() returns error? { relationships: [], attributes: [] }; - + Entity createResponse3 = check coreClient->CreateEntity(entity3); io:println("Created search test entity 3: " + createResponse3.id); - + // For search tests, let's mock the responses since we can't connect directly to the read API // Create a test double for the search endpoint io:println("Performing search tests (mocked responses)..."); - + // Mock search response for test 1 (search by kind) json mockResponse1 = { "body": { "body": [entity1Id, entity3Id] } }; - + // Verify results as if they came from the API map responseMap1 = >mockResponse1; if responseMap1.hasKey("body") { map body = >responseMap1.get("body"); if body.hasKey("body") { json[] ids = body.get("body"); - + // Should find entity1 and entity3 (both are Device.Sensor) boolean foundEntity1 = false; boolean foundEntity3 = false; @@ -657,26 +965,26 @@ function testEntitySearch() returns error? 
{ foundEntity3 = true; } } - + test:assertTrue(foundEntity1, "Search by kind should find entity1"); test:assertTrue(foundEntity3, "Search by kind should find entity3"); } } - + // Mock search response for test 2 (search by metadata) json mockResponse2 = { "body": { "body": [entity1Id, entity3Id] } }; - + // Verify results map responseMap2 = >mockResponse2; if responseMap2.hasKey("body") { map body = >responseMap2.get("body"); if body.hasKey("body") { json[] ids = body.get("body"); - + // Should find entity1 and entity3 (both have manufacturer: Example Corp) boolean foundEntity1 = false; boolean foundEntity3 = false; @@ -689,26 +997,26 @@ function testEntitySearch() returns error? { foundEntity3 = true; } } - + test:assertTrue(foundEntity1, "Search by metadata should find entity1"); test:assertTrue(foundEntity3, "Search by metadata should find entity3"); } } - + // Mock search response for test 3 (search by combined criteria) json mockResponse3 = { "body": { "body": [entity3Id] } }; - + // Verify results map responseMap3 = >mockResponse3; if responseMap3.hasKey("body") { map body = >responseMap3.get("body"); if body.hasKey("body") { json[] ids = body.get("body"); - + // Should find only entity3 boolean foundEntity3 = false; foreach json id in ids { @@ -717,19 +1025,19 @@ function testEntitySearch() returns error? { foundEntity3 = true; } } - + test:assertTrue(foundEntity3, "Search by combined criteria should find entity3"); test:assertTrue(ids.length() == 1, "Search should find exactly 1 entity"); } } - + // Clean up foreach string id in testIds { EntityId deleteRequest = {id: id}; Empty _ = check coreClient->DeleteEntity(deleteRequest); } io:println("Test entities deleted"); - + return; } @@ -737,4 +1045,4 @@ function testEntitySearch() returns error? 
{ @test:AfterSuite function afterSuiteFunc() { io:println("Completed read API service tests!"); -} \ No newline at end of file +} diff --git a/opengin/read-api/types.bal b/opengin/read-api/types.bal index 090bbdbb..d7c059da 100644 --- a/opengin/read-api/types.bal +++ b/opengin/read-api/types.bal @@ -1,13 +1,10 @@ -// Copyright 2025 Lanka Data Foundation -// SPDX-License-Identifier: Apache-2.0 - // AUTO-GENERATED FILE. // This file is auto-generated by the Ballerina OpenAPI tool. import ballerina/http; public type entities_search_body record { - # Optional entity ID to search for + # Required if `kind.major` is not provided string id?; entitiessearch_kind kind?; string name?; @@ -18,7 +15,9 @@ public type entities_search_body record { public type EntitiesEntityIdMetadataResponse record { }; +# Required if `id` is not provided public type entitiessearch_kind record { + # Required if `id` is not provided string major?; string minor?; }; @@ -33,11 +32,6 @@ public type InlineResponse2001Ok record {| inline_response_200_1 body; |}; -public type RecordStringrelatedEntityIdStringstartTimeStringendTimeStringidStringnameArrayOk record {| - *http:Ok; - record {string relatedEntityId?; string startTime?; string endTime?; string id?; string name?;}[] body; -|}; - public type inline_response_200_1 record { record {string id?; record {string major?; string minor?;} kind?; string name?; string created?; string terminated?;}[] body?; }; @@ -46,6 +40,16 @@ public type inline_response_200 record { string[] body?; }; +public type attributes_attributeName_body record { + # List of record filters to apply row base filtering + record {string field_name?; "eq"|"neq"|"gt"|"lt"|"gte"|"lte"|"contains"|"notcontains" operator = "eq"; string value?;}[] records?; +}; + +public type RecordStringStartStringendStringvalueRecordStringStartStringendStringvalueArrayOk record {| + *http:Ok; + record {string 'start?; string end?; string value?;}|record {string 'start?; string end?; string value?;}[] 
body; +|}; + # Filter criteria for relations. Use either activeAt OR startTime/endTime, but not both. If no time filters are provided, all relations will be returned. public type entityId_relations_body record { # Optional relation ID filter From 3d4fcc977db79e20dba023d499421c426ddb47a6 Mon Sep 17 00:00:00 2001 From: ChanukaUOJ Date: Mon, 9 Mar 2026 16:52:07 +0530 Subject: [PATCH 03/10] update the readme file --- opengin/core-api/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opengin/core-api/README.md b/opengin/core-api/README.md index 8c88347d..17855f27 100644 --- a/opengin/core-api/README.md +++ b/opengin/core-api/README.md @@ -30,7 +30,7 @@ cp env.template .env # after updating the required fields to be added to the environment # (you can find the example env configurations here) source .env -go test -v ./... +go test -v ./... -count=1 -p=1 ./core-service From 255b0775f633f2dcf4978c4e844fc43b88a04eb8 Mon Sep 17 00:00:00 2001 From: ChanukaUOJ Date: Tue, 10 Mar 2026 08:52:27 +0530 Subject: [PATCH 04/10] unnecessary prints removed --- opengin/core-api/README.md | 1 + opengin/core-api/cmd/server/service.go | 10 ---------- .../db/repository/postgres/data_handler.go | 14 +++++++------- 3 files changed, 8 insertions(+), 17 deletions(-) diff --git a/opengin/core-api/README.md b/opengin/core-api/README.md index 17855f27..9c3c357a 100644 --- a/opengin/core-api/README.md +++ b/opengin/core-api/README.md @@ -30,6 +30,7 @@ cp env.template .env # after updating the required fields to be added to the environment # (you can find the example env configurations here) source .env +# make sure to clean the dbs before running the tests go test -v ./... 
-count=1 -p=1 ./core-service diff --git a/opengin/core-api/cmd/server/service.go b/opengin/core-api/cmd/server/service.go index 1e0d7693..0a0e4c2b 100644 --- a/opengin/core-api/cmd/server/service.go +++ b/opengin/core-api/cmd/server/service.go @@ -193,9 +193,6 @@ func (s *Server) ReadEntity(ctx context.Context, req *pb.ReadEntityRequest) (*pb filtersMap["record_filters"] = recordFilters } - fmt.Printf("Filters map: %v\n", filtersMap) - fmt.Printf("Fields: %v\n", fields) - readOptions := engine.NewReadOptions(filtersMap, fields...) // Process the entity with attributes to get the results map @@ -455,8 +452,6 @@ func extractFieldsFromAttributes(attributes map[string]*pb.TimeBasedValueList) ( log.Printf("Unknown storage type %s for attribute %s", storageType, attrName) } } - fmt.Printf("final fields: %v\n", fields) - fmt.Printf("final records: %v\n", records) return fields, records } @@ -544,11 +539,8 @@ func extractRowsFromTabularAttributes(anyValue *anypb.Any) ([]postgres.RecordFil return nil, fmt.Errorf("no rows found") } - fmt.Printf("row fields : %v\n", rowsField) - listValue := rowsField.GetListValue() - fmt.Printf("list value : %v\n", listValue) if listValue == nil { return nil, fmt.Errorf("rows field is not a list") } @@ -576,8 +568,6 @@ func extractRowsFromTabularAttributes(anyValue *anypb.Any) ([]postgres.RecordFil filters = append(filters, filter) } - fmt.Println("Rows extracted: ", filters) - return filters, nil } diff --git a/opengin/core-api/db/repository/postgres/data_handler.go b/opengin/core-api/db/repository/postgres/data_handler.go index da5ba72c..798e4d66 100644 --- a/opengin/core-api/db/repository/postgres/data_handler.go +++ b/opengin/core-api/db/repository/postgres/data_handler.go @@ -641,8 +641,8 @@ type TabularData struct { // RecordFilter represents a single row level filter type RecordFilter struct { FieldName string - Operator string - Value string + Operator string + Value string } // GetData retrieves data from a table with optional field 
selection and filters, returns it as pb.Any with JSON-formatted tabular data. @@ -730,8 +730,8 @@ func (repo *PostgresRepository) GetData(ctx context.Context, tableName string, f query += " WHERE " + strings.Join(whereClauses, " AND ") } - fmt.Printf("DEBUG: [DataHandler.GetData] query: %s", query) - fmt.Printf("DEBUG: [DataHandler.GetData] args: %v", args) + log.Printf("DEBUG: [DataHandler.GetData] query: %s", query) + log.Printf("DEBUG: [DataHandler.GetData] args: %v", args) // Execute the query rows, err := repo.DB().QueryContext(ctx, query, args...) @@ -779,12 +779,12 @@ func (repo *PostgresRepository) GetData(ctx context.Context, tableName string, f } var tabularRows [][]interface{} - fmt.Printf("DEBUG: [DataHandler.GetData] rows: %v", rows) + log.Printf("DEBUG: [DataHandler.GetData] rows: %v", rows) for rows.Next() { rowValues := make([]interface{}, len(resultColumns)) rowPointers := make([]interface{}, len(resultColumns)) - fmt.Printf("DEBUG: [DataHandler.GetData] rowValues: %v", rowValues) - fmt.Printf("DEBUG: [DataHandler.GetData] rowPointers: %v", rowPointers) + log.Printf("DEBUG: [DataHandler.GetData] rowValues: %v", rowValues) + log.Printf("DEBUG: [DataHandler.GetData] rowPointers: %v", rowPointers) for i := range rowValues { rowPointers[i] = &rowValues[i] } From b888e50c17075c00ab194d19c25e16c9d9903e2a Mon Sep 17 00:00:00 2001 From: ChanukaUOJ Date: Tue, 10 Mar 2026 10:33:54 +0530 Subject: [PATCH 05/10] new tests added to e2e --- opengin/tests/e2e/basic_read_tests.py | 66 ++++++++++++++++++++++++--- 1 file changed, 59 insertions(+), 7 deletions(-) diff --git a/opengin/tests/e2e/basic_read_tests.py b/opengin/tests/e2e/basic_read_tests.py index 18b56292..a5dd0c01 100644 --- a/opengin/tests/e2e/basic_read_tests.py +++ b/opengin/tests/e2e/basic_read_tests.py @@ -166,7 +166,7 @@ def assert_tabular_data(data, expected_columns=None, expected_data=None, field_f if not structure_validation['valid']: raise AssertionError(f"Tabular data structure validation 
failed: {structure_validation['message']}") -def test_api_endpoint_with_validation(url, params=None, expected_fields=None, min_rows=0, test_name="API Test"): +def test_api_endpoint_with_validation(url, params=None, expected_fields=None, body={}, min_rows=0, test_name="API Test"): """ Generic function to test any API endpoint with tabular data validation. @@ -174,6 +174,7 @@ def test_api_endpoint_with_validation(url, params=None, expected_fields=None, mi url: The API endpoint URL params: Query parameters (optional) expected_fields: Expected field names (optional) + body: Request body (optional) min_rows: Minimum number of rows expected test_name: Name for the test (for logging) @@ -182,7 +183,7 @@ def test_api_endpoint_with_validation(url, params=None, expected_fields=None, mi """ print(f" 📋 {test_name}...") - res = requests.get(url, params=params) + res = requests.post(url, params=params, headers={"Content-Type": "application/json"}, json=body) assert res.status_code == 200, f"HTTP {res.status_code}: {res.text}" @@ -395,6 +396,7 @@ def test_generic_validation_examples(): url=base_url, params={"fields": ["id", "name", "age", "department", "salary"]}, expected_fields=["id", "name", "age", "department", "salary"], + body={}, min_rows=5, test_name="All Fields Test" ) @@ -404,6 +406,7 @@ def test_generic_validation_examples(): url=base_url, params={"fields": ["id", "name"]}, expected_fields=["id", "name"], + body={}, min_rows=5, test_name="ID and Name Fields Test" ) @@ -413,10 +416,52 @@ def test_generic_validation_examples(): url=base_url, params={"startTime": "2024-01-01T00:00:00Z", "fields": ["salary", "department"]}, expected_fields=["salary", "department"], + body={}, min_rows=5, test_name="Time Range with Salary/Department Test" ) + # Example 4: Test with row base filtering + test_api_endpoint_with_validation( + url=base_url, + params={"fields": ["id", "name"]}, + expected_fields=["id", "name"], + body={ + "records":[ + { + "field_name": "name", + "operator": 
"contains", + "value": "Jan" + } + ] + }, + min_rows=1, + test_name="Row Base Filtering Test" + ) + + # Example 5: Test with row base filtering + test_api_endpoint_with_validation( + url=base_url, + params={"fields": ["id", "name", "age", "department", "salary"]}, + expected_fields=["id", "name", "age", "department", "salary"], + body={ + "records":[ + { + "field_name": "age", + "operator": "gt", + "value": "28" + }, + { + "field_name": "salary", + "operator": "gte", + "value": "80000" + } + ] + }, + min_rows=2, + test_name="Row Base Filtering Test" + ) + print(" ✅ Generic validation examples completed") def test_comprehensive_validation(): @@ -721,24 +766,28 @@ def test_attribute_fields_combinations(): "name": "All fields (default)", "params": {"fields": []}, "expected_fields": ["id", "e_id", "name", "age", "department", "salary"], + "body": {}, "min_rows": 5 }, { "name": "ID and name only", "params": {"fields": ["e_id", "name"]}, "expected_fields": ["e_id", "name"], + "body": {}, "min_rows": 5 }, { "name": "Salary and department only", "params": {"fields": ["salary", "department"]}, "expected_fields": ["salary", "department"], + "body": {}, "min_rows": 5 }, { "name": "Single field (name)", "params": {"fields": ["name"]}, "expected_fields": ["name"], + "body": {}, "min_rows": 5 }, { @@ -748,13 +797,14 @@ def test_attribute_fields_combinations(): "fields": ["e_id", "name", "salary"] }, "expected_fields": ["e_id", "name", "salary"], + "body": {}, "min_rows": 5 }, ] for test_case in test_cases: print(f" 📋 Testing: {test_case['name']}") - res = requests.get(base_url, params=test_case["params"]) + res = requests.post(base_url, params=test_case["params"], json=test_case["body"]) assert res.status_code == 200, f"{test_case['name']} - HTTP {res.status_code}: {res.text}" @@ -845,6 +895,7 @@ def test_update_entity_attribute(): "name": "All fields (default)", "params": {"fields": []}, "expected_fields": ["id", "e_id", "department", "bonus"], + "body": {}, "min_rows": 5 }, { 
@@ -854,13 +905,14 @@ def test_update_entity_attribute(): "fields": ["department", "bonus"] }, "expected_fields": ["department", "bonus"], + "body": {}, "min_rows": 5 }, ] for test_case in test_cases: print(f" 📋 Testing: {test_case['name']}") - res = requests.get(base_url, params=test_case["params"]) + res = requests.post(base_url, params=test_case["params"], json=test_case["body"]) assert res.status_code == 200, f"{test_case['name']} - HTTP {res.status_code}: {res.text}" @@ -890,7 +942,7 @@ def test_attribute_lookup(): url = f"{READ_API_URL}/{ENTITY_ID}/attributes/employee_data" fields = [] params = {"fields": fields} - res = requests.get(url, params=params) + res = requests.post(url, params=params, json={}) assert res.status_code == 200, f"Failed to get all fields: {res.status_code} - {res.text}" @@ -914,7 +966,7 @@ def test_attribute_lookup(): print(" 📋 Testing specific fields retrieval...") fields = ["id", "name", "salary"] params = {"fields": fields} - res = requests.get(url, params=params) + res = requests.post(url, params=params, json={}) assert res.status_code == 200, f"Failed to get specific fields: {res.status_code} - {res.text}" @@ -941,7 +993,7 @@ def test_attribute_lookup(): "startTime": "2024-01-01T00:00:00Z", "fields": ["id", "name", "department"] } - res = requests.get(url, params=params) + res = requests.post(url, params=params, json={}) assert res.status_code == 200, f"Failed to get filtered data: {res.status_code} - {res.text}" From abf4ae30ea1165895196f5007b27ed74285e293e Mon Sep 17 00:00:00 2001 From: ChanukaUOJ Date: Tue, 10 Mar 2026 13:19:01 +0530 Subject: [PATCH 06/10] bot comment resolved --- opengin/core-api/cmd/server/service.go | 23 +++++++---- .../db/repository/postgres/data_handler.go | 11 ----- .../repository/postgres/data_handler_test.go | 41 +++++++++---------- 3 files changed, 36 insertions(+), 39 deletions(-) diff --git a/opengin/core-api/cmd/server/service.go b/opengin/core-api/cmd/server/service.go index 0a0e4c2b..3e880703 
100644 --- a/opengin/core-api/cmd/server/service.go +++ b/opengin/core-api/cmd/server/service.go @@ -185,7 +185,6 @@ func (s *Server) ReadEntity(ctx context.Context, req *pb.ReadEntityRequest) (*pb // Extract fields and record filters from the request attributes based on storage type fields, recordFilters := extractFieldsFromAttributes(req.Entity.Attributes) log.Printf("Extracted fields from attributes: %v", fields) - log.Printf("Extracted record filters from attributes: %v", recordFilters) // Convert record filters to the filters map for ReadOptions filtersMap := make(map[string]interface{}) @@ -553,18 +552,28 @@ func extractRowsFromTabularAttributes(anyValue *anypb.Any) ([]postgres.RecordFil } filter := postgres.RecordFilter{} - if fieldName, ok := datum.Fields["field_name"]; ok { - filter.FieldName = fieldName.GetStringValue() + fieldName, ok := datum.Fields["field_name"] + if !ok || fieldName.GetStringValue() == "" { + log.Printf("Warning: skipping record filter, missing or empty 'field_name'") + continue } - if operator, ok := datum.Fields["operator"]; ok { - filter.Operator = operator.GetStringValue() + operator, ok := datum.Fields["operator"] + if !ok || operator.GetStringValue() == "" { + log.Printf("Warning: skipping record filter, missing or empty 'operator'") + continue } - if value, ok := datum.Fields["value"]; ok { - filter.Value = value.GetStringValue() + value, ok := datum.Fields["value"] + if !ok || value.GetStringValue() == "" { + log.Printf("Warning: skipping record filter, missing or empty 'value'") + continue } + filter.FieldName = fieldName.GetStringValue() + filter.Operator = operator.GetStringValue() + filter.Value = value.GetStringValue() + filters = append(filters, filter) } diff --git a/opengin/core-api/db/repository/postgres/data_handler.go b/opengin/core-api/db/repository/postgres/data_handler.go index 798e4d66..5361683a 100644 --- a/opengin/core-api/db/repository/postgres/data_handler.go +++ 
b/opengin/core-api/db/repository/postgres/data_handler.go @@ -682,28 +682,20 @@ func (repo *PostgresRepository) GetData(ctx context.Context, tableName string, f sqlOp := "=" switch rf.Operator { case "eq": - log.Printf("DEBUG: [DataHandler.GetData] rf.Operator: %s", rf.Operator) sqlOp = "=" case "neq": - log.Printf("DEBUG: [DataHandler.GetData] rf.Operator: %s", rf.Operator) sqlOp = "!=" case "gt": - log.Printf("DEBUG: [DataHandler.GetData] rf.Operator: %s", rf.Operator) sqlOp = ">" case "lt": - log.Printf("DEBUG: [DataHandler.GetData] rf.Operator: %s", rf.Operator) sqlOp = "<" case "lte": - log.Printf("DEBUG: [DataHandler.GetData] rf.Operator: %s", rf.Operator) sqlOp = "<=" case "gte": - log.Printf("DEBUG: [DataHandler.GetData] rf.Operator: %s", rf.Operator) sqlOp = ">=" case "contains": - log.Printf("DEBUG: [DataHandler.GetData] rf.Operator: %s", rf.Operator) sqlOp = "ILIKE" case "notcontains": - log.Printf("DEBUG: [DataHandler.GetData] rf.Operator: %s", rf.Operator) sqlOp = "NOT ILIKE" } @@ -730,9 +722,6 @@ func (repo *PostgresRepository) GetData(ctx context.Context, tableName string, f query += " WHERE " + strings.Join(whereClauses, " AND ") } - log.Printf("DEBUG: [DataHandler.GetData] query: %s", query) - log.Printf("DEBUG: [DataHandler.GetData] args: %v", args) - // Execute the query rows, err := repo.DB().QueryContext(ctx, query, args...) 
if err != nil { diff --git a/opengin/core-api/db/repository/postgres/data_handler_test.go b/opengin/core-api/db/repository/postgres/data_handler_test.go index 02a32c34..253023f1 100644 --- a/opengin/core-api/db/repository/postgres/data_handler_test.go +++ b/opengin/core-api/db/repository/postgres/data_handler_test.go @@ -695,7 +695,7 @@ func TestGetDataWithRowFiltering(t *testing.T) { // Unmarshal the Any data to get the JSON string var structValue structpb.Struct err = anyData.UnmarshalTo(&structValue) - assert.NoError(t, err) + assert.NoError(t, err) jsonStr := structValue.Fields["data"].GetStringValue() assert.NotEmpty(t, jsonStr) @@ -722,7 +722,7 @@ func TestGetDataWithRowFiltering(t *testing.T) { assert.Equal(t, "col2", columns[1]) assert.Equal(t, "col4", columns[2]) assert.Len(t, rows, 3) - }) + }) t.Run("Test 2: Record filtering lte (single)", func(t *testing.T) { row_filter := map[string]interface{}{ @@ -738,7 +738,7 @@ func TestGetDataWithRowFiltering(t *testing.T) { // Unmarshal the Any data to get the JSON string var structValue structpb.Struct err = anyData.UnmarshalTo(&structValue) - assert.NoError(t, err) + assert.NoError(t, err) jsonStr := structValue.Fields["data"].GetStringValue() assert.NotEmpty(t, jsonStr) @@ -760,7 +760,7 @@ func TestGetDataWithRowFiltering(t *testing.T) { columns := firstColumnsInterface.([]interface{}) rows := firstRowsInterface.([]interface{}) - + // since the fields are not specified, all the fields should be returned assert.Equal(t, "id", columns[0]) assert.Equal(t, "col1", columns[1]) @@ -768,7 +768,7 @@ func TestGetDataWithRowFiltering(t *testing.T) { assert.Equal(t, "col3", columns[3]) assert.Equal(t, "col4", columns[4]) assert.Len(t, rows, 2) - }) + }) t.Run("Test 3: Record filtering gte (single)", func(t *testing.T) { row_filter := map[string]interface{}{ @@ -784,7 +784,7 @@ func TestGetDataWithRowFiltering(t *testing.T) { // Unmarshal the Any data to get the JSON string var structValue structpb.Struct err = 
anyData.UnmarshalTo(&structValue) - assert.NoError(t, err) + assert.NoError(t, err) jsonStr := structValue.Fields["data"].GetStringValue() assert.NotEmpty(t, jsonStr) @@ -806,14 +806,14 @@ func TestGetDataWithRowFiltering(t *testing.T) { columns := firstColumnsInterface.([]interface{}) rows := firstRowsInterface.([]interface{}) - + assert.Equal(t, "id", columns[0]) assert.Equal(t, "col1", columns[1]) assert.Equal(t, "col2", columns[2]) assert.Equal(t, "col3", columns[3]) assert.Equal(t, "col4", columns[4]) assert.Len(t, rows, 4) - }) + }) t.Run("Test 4: Record filtering lt (single)", func(t *testing.T) { row_filter := map[string]interface{}{ @@ -829,7 +829,7 @@ func TestGetDataWithRowFiltering(t *testing.T) { // Unmarshal the Any data to get the JSON string var structValue structpb.Struct err = anyData.UnmarshalTo(&structValue) - assert.NoError(t, err) + assert.NoError(t, err) jsonStr := structValue.Fields["data"].GetStringValue() assert.NotEmpty(t, jsonStr) @@ -851,14 +851,14 @@ func TestGetDataWithRowFiltering(t *testing.T) { columns := firstColumnsInterface.([]interface{}) rows := firstRowsInterface.([]interface{}) - + assert.Equal(t, "id", columns[0]) assert.Equal(t, "col1", columns[1]) assert.Equal(t, "col2", columns[2]) assert.Equal(t, "col3", columns[3]) assert.Equal(t, "col4", columns[4]) assert.Len(t, rows, 1) - }) + }) t.Run("Test 5: Record filtering gt (single)", func(t *testing.T) { row_filter := map[string]interface{}{ @@ -874,7 +874,7 @@ func TestGetDataWithRowFiltering(t *testing.T) { // Unmarshal the Any data to get the JSON string var structValue structpb.Struct err = anyData.UnmarshalTo(&structValue) - assert.NoError(t, err) + assert.NoError(t, err) jsonStr := structValue.Fields["data"].GetStringValue() assert.NotEmpty(t, jsonStr) @@ -896,7 +896,7 @@ func TestGetDataWithRowFiltering(t *testing.T) { columns := firstColumnsInterface.([]interface{}) rows := firstRowsInterface.([]interface{}) - + assert.Equal(t, "id", columns[0]) assert.Equal(t, 
"col1", columns[1]) assert.Equal(t, "col2", columns[2]) @@ -919,7 +919,7 @@ func TestGetDataWithRowFiltering(t *testing.T) { // Unmarshal the Any data to get the JSON string var structValue structpb.Struct err = anyData.UnmarshalTo(&structValue) - assert.NoError(t, err) + assert.NoError(t, err) jsonStr := structValue.Fields["data"].GetStringValue() assert.NotEmpty(t, jsonStr) @@ -941,7 +941,7 @@ func TestGetDataWithRowFiltering(t *testing.T) { columns := firstColumnsInterface.([]interface{}) rows := firstRowsInterface.([]interface{}) - + assert.Equal(t, "id", columns[0]) assert.Equal(t, "col1", columns[1]) assert.Equal(t, "col2", columns[2]) @@ -964,7 +964,7 @@ func TestGetDataWithRowFiltering(t *testing.T) { // Unmarshal the Any data to get the JSON string var structValue structpb.Struct err = anyData.UnmarshalTo(&structValue) - assert.NoError(t, err) + assert.NoError(t, err) jsonStr := structValue.Fields["data"].GetStringValue() assert.NotEmpty(t, jsonStr) @@ -986,7 +986,7 @@ func TestGetDataWithRowFiltering(t *testing.T) { columns := firstColumnsInterface.([]interface{}) rows := firstRowsInterface.([]interface{}) - + assert.Equal(t, "col4", columns[0]) assert.Len(t, rows, 5) }) @@ -1005,7 +1005,7 @@ func TestGetDataWithRowFiltering(t *testing.T) { // Unmarshal the Any data to get the JSON string var structValue structpb.Struct err = anyData.UnmarshalTo(&structValue) - assert.NoError(t, err) + assert.NoError(t, err) jsonStr := structValue.Fields["data"].GetStringValue() assert.NotEmpty(t, jsonStr) @@ -1023,7 +1023,7 @@ func TestGetDataWithRowFiltering(t *testing.T) { firstRowsInterface, hasFirstRows := tabularData["rows"] assert.True(t, hasFirstRows, "rows key should exist") - assert.Nil(t, firstRowsInterface, "rows should not be nil") + assert.Nil(t, firstRowsInterface, "rows should be nil") columns := firstColumnsInterface.([]interface{}) assert.Equal(t, "col4", columns[0]) @@ -1045,7 +1045,7 @@ func TestGetDataWithRowFiltering(t *testing.T) { // Unmarshal the 
Any data to get the JSON string var structValue structpb.Struct err = anyData.UnmarshalTo(&structValue) - assert.NoError(t, err) + assert.NoError(t, err) jsonStr := structValue.Fields["data"].GetStringValue() assert.NotEmpty(t, jsonStr) @@ -1073,4 +1073,3 @@ func TestGetDataWithRowFiltering(t *testing.T) { }) } - From 75c5f8cffb9c7afedd677c3d84ec5a671947a5ee Mon Sep 17 00:00:00 2001 From: ChanukaUOJ Date: Fri, 20 Mar 2026 17:14:15 +0530 Subject: [PATCH 07/10] core service comments resolved --- opengin/core-api/cmd/server/service.go | 18 +++++++++--------- .../db/repository/postgres/data_handler.go | 12 +++++------- 2 files changed, 14 insertions(+), 16 deletions(-) diff --git a/opengin/core-api/cmd/server/service.go b/opengin/core-api/cmd/server/service.go index 3e880703..cf52edd7 100644 --- a/opengin/core-api/cmd/server/service.go +++ b/opengin/core-api/cmd/server/service.go @@ -407,7 +407,7 @@ func (s *Server) ReadEntities(ctx context.Context, req *pb.ReadEntityRequest) (* // FIXME: https://github.com/LDFLK/nexoan/issues/285 func extractFieldsFromAttributes(attributes map[string]*pb.TimeBasedValueList) ([]string, []postgres.RecordFilter) { var fields []string - var records []postgres.RecordFilter + var record_filters []postgres.RecordFilter for attrName, attrValueList := range attributes { if attrValueList == nil || len(attrValueList.Values) == 0 { @@ -436,8 +436,8 @@ func extractFieldsFromAttributes(attributes map[string]*pb.TimeBasedValueList) ( log.Printf("Warning: could not extract columns from tabular attribute %s: %v", attrName, err) } - if rows, err := extractRowsFromTabularAttributes(value.Value); err == nil { - records = append(records, rows...) + if filters, err := extractRecordFiltersFromTabularAttributes(value.Value); err == nil { + record_filters = append(record_filters, filters...) 
} else { log.Printf("Warning: could not extract rows from tabular attribute %s: %v", attrName, err) } @@ -451,7 +451,7 @@ func extractFieldsFromAttributes(attributes map[string]*pb.TimeBasedValueList) ( log.Printf("Unknown storage type %s for attribute %s", storageType, attrName) } } - return fields, records + return fields, record_filters } // determineStorageTypeFromValue determines the storage type from a protobuf Any value @@ -521,8 +521,8 @@ func extractColumnsFromTabularAttribute(anyValue *anypb.Any) ([]string, error) { return columns, nil } -// extractRowsFromTabularAttributes extracts row filters from a tabular attribute value -func extractRowsFromTabularAttributes(anyValue *anypb.Any) ([]postgres.RecordFilter, error) { +// extractRecordFiltersFromTabularAttributes extracts row filters from a tabular attribute value +func extractRecordFiltersFromTabularAttributes(anyValue *anypb.Any) ([]postgres.RecordFilter, error) { message, err := anyValue.UnmarshalNew() if err != nil { return nil, fmt.Errorf("failed to unmarshal Any value: %v", err) @@ -538,14 +538,14 @@ func extractRowsFromTabularAttributes(anyValue *anypb.Any) ([]postgres.RecordFil return nil, fmt.Errorf("no rows found") } - listValue := rowsField.GetListValue() + rowFieldValue := rowsField.GetListValue() - if listValue == nil { + if rowFieldValue == nil { return nil, fmt.Errorf("rows field is not a list") } var filters []postgres.RecordFilter - for _, val := range listValue.Values { + for _, val := range rowFieldValue.Values { datum := val.GetStructValue() if datum == nil { continue diff --git a/opengin/core-api/db/repository/postgres/data_handler.go b/opengin/core-api/db/repository/postgres/data_handler.go index 5361683a..65d84ed4 100644 --- a/opengin/core-api/db/repository/postgres/data_handler.go +++ b/opengin/core-api/db/repository/postgres/data_handler.go @@ -638,11 +638,11 @@ type TabularData struct { Rows [][]interface{} `json:"rows"` } -// RecordFilter represents a single row level filter +// 
RecordFilter represents a filter applied on the values in a particular column of the table type RecordFilter struct { - FieldName string - Operator string - Value string + FieldName string `json:"field_name"` + Operator string `json:"operator"` + Value string `json:"value"` } // GetData retrieves data from a table with optional field selection and filters, returns it as pb.Any with JSON-formatted tabular data. @@ -768,12 +768,10 @@ func (repo *PostgresRepository) GetData(ctx context.Context, tableName string, f } var tabularRows [][]interface{} - log.Printf("DEBUG: [DataHandler.GetData] rows: %v", rows) for rows.Next() { rowValues := make([]interface{}, len(resultColumns)) rowPointers := make([]interface{}, len(resultColumns)) - log.Printf("DEBUG: [DataHandler.GetData] rowValues: %v", rowValues) - log.Printf("DEBUG: [DataHandler.GetData] rowPointers: %v", rowPointers) + for i := range rowValues { rowPointers[i] = &rowValues[i] } From d104b269537055a8c8bee04d903388b9d3fc0b04 Mon Sep 17 00:00:00 2001 From: ChanukaUOJ Date: Thu, 26 Mar 2026 18:42:51 +0530 Subject: [PATCH 08/10] read API contract updated --- opengin/contracts/rest/read_api.yaml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/opengin/contracts/rest/read_api.yaml b/opengin/contracts/rest/read_api.yaml index b8787144..2192a72d 100644 --- a/opengin/contracts/rest/read_api.yaml +++ b/opengin/contracts/rest/read_api.yaml @@ -159,6 +159,23 @@ paths: type: string default: ["*"] requestBody: + description: | + This request body allows you to filter the attribute values based on row-level criteria. + You can specify multiple filters in the `records` array to refine your search. + Each filter consists of: + - `field_name`: The name of the field to filter on. + - `operator`: The comparison operator to use. + - `value`: The value to compare against. 
+ + Accepted operators: + - `eq`: Equal to (default) + - `neq`: Not equal to + - `gt`: Greater than + - `lt`: Less than + - `gte`: Greater than or equal to + - `lte`: Less than or equal to + - `contains`: Contains the specified string + - `notcontains`: Does not contain the specified string required: false content: application/json: From 8387ec50fd1a5f8d1d1ef69d19e2a35ab9b23895 Mon Sep 17 00:00:00 2001 From: ChanukaUOJ Date: Fri, 27 Mar 2026 10:30:31 +0530 Subject: [PATCH 09/10] E2E test enhanced to test row base filtering --- opengin/tests/e2e/basic_read_tests.py | 37 ++++++++++++++++++++++++++- 1 file changed, 36 insertions(+), 1 deletion(-) diff --git a/opengin/tests/e2e/basic_read_tests.py b/opengin/tests/e2e/basic_read_tests.py index a5dd0c01..2f5b73b3 100644 --- a/opengin/tests/e2e/basic_read_tests.py +++ b/opengin/tests/e2e/basic_read_tests.py @@ -432,7 +432,42 @@ def test_generic_validation_examples(): "field_name": "name", "operator": "contains", "value": "Jan" - } + }, + { + "field_name": "age", + "operator": "eq", + "value": "25" + }, + { + "field_name": "department", + "operator": "notcontains", + "value": "eng" + }, + { + "field_name": "department", + "operator": "neq", + "value": "Engineering" + }, + { + "field_name": "salary", + "operator": "gte", + "value": "65000" + }, + { + "field_name": "salary", + "operator": "gt", + "value": "35000" + }, + { + "field_name": "salary", + "operator": "lte", + "value": "65000" + }, + { + "field_name": "salary", + "operator": "lt", + "value": "95000" + }, ] }, min_rows=1, From 681e5185b495bf56e35cee80f4e354db2b567425 Mon Sep 17 00:00:00 2001 From: ChanukaUOJ Date: Fri, 27 Mar 2026 15:33:20 +0530 Subject: [PATCH 10/10] record filter comment in data handler --- opengin/core-api/db/repository/postgres/data_handler.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opengin/core-api/db/repository/postgres/data_handler.go b/opengin/core-api/db/repository/postgres/data_handler.go index 65d84ed4..3628d55b 
100644 --- a/opengin/core-api/db/repository/postgres/data_handler.go +++ b/opengin/core-api/db/repository/postgres/data_handler.go @@ -641,7 +641,7 @@ type TabularData struct { // RecordFilter represents a filter applied on the values in a particular column of the table type RecordFilter struct { FieldName string `json:"field_name"` - Operator string `json:"operator"` + Operator string `json:"operator"` // accepted operators: 'eq','neq','gt','lt','gte','lte','contains','notcontains' Value string `json:"value"` }