From 02db81390d1d606b22645ba94929daa5f9aae8e5 Mon Sep 17 00:00:00 2001 From: Andrew Vasilyev Date: Sun, 19 Apr 2026 15:06:53 +0200 Subject: [PATCH 01/16] feat: add comprehensive mock-based handler tests (189 tests, 33 files) Phase 1-8: 85 read/write handler tests with mock backend infrastructure Error paths: 49 tests covering backend error propagation for all handler groups Not-connected: 29 tests verifying Connected/ConnectedForWrite guards JSON format: 26 tests validating JSON output for show/list handlers Production code changes: - Added Func fields to MockBackend for 8 agent-editor write methods - Fixed ContainerID parameter semantics in withContainer and all call sites --- internal/marketplace/types.go | 2 +- mdl/backend/mock/backend.go | 16 +- mdl/backend/mock/mock_infrastructure.go | 61 +- mdl/executor/cmd_agenteditor_mock_test.go | 252 +++++++++ mdl/executor/cmd_agenteditor_models.go | 20 +- mdl/executor/cmd_agenteditor_write.go | 20 +- mdl/executor/cmd_associations_mock_test.go | 77 +++ mdl/executor/cmd_businessevents_mock_test.go | 79 +++ mdl/executor/cmd_constants_mock_test.go | 106 ++++ mdl/executor/cmd_datatransformer_mock_test.go | 60 ++ mdl/executor/cmd_dbconnection_mock_test.go | 60 ++ mdl/executor/cmd_entities_mock_test.go | 58 ++ mdl/executor/cmd_enumerations_mock_test.go | 164 +++--- mdl/executor/cmd_error_mock_test.go | 474 ++++++++++++++++ mdl/executor/cmd_export_mappings_mock_test.go | 34 ++ mdl/executor/cmd_fragments_mock_test.go | 31 + .../cmd_imagecollections_mock_test.go | 61 ++ mdl/executor/cmd_import_mappings_mock_test.go | 34 ++ mdl/executor/cmd_javaactions_mock_test.go | 58 ++ .../cmd_javascript_actions_mock_test.go | 59 ++ mdl/executor/cmd_json_mock_test.go | 528 ++++++++++++++++++ mdl/executor/cmd_jsonstructures_mock_test.go | 35 ++ mdl/executor/cmd_mermaid_mock_test.go | 52 ++ mdl/executor/cmd_microflows_mock_test.go | 113 ++++ mdl/executor/cmd_misc_mock_test.go | 35 ++ mdl/executor/cmd_modules_mock_test.go | 46 ++ 
mdl/executor/cmd_navigation_mock_test.go | 92 +++ mdl/executor/cmd_notconnected_mock_test.go | 181 ++++++ mdl/executor/cmd_odata_mock_test.go | 126 +++++ mdl/executor/cmd_pages_mock_test.go | 93 +++ mdl/executor/cmd_published_rest_mock_test.go | 70 +++ mdl/executor/cmd_rest_clients_mock_test.go | 63 +++ mdl/executor/cmd_security_mock_test.go | 174 ++++++ mdl/executor/cmd_settings_mock_test.go | 48 ++ mdl/executor/cmd_workflows_mock_test.go | 52 ++ mdl/executor/cmd_write_handlers_mock_test.go | 328 +++++++++++ mdl/executor/mock_test_helpers_test.go | 273 +++++++++ mdl/visitor/visitor_agenteditor.go | 1 - 38 files changed, 3904 insertions(+), 132 deletions(-) create mode 100644 mdl/executor/cmd_agenteditor_mock_test.go create mode 100644 mdl/executor/cmd_associations_mock_test.go create mode 100644 mdl/executor/cmd_businessevents_mock_test.go create mode 100644 mdl/executor/cmd_constants_mock_test.go create mode 100644 mdl/executor/cmd_datatransformer_mock_test.go create mode 100644 mdl/executor/cmd_dbconnection_mock_test.go create mode 100644 mdl/executor/cmd_entities_mock_test.go create mode 100644 mdl/executor/cmd_error_mock_test.go create mode 100644 mdl/executor/cmd_export_mappings_mock_test.go create mode 100644 mdl/executor/cmd_fragments_mock_test.go create mode 100644 mdl/executor/cmd_imagecollections_mock_test.go create mode 100644 mdl/executor/cmd_import_mappings_mock_test.go create mode 100644 mdl/executor/cmd_javaactions_mock_test.go create mode 100644 mdl/executor/cmd_javascript_actions_mock_test.go create mode 100644 mdl/executor/cmd_json_mock_test.go create mode 100644 mdl/executor/cmd_jsonstructures_mock_test.go create mode 100644 mdl/executor/cmd_mermaid_mock_test.go create mode 100644 mdl/executor/cmd_microflows_mock_test.go create mode 100644 mdl/executor/cmd_misc_mock_test.go create mode 100644 mdl/executor/cmd_modules_mock_test.go create mode 100644 mdl/executor/cmd_navigation_mock_test.go create mode 100644 
mdl/executor/cmd_notconnected_mock_test.go create mode 100644 mdl/executor/cmd_odata_mock_test.go create mode 100644 mdl/executor/cmd_pages_mock_test.go create mode 100644 mdl/executor/cmd_published_rest_mock_test.go create mode 100644 mdl/executor/cmd_rest_clients_mock_test.go create mode 100644 mdl/executor/cmd_security_mock_test.go create mode 100644 mdl/executor/cmd_settings_mock_test.go create mode 100644 mdl/executor/cmd_workflows_mock_test.go create mode 100644 mdl/executor/cmd_write_handlers_mock_test.go create mode 100644 mdl/executor/mock_test_helpers_test.go diff --git a/internal/marketplace/types.go b/internal/marketplace/types.go index b73c1d76..37d54a9e 100644 --- a/internal/marketplace/types.go +++ b/internal/marketplace/types.go @@ -18,7 +18,7 @@ var BaseURL = "https://marketplace-api.mendix.com" type Content struct { ContentID int `json:"contentId"` Publisher string `json:"publisher"` - Type string `json:"type"` // "Module", "Widget", "Theme", "Starter App", ... + Type string `json:"type"` // "Module", "Widget", "Theme", "Starter App", ... Categories []Category `json:"categories"` SupportCategory string `json:"supportCategory"` // "Platform", "Community", "Deprecated", ... 
LicenseURL string `json:"licenseUrl,omitempty"` diff --git a/mdl/backend/mock/backend.go b/mdl/backend/mock/backend.go index a330aa1e..b6dc8a39 100644 --- a/mdl/backend/mock/backend.go +++ b/mdl/backend/mock/backend.go @@ -260,8 +260,16 @@ type MockBackend struct { FindAllCustomWidgetTypesFunc func(widgetID string) ([]*mpr.RawCustomWidgetType, error) // AgentEditorBackend - ListAgentEditorModelsFunc func() ([]*agenteditor.Model, error) - ListAgentEditorKnowledgeBasesFunc func() ([]*agenteditor.KnowledgeBase, error) - ListAgentEditorConsumedMCPServicesFunc func() ([]*agenteditor.ConsumedMCPService, error) - ListAgentEditorAgentsFunc func() ([]*agenteditor.Agent, error) + ListAgentEditorModelsFunc func() ([]*agenteditor.Model, error) + ListAgentEditorKnowledgeBasesFunc func() ([]*agenteditor.KnowledgeBase, error) + ListAgentEditorConsumedMCPServicesFunc func() ([]*agenteditor.ConsumedMCPService, error) + ListAgentEditorAgentsFunc func() ([]*agenteditor.Agent, error) + CreateAgentEditorModelFunc func(m *agenteditor.Model) error + DeleteAgentEditorModelFunc func(id string) error + CreateAgentEditorKnowledgeBaseFunc func(kb *agenteditor.KnowledgeBase) error + DeleteAgentEditorKnowledgeBaseFunc func(id string) error + CreateAgentEditorConsumedMCPServiceFunc func(svc *agenteditor.ConsumedMCPService) error + DeleteAgentEditorConsumedMCPServiceFunc func(id string) error + CreateAgentEditorAgentFunc func(a *agenteditor.Agent) error + DeleteAgentEditorAgentFunc func(id string) error } diff --git a/mdl/backend/mock/mock_infrastructure.go b/mdl/backend/mock/mock_infrastructure.go index e575f69d..f0b6f9bc 100644 --- a/mdl/backend/mock/mock_infrastructure.go +++ b/mdl/backend/mock/mock_infrastructure.go @@ -188,13 +188,58 @@ func (m *MockBackend) ListAgentEditorAgents() ([]*agenteditor.Agent, error) { return nil, nil } -func (m *MockBackend) CreateAgentEditorModel(_ *agenteditor.Model) error { return nil } -func (m *MockBackend) DeleteAgentEditorModel(_ string) error { return nil 
} -func (m *MockBackend) CreateAgentEditorKnowledgeBase(_ *agenteditor.KnowledgeBase) error { return nil } -func (m *MockBackend) DeleteAgentEditorKnowledgeBase(_ string) error { return nil } -func (m *MockBackend) CreateAgentEditorConsumedMCPService(_ *agenteditor.ConsumedMCPService) error { +func (m *MockBackend) CreateAgentEditorModel(model *agenteditor.Model) error { + if m.CreateAgentEditorModelFunc != nil { + return m.CreateAgentEditorModelFunc(model) + } + return nil +} + +func (m *MockBackend) DeleteAgentEditorModel(id string) error { + if m.DeleteAgentEditorModelFunc != nil { + return m.DeleteAgentEditorModelFunc(id) + } + return nil +} + +func (m *MockBackend) CreateAgentEditorKnowledgeBase(kb *agenteditor.KnowledgeBase) error { + if m.CreateAgentEditorKnowledgeBaseFunc != nil { + return m.CreateAgentEditorKnowledgeBaseFunc(kb) + } + return nil +} + +func (m *MockBackend) DeleteAgentEditorKnowledgeBase(id string) error { + if m.DeleteAgentEditorKnowledgeBaseFunc != nil { + return m.DeleteAgentEditorKnowledgeBaseFunc(id) + } + return nil +} + +func (m *MockBackend) CreateAgentEditorConsumedMCPService(svc *agenteditor.ConsumedMCPService) error { + if m.CreateAgentEditorConsumedMCPServiceFunc != nil { + return m.CreateAgentEditorConsumedMCPServiceFunc(svc) + } + return nil +} + +func (m *MockBackend) DeleteAgentEditorConsumedMCPService(id string) error { + if m.DeleteAgentEditorConsumedMCPServiceFunc != nil { + return m.DeleteAgentEditorConsumedMCPServiceFunc(id) + } + return nil +} + +func (m *MockBackend) CreateAgentEditorAgent(a *agenteditor.Agent) error { + if m.CreateAgentEditorAgentFunc != nil { + return m.CreateAgentEditorAgentFunc(a) + } + return nil +} + +func (m *MockBackend) DeleteAgentEditorAgent(id string) error { + if m.DeleteAgentEditorAgentFunc != nil { + return m.DeleteAgentEditorAgentFunc(id) + } return nil } -func (m *MockBackend) DeleteAgentEditorConsumedMCPService(_ string) error { return nil } -func (m *MockBackend) 
CreateAgentEditorAgent(_ *agenteditor.Agent) error { return nil } -func (m *MockBackend) DeleteAgentEditorAgent(_ string) error { return nil } diff --git a/mdl/executor/cmd_agenteditor_mock_test.go b/mdl/executor/cmd_agenteditor_mock_test.go new file mode 100644 index 00000000..74675111 --- /dev/null +++ b/mdl/executor/cmd_agenteditor_mock_test.go @@ -0,0 +1,252 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/agenteditor" +) + +func TestShowAgentEditorModels_Mock(t *testing.T) { + mod := mkModule("M") + m1 := &agenteditor.Model{ + BaseElement: model.BaseElement{ID: nextID("aem")}, + ContainerID: mod.ID, + Name: "GPT4", + Provider: "MxCloudGenAI", + DisplayName: "GPT-4 Turbo", + Key: &agenteditor.ConstantRef{QualifiedName: "M.APIKey"}, + } + + h := mkHierarchy(mod) + withContainer(h, m1.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListAgentEditorModelsFunc: func() ([]*agenteditor.Model, error) { return []*agenteditor.Model{m1}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showAgentEditorModels(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "Qualified Name") + assertContainsStr(t, out, "Module") + assertContainsStr(t, out, "Provider") + assertContainsStr(t, out, "Key Constant") + assertContainsStr(t, out, "Display Name") + assertContainsStr(t, out, "M.GPT4") + assertContainsStr(t, out, "MxCloudGenAI") + assertContainsStr(t, out, "M.APIKey") + assertContainsStr(t, out, "GPT-4 Turbo") +} + +func TestDescribeAgentEditorModel_Mock(t *testing.T) { + mod := mkModule("M") + m1 := &agenteditor.Model{ + BaseElement: model.BaseElement{ID: nextID("aem")}, + ContainerID: mod.ID, + Name: "GPT4", + Provider: "MxCloudGenAI", + Key: 
&agenteditor.ConstantRef{QualifiedName: "M.APIKey"}, + } + + h := mkHierarchy(mod) + withContainer(h, m1.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListAgentEditorModelsFunc: func() ([]*agenteditor.Model, error) { return []*agenteditor.Model{m1}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeAgentEditorModel(ctx, ast.QualifiedName{Module: "M", Name: "GPT4"})) + + out := buf.String() + assertContainsStr(t, out, "CREATE MODEL") + assertContainsStr(t, out, "Provider") + assertContainsStr(t, out, "Key") +} + +func TestShowAgentEditorAgents_Mock(t *testing.T) { + mod := mkModule("M") + a1 := &agenteditor.Agent{ + BaseElement: model.BaseElement{ID: nextID("aea")}, + ContainerID: mod.ID, + Name: "MyAgent", + UsageType: "Chat", + Model: &agenteditor.DocRef{QualifiedName: "M.GPT4"}, + Tools: []agenteditor.AgentTool{{ID: "t1", Enabled: true}}, + KBTools: []agenteditor.AgentKBTool{{ID: "kb1", Enabled: true}}, + } + + h := mkHierarchy(mod) + withContainer(h, a1.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListAgentEditorAgentsFunc: func() ([]*agenteditor.Agent, error) { return []*agenteditor.Agent{a1}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showAgentEditorAgents(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "Qualified Name") + assertContainsStr(t, out, "Usage") + assertContainsStr(t, out, "Model") + assertContainsStr(t, out, "Tools") + assertContainsStr(t, out, "KBs") + assertContainsStr(t, out, "M.MyAgent") + assertContainsStr(t, out, "Chat") + assertContainsStr(t, out, "M.GPT4") +} + +func TestDescribeAgentEditorAgent_Mock(t *testing.T) { + mod := mkModule("M") + a1 := &agenteditor.Agent{ + BaseElement: model.BaseElement{ID: nextID("aea")}, + ContainerID: mod.ID, + Name: "MyAgent", + UsageType: "Chat", + Model: 
&agenteditor.DocRef{QualifiedName: "M.GPT4"}, + } + + h := mkHierarchy(mod) + withContainer(h, a1.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListAgentEditorAgentsFunc: func() ([]*agenteditor.Agent, error) { return []*agenteditor.Agent{a1}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeAgentEditorAgent(ctx, ast.QualifiedName{Module: "M", Name: "MyAgent"})) + + out := buf.String() + assertContainsStr(t, out, "CREATE AGENT") + assertContainsStr(t, out, "UsageType") + assertContainsStr(t, out, "Model") +} + +func TestShowAgentEditorKnowledgeBases_Mock(t *testing.T) { + mod := mkModule("M") + kb := &agenteditor.KnowledgeBase{ + BaseElement: model.BaseElement{ID: nextID("aekb")}, + ContainerID: mod.ID, + Name: "MyKB", + Provider: "MxCloudGenAI", + Key: &agenteditor.ConstantRef{QualifiedName: "M.KBKey"}, + ModelDisplayName: "text-embedding-ada-002", + } + + h := mkHierarchy(mod) + withContainer(h, kb.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListAgentEditorKnowledgeBasesFunc: func() ([]*agenteditor.KnowledgeBase, error) { return []*agenteditor.KnowledgeBase{kb}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showAgentEditorKnowledgeBases(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "Qualified Name") + assertContainsStr(t, out, "Provider") + assertContainsStr(t, out, "Key Constant") + assertContainsStr(t, out, "Embedding Model") + assertContainsStr(t, out, "M.MyKB") + assertContainsStr(t, out, "MxCloudGenAI") + assertContainsStr(t, out, "M.KBKey") + assertContainsStr(t, out, "text-embedding-ada-002") +} + +func TestDescribeAgentEditorKnowledgeBase_Mock(t *testing.T) { + mod := mkModule("M") + kb := &agenteditor.KnowledgeBase{ + BaseElement: model.BaseElement{ID: nextID("aekb")}, + ContainerID: mod.ID, + Name: "MyKB", + Provider: 
"MxCloudGenAI", + Key: &agenteditor.ConstantRef{QualifiedName: "M.KBKey"}, + } + + h := mkHierarchy(mod) + withContainer(h, kb.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListAgentEditorKnowledgeBasesFunc: func() ([]*agenteditor.KnowledgeBase, error) { return []*agenteditor.KnowledgeBase{kb}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeAgentEditorKnowledgeBase(ctx, ast.QualifiedName{Module: "M", Name: "MyKB"})) + + out := buf.String() + assertContainsStr(t, out, "CREATE KNOWLEDGE BASE") + assertContainsStr(t, out, "Provider") +} + +func TestShowAgentEditorConsumedMCPServices_Mock(t *testing.T) { + mod := mkModule("M") + svc := &agenteditor.ConsumedMCPService{ + BaseElement: model.BaseElement{ID: nextID("aemcp")}, + ContainerID: mod.ID, + Name: "MySvc", + ProtocolVersion: "2025-03-26", + Version: "1.0.0", + ConnectionTimeoutSeconds: 30, + } + + h := mkHierarchy(mod) + withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListAgentEditorConsumedMCPServicesFunc: func() ([]*agenteditor.ConsumedMCPService, error) { return []*agenteditor.ConsumedMCPService{svc}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showAgentEditorConsumedMCPServices(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "Qualified Name") + assertContainsStr(t, out, "Protocol") + assertContainsStr(t, out, "Version") + assertContainsStr(t, out, "Timeout") + assertContainsStr(t, out, "M.MySvc") + assertContainsStr(t, out, "2025-03-26") + assertContainsStr(t, out, "1.0.0") +} + +func TestDescribeAgentEditorConsumedMCPService_Mock(t *testing.T) { + mod := mkModule("M") + svc := &agenteditor.ConsumedMCPService{ + BaseElement: model.BaseElement{ID: nextID("aemcp")}, + ContainerID: mod.ID, + Name: "MySvc", + ProtocolVersion: "2025-03-26", + Version: "1.0.0", + 
ConnectionTimeoutSeconds: 30, + } + + h := mkHierarchy(mod) + withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListAgentEditorConsumedMCPServicesFunc: func() ([]*agenteditor.ConsumedMCPService, error) { return []*agenteditor.ConsumedMCPService{svc}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeAgentEditorConsumedMCPService(ctx, ast.QualifiedName{Module: "M", Name: "MySvc"})) + + out := buf.String() + assertContainsStr(t, out, "CREATE CONSUMED MCP SERVICE") + assertContainsStr(t, out, "ProtocolVersion") +} diff --git a/mdl/executor/cmd_agenteditor_models.go b/mdl/executor/cmd_agenteditor_models.go index a43e59db..cefd65ec 100644 --- a/mdl/executor/cmd_agenteditor_models.go +++ b/mdl/executor/cmd_agenteditor_models.go @@ -159,17 +159,17 @@ func execCreateAgentEditorModel(ctx *ExecContext, s *ast.CreateModelStmt) error } m := &agenteditor.Model{ - ContainerID: module.ID, - Name: s.Name.Name, + ContainerID: module.ID, + Name: s.Name.Name, Documentation: s.Documentation, - Provider: provider, - Key: keyRef, - DisplayName: s.DisplayName, - KeyName: s.KeyName, - KeyID: s.KeyID, - Environment: s.Environment, - ResourceName: s.ResourceName, - DeepLinkURL: s.DeepLinkURL, + Provider: provider, + Key: keyRef, + DisplayName: s.DisplayName, + KeyName: s.KeyName, + KeyID: s.KeyID, + Environment: s.Environment, + ResourceName: s.ResourceName, + DeepLinkURL: s.DeepLinkURL, } if err := ctx.Backend.CreateAgentEditorModel(m); err != nil { diff --git a/mdl/executor/cmd_agenteditor_write.go b/mdl/executor/cmd_agenteditor_write.go index 851f1c99..656ac555 100644 --- a/mdl/executor/cmd_agenteditor_write.go +++ b/mdl/executor/cmd_agenteditor_write.go @@ -144,17 +144,17 @@ func execCreateAgent(ctx *ExecContext, s *ast.CreateAgentStmt) error { } a := &agenteditor.Agent{ - ContainerID: module.ID, - Name: s.Name.Name, + ContainerID: module.ID, + Name: s.Name.Name, 
Documentation: s.Documentation, - Description: s.Description, - SystemPrompt: s.SystemPrompt, - UserPrompt: s.UserPrompt, - UsageType: s.UsageType, - MaxTokens: s.MaxTokens, - ToolChoice: s.ToolChoice, - Temperature: s.Temperature, - TopP: s.TopP, + Description: s.Description, + SystemPrompt: s.SystemPrompt, + UserPrompt: s.UserPrompt, + UsageType: s.UsageType, + MaxTokens: s.MaxTokens, + ToolChoice: s.ToolChoice, + Temperature: s.Temperature, + TopP: s.TopP, } // Resolve Model reference diff --git a/mdl/executor/cmd_associations_mock_test.go b/mdl/executor/cmd_associations_mock_test.go new file mode 100644 index 00000000..1d56e679 --- /dev/null +++ b/mdl/executor/cmd_associations_mock_test.go @@ -0,0 +1,77 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/domainmodel" +) + +func TestShowAssociations_Mock(t *testing.T) { + mod := mkModule("MyModule") + ent1 := mkEntity(mod.ID, "Order") + ent2 := mkEntity(mod.ID, "Customer") + assoc := mkAssociation(mod.ID, "Order_Customer", ent1.ID, ent2.ID) + + dm := &domainmodel.DomainModel{ + BaseElement: model.BaseElement{ID: nextID("dm")}, + ContainerID: mod.ID, + Entities: []*domainmodel.Entity{ent1, ent2}, + Associations: []*domainmodel.Association{assoc}, + } + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModulesFunc: func() ([]*model.Module, error) { return []*model.Module{mod}, nil }, + ListDomainModelsFunc: func() ([]*domainmodel.DomainModel, error) { return []*domainmodel.DomainModel{dm}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, showAssociations(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "MyModule.Order_Customer") + assertContainsStr(t, out, "MyModule.Order") + assertContainsStr(t, out, "MyModule.Customer") + assertContainsStr(t, out, "Reference") + assertContainsStr(t, 
out, "(1 associations)") +} + +func TestShowAssociations_Mock_FilterByModule(t *testing.T) { + mod1 := mkModule("Sales") + mod2 := mkModule("HR") + ent1 := mkEntity(mod1.ID, "Order") + ent2 := mkEntity(mod1.ID, "Product") + ent3 := mkEntity(mod2.ID, "Employee") + ent4 := mkEntity(mod2.ID, "Department") + + dm1 := &domainmodel.DomainModel{ + BaseElement: model.BaseElement{ID: nextID("dm")}, + ContainerID: mod1.ID, + Entities: []*domainmodel.Entity{ent1, ent2}, + Associations: []*domainmodel.Association{mkAssociation(mod1.ID, "Order_Product", ent1.ID, ent2.ID)}, + } + dm2 := &domainmodel.DomainModel{ + BaseElement: model.BaseElement{ID: nextID("dm")}, + ContainerID: mod2.ID, + Entities: []*domainmodel.Entity{ent3, ent4}, + Associations: []*domainmodel.Association{mkAssociation(mod2.ID, "Employee_Dept", ent3.ID, ent4.ID)}, + } + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModulesFunc: func() ([]*model.Module, error) { return []*model.Module{mod1, mod2}, nil }, + ListDomainModelsFunc: func() ([]*domainmodel.DomainModel, error) { return []*domainmodel.DomainModel{dm1, dm2}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, showAssociations(ctx, "HR")) + + out := buf.String() + assertNotContainsStr(t, out, "Sales.Order_Product") + assertContainsStr(t, out, "HR.Employee_Dept") + assertContainsStr(t, out, "(1 associations)") +} diff --git a/mdl/executor/cmd_businessevents_mock_test.go b/mdl/executor/cmd_businessevents_mock_test.go new file mode 100644 index 00000000..c874b613 --- /dev/null +++ b/mdl/executor/cmd_businessevents_mock_test.go @@ -0,0 +1,79 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" +) + +func TestShowBusinessEventServices_Mock(t *testing.T) { + mod := mkModule("MyModule") + svc := &model.BusinessEventService{ + BaseElement: 
model.BaseElement{ID: nextID("bes")}, + ContainerID: mod.ID, + Name: "OrderEvents", + } + h := mkHierarchy(mod) + withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListBusinessEventServicesFunc: func() ([]*model.BusinessEventService, error) { + return []*model.BusinessEventService{svc}, nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showBusinessEventServices(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "QualifiedName") + assertContainsStr(t, out, "MyModule.OrderEvents") +} + +func TestShowBusinessEventClients_Mock(t *testing.T) { + ctx, buf := newMockCtx(t) + assertNoError(t, showBusinessEventClients(ctx, "")) + assertContainsStr(t, buf.String(), "not yet implemented") +} + +func TestDescribeBusinessEventService_Mock(t *testing.T) { + mod := mkModule("MyModule") + svc := &model.BusinessEventService{ + BaseElement: model.BaseElement{ID: nextID("bes")}, + ContainerID: mod.ID, + Name: "OrderEvents", + Definition: &model.BusinessEventDefinition{ + ServiceName: "com.example.orders", + EventNamePrefix: "order", + Channels: []*model.BusinessEventChannel{ + { + ChannelName: "ch1", + Messages: []*model.BusinessEventMessage{ + {MessageName: "OrderCreated"}, + }, + }, + }, + }, + } + h := mkHierarchy(mod) + withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListBusinessEventServicesFunc: func() ([]*model.BusinessEventService, error) { + return []*model.BusinessEventService{svc}, nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeBusinessEventService(ctx, ast.QualifiedName{Module: "MyModule", Name: "OrderEvents"})) + + out := buf.String() + assertContainsStr(t, out, "CREATE OR REPLACE BUSINESS EVENT SERVICE") + assertContainsStr(t, out, "MyModule.OrderEvents") +} diff --git a/mdl/executor/cmd_constants_mock_test.go 
b/mdl/executor/cmd_constants_mock_test.go new file mode 100644 index 00000000..d18ebb1e --- /dev/null +++ b/mdl/executor/cmd_constants_mock_test.go @@ -0,0 +1,106 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" +) + +func TestShowConstants_Mock(t *testing.T) { + mod := mkModule("MyModule") + c1 := mkConstant(mod.ID, "AppURL", "String", "https://example.com") + c2 := mkConstant(mod.ID, "MaxRetries", "Integer", "3") + c2.ExposedToClient = true + + h := mkHierarchy(mod) + withContainer(h, c1.ContainerID, mod.ID) + withContainer(h, c2.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConstantsFunc: func() ([]*model.Constant, error) { return []*model.Constant{c1, c2}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showConstants(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "MyModule.AppURL") + assertContainsStr(t, out, "MyModule.MaxRetries") + assertContainsStr(t, out, "https://example.com") + assertContainsStr(t, out, "Yes") + assertContainsStr(t, out, "(2 constants)") +} + +func TestShowConstants_Mock_FilterByModule(t *testing.T) { + mod1 := mkModule("Alpha") + mod2 := mkModule("Beta") + c1 := mkConstant(mod1.ID, "Key1", "String", "val1") + c2 := mkConstant(mod2.ID, "Key2", "Integer", "42") + + h := mkHierarchy(mod1, mod2) + withContainer(h, c1.ContainerID, mod1.ID) + withContainer(h, c2.ContainerID, mod2.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConstantsFunc: func() ([]*model.Constant, error) { return []*model.Constant{c1, c2}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showConstants(ctx, "Beta")) + + out := buf.String() + assertNotContainsStr(t, out, "Alpha.Key1") + assertContainsStr(t, 
out, "Beta.Key2") + assertContainsStr(t, out, "(1 constants)") +} + +func TestShowConstants_Mock_Empty(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConstantsFunc: func() ([]*model.Constant, error) { return nil, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(mkHierarchy())) + assertNoError(t, showConstants(ctx, "")) + assertContainsStr(t, buf.String(), "No constants found") +} + +func TestDescribeConstant_Mock(t *testing.T) { + mod := mkModule("MyModule") + c := mkConstant(mod.ID, "AppURL", "String", "https://example.com") + + h := mkHierarchy(mod) + withContainer(h, c.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConstantsFunc: func() ([]*model.Constant, error) { return []*model.Constant{c}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeConstant(ctx, ast.QualifiedName{Module: "MyModule", Name: "AppURL"})) + + out := buf.String() + assertContainsStr(t, out, "CREATE OR MODIFY CONSTANT MyModule.AppURL") + assertContainsStr(t, out, "String") +} + +func TestDescribeConstant_Mock_NotFound(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConstantsFunc: func() ([]*model.Constant, error) { return nil, nil }, + } + + ctx, _ := newMockCtx(t, withBackend(mb), withHierarchy(h)) + err := describeConstant(ctx, ast.QualifiedName{Module: "MyModule", Name: "Missing"}) + assertError(t, err) +} diff --git a/mdl/executor/cmd_datatransformer_mock_test.go b/mdl/executor/cmd_datatransformer_mock_test.go new file mode 100644 index 00000000..fdf2998a --- /dev/null +++ b/mdl/executor/cmd_datatransformer_mock_test.go @@ -0,0 +1,60 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + 
"github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" +) + +func TestListDataTransformers_Mock(t *testing.T) { + mod := mkModule("ETL") + dt := &model.DataTransformer{ + BaseElement: model.BaseElement{ID: nextID("dt")}, + ContainerID: mod.ID, + Name: "TransformOrders", + SourceType: "Entity", + } + + h := mkHierarchy(mod) + withContainer(h, dt.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListDataTransformersFunc: func() ([]*model.DataTransformer, error) { return []*model.DataTransformer{dt}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, listDataTransformers(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "Qualified Name") + assertContainsStr(t, out, "ETL.TransformOrders") +} + +func TestDescribeDataTransformer_Mock(t *testing.T) { + mod := mkModule("ETL") + dt := &model.DataTransformer{ + BaseElement: model.BaseElement{ID: nextID("dt")}, + ContainerID: mod.ID, + Name: "TransformOrders", + SourceType: "Entity", + } + + h := mkHierarchy(mod) + withContainer(h, dt.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListDataTransformersFunc: func() ([]*model.DataTransformer, error) { return []*model.DataTransformer{dt}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeDataTransformer(ctx, ast.QualifiedName{Module: "ETL", Name: "TransformOrders"})) + + out := buf.String() + assertContainsStr(t, out, "CREATE DATA TRANSFORMER") +} diff --git a/mdl/executor/cmd_dbconnection_mock_test.go b/mdl/executor/cmd_dbconnection_mock_test.go new file mode 100644 index 00000000..bfd5460f --- /dev/null +++ b/mdl/executor/cmd_dbconnection_mock_test.go @@ -0,0 +1,60 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" 
+ "github.com/mendixlabs/mxcli/model" +) + +func TestShowDatabaseConnections_Mock(t *testing.T) { + mod := mkModule("DataMod") + conn := &model.DatabaseConnection{ + BaseElement: model.BaseElement{ID: nextID("dbc")}, + ContainerID: mod.ID, + Name: "MyDB", + DatabaseType: "PostgreSQL", + } + + h := mkHierarchy(mod) + withContainer(h, conn.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListDatabaseConnectionsFunc: func() ([]*model.DatabaseConnection, error) { return []*model.DatabaseConnection{conn}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showDatabaseConnections(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "Qualified Name") + assertContainsStr(t, out, "DataMod.MyDB") +} + +func TestDescribeDatabaseConnection_Mock(t *testing.T) { + mod := mkModule("DataMod") + conn := &model.DatabaseConnection{ + BaseElement: model.BaseElement{ID: nextID("dbc")}, + ContainerID: mod.ID, + Name: "MyDB", + DatabaseType: "PostgreSQL", + } + + h := mkHierarchy(mod) + withContainer(h, conn.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListDatabaseConnectionsFunc: func() ([]*model.DatabaseConnection, error) { return []*model.DatabaseConnection{conn}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeDatabaseConnection(ctx, ast.QualifiedName{Module: "DataMod", Name: "MyDB"})) + + out := buf.String() + assertContainsStr(t, out, "CREATE DATABASE CONNECTION") +} diff --git a/mdl/executor/cmd_entities_mock_test.go b/mdl/executor/cmd_entities_mock_test.go new file mode 100644 index 00000000..556e1b8a --- /dev/null +++ b/mdl/executor/cmd_entities_mock_test.go @@ -0,0 +1,58 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" + 
"github.com/mendixlabs/mxcli/sdk/domainmodel" +) + +func TestShowEntities_Mock(t *testing.T) { + mod := mkModule("MyModule") + ent1 := mkEntity(mod.ID, "Customer") + ent2 := mkEntity(mod.ID, "Order") + + dm := mkDomainModel(mod.ID, ent1, ent2) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModulesFunc: func() ([]*model.Module, error) { return []*model.Module{mod}, nil }, + ListDomainModelsFunc: func() ([]*domainmodel.DomainModel, error) { return []*domainmodel.DomainModel{dm}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, showEntities(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "MyModule.Customer") + assertContainsStr(t, out, "MyModule.Order") + assertContainsStr(t, out, "Persistent") + assertContainsStr(t, out, "(2 entities)") +} + +func TestShowEntities_Mock_FilterByModule(t *testing.T) { + mod1 := mkModule("Sales") + mod2 := mkModule("HR") + ent1 := mkEntity(mod1.ID, "Product") + ent2 := mkEntity(mod2.ID, "Employee") + + dm1 := mkDomainModel(mod1.ID, ent1) + dm2 := mkDomainModel(mod2.ID, ent2) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModulesFunc: func() ([]*model.Module, error) { return []*model.Module{mod1, mod2}, nil }, + ListDomainModelsFunc: func() ([]*domainmodel.DomainModel, error) { return []*domainmodel.DomainModel{dm1, dm2}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, showEntities(ctx, "HR")) + + out := buf.String() + assertNotContainsStr(t, out, "Sales.Product") + assertContainsStr(t, out, "HR.Employee") + assertContainsStr(t, out, "(1 entities)") +} diff --git a/mdl/executor/cmd_enumerations_mock_test.go b/mdl/executor/cmd_enumerations_mock_test.go index 57ef46d4..19f6cc6f 100644 --- a/mdl/executor/cmd_enumerations_mock_test.go +++ b/mdl/executor/cmd_enumerations_mock_test.go @@ -3,129 +3,97 @@ package executor import ( - "bytes" - "context" - "strings" "testing" + 
"github.com/mendixlabs/mxcli/mdl/ast" "github.com/mendixlabs/mxcli/mdl/backend/mock" "github.com/mendixlabs/mxcli/model" ) -// TestShowEnumerations_Mock demonstrates testing a handler with a MockBackend -// instead of a real .mpr file. The handler under test is showEnumerations, -// which calls ctx.Backend.ListEnumerations() and writes a table to ctx.Output. func TestShowEnumerations_Mock(t *testing.T) { - modID := model.ID("mod-1") - enumID := model.ID("enum-1") + mod := mkModule("MyModule") + enum := mkEnumeration(mod.ID, "Color", "Red", "Green", "Blue") - mb := &mock.MockBackend{ - IsConnectedFunc: func() bool { return true }, - ListEnumerationsFunc: func() ([]*model.Enumeration, error) { - return []*model.Enumeration{ - { - BaseElement: model.BaseElement{ID: enumID}, - ContainerID: modID, - Name: "Color", - Values: []model.EnumerationValue{ - {Name: "Red"}, - {Name: "Green"}, - {Name: "Blue"}, - }, - }, - }, nil - }, - } + h := mkHierarchy(mod) + withContainer(h, enum.ContainerID, mod.ID) - // Pre-populate hierarchy so getHierarchy skips the e.reader path. - hierarchy := &ContainerHierarchy{ - moduleIDs: map[model.ID]bool{modID: true}, - moduleNames: map[model.ID]string{modID: "MyModule"}, - containerParent: map[model.ID]model.ID{enumID: modID}, - folderNames: map[model.ID]string{}, - } - - var buf bytes.Buffer - ctx := &ExecContext{ - Context: context.Background(), - Backend: mb, - Output: &buf, - Format: FormatTable, - Cache: &executorCache{hierarchy: hierarchy}, + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListEnumerationsFunc: func() ([]*model.Enumeration, error) { return []*model.Enumeration{enum}, nil }, } - if err := showEnumerations(ctx, ""); err != nil { - t.Fatalf("showEnumerations returned error: %v", err) - } + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showEnumerations(ctx, "")) out := buf.String() - - // Verify table contains our enumeration data. 
- if !strings.Contains(out, "MyModule.Color") { - t.Errorf("expected qualified name 'MyModule.Color' in output, got:\n%s", out) - } - if !strings.Contains(out, "3") { - t.Errorf("expected value count '3' in output, got:\n%s", out) - } - if !strings.Contains(out, "(1 enumerations)") { - t.Errorf("expected summary '(1 enumerations)' in output, got:\n%s", out) - } + assertContainsStr(t, out, "MyModule.Color") + assertContainsStr(t, out, "| 3") + assertContainsStr(t, out, "(1 enumerations)") } -// TestShowEnumerations_Mock_FilterByModule verifies that passing a module name -// filters the output to only that module's enumerations. func TestShowEnumerations_Mock_FilterByModule(t *testing.T) { - mod1 := model.ID("mod-1") - mod2 := model.ID("mod-2") + mod1 := mkModule("Alpha") + mod2 := mkModule("Beta") + e1 := mkEnumeration(mod1.ID, "Color", "Red") + e2 := mkEnumeration(mod2.ID, "Size", "S", "M") + + h := mkHierarchy(mod1, mod2) + withContainer(h, e1.ContainerID, mod1.ID) + withContainer(h, e2.ContainerID, mod2.ID) mb := &mock.MockBackend{ - IsConnectedFunc: func() bool { return true }, - ListEnumerationsFunc: func() ([]*model.Enumeration, error) { - return []*model.Enumeration{ - { - BaseElement: model.BaseElement{ID: model.ID("e1")}, - ContainerID: mod1, - Name: "Color", - Values: []model.EnumerationValue{{Name: "Red"}}, - }, - { - BaseElement: model.BaseElement{ID: model.ID("e2")}, - ContainerID: mod2, - Name: "Size", - Values: []model.EnumerationValue{{Name: "S"}, {Name: "M"}}, - }, - }, nil - }, + IsConnectedFunc: func() bool { return true }, + ListEnumerationsFunc: func() ([]*model.Enumeration, error) { return []*model.Enumeration{e1, e2}, nil }, } - hierarchy := &ContainerHierarchy{ - moduleIDs: map[model.ID]bool{mod1: true, mod2: true}, - moduleNames: map[model.ID]string{mod1: "Alpha", mod2: "Beta"}, - containerParent: map[model.ID]model.ID{}, - folderNames: map[model.ID]string{}, - } + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + 
assertNoError(t, showEnumerations(ctx, "Beta")) - var buf bytes.Buffer - ctx := &ExecContext{ - Context: context.Background(), - Backend: mb, - Output: &buf, - Format: FormatTable, - Cache: &executorCache{hierarchy: hierarchy}, + out := buf.String() + assertNotContainsStr(t, out, "Alpha.Color") + assertContainsStr(t, out, "Beta.Size") + assertContainsStr(t, out, "(1 enumerations)") +} + +func TestDescribeEnumeration_Mock(t *testing.T) { + mod := mkModule("MyModule") + enum := &model.Enumeration{ + BaseElement: model.BaseElement{ID: nextID("enum")}, + ContainerID: mod.ID, + Name: "Status", + Values: []model.EnumerationValue{ + {BaseElement: model.BaseElement{ID: nextID("ev")}, Name: "Active", Caption: &model.Text{Translations: map[string]string{"en_US": "Active"}}}, + {BaseElement: model.BaseElement{ID: nextID("ev")}, Name: "Inactive", Caption: &model.Text{Translations: map[string]string{"en_US": "Inactive"}}}, + }, } - if err := showEnumerations(ctx, "Beta"); err != nil { - t.Fatalf("showEnumerations returned error: %v", err) + h := mkHierarchy(mod) + withContainer(h, enum.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListEnumerationsFunc: func() ([]*model.Enumeration, error) { return []*model.Enumeration{enum}, nil }, } + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeEnumeration(ctx, ast.QualifiedName{Module: "MyModule", Name: "Status"})) + out := buf.String() - if strings.Contains(out, "Alpha.Color") { - t.Errorf("should not contain Alpha.Color when filtering by Beta:\n%s", out) - } - if !strings.Contains(out, "Beta.Size") { - t.Errorf("expected Beta.Size in output:\n%s", out) - } - if !strings.Contains(out, "(1 enumerations)") { - t.Errorf("expected 1 enumeration in summary:\n%s", out) + assertContainsStr(t, out, "CREATE OR MODIFY ENUMERATION MyModule.Status") + assertContainsStr(t, out, "Active") + assertContainsStr(t, out, "Inactive") +} + +func 
TestDescribeEnumeration_Mock_NotFound(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListEnumerationsFunc: func() ([]*model.Enumeration, error) { return nil, nil }, } + + ctx, _ := newMockCtx(t, withBackend(mb), withHierarchy(h)) + err := describeEnumeration(ctx, ast.QualifiedName{Module: "MyModule", Name: "Missing"}) + assertError(t, err) } diff --git a/mdl/executor/cmd_error_mock_test.go b/mdl/executor/cmd_error_mock_test.go new file mode 100644 index 00000000..d60fa2db --- /dev/null +++ b/mdl/executor/cmd_error_mock_test.go @@ -0,0 +1,474 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "fmt" + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/agenteditor" + "github.com/mendixlabs/mxcli/sdk/microflows" + "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/sdk/pages" + "github.com/mendixlabs/mxcli/sdk/security" + "github.com/mendixlabs/mxcli/sdk/workflows" +) + +// errBackend is a sentinel used in backend-error tests. 
+var errBackend = fmt.Errorf("backend failure") + +func TestShowEnumerations_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListEnumerationsFunc: func() ([]*model.Enumeration, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showEnumerations(ctx, "")) +} + +func TestShowConstants_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConstantsFunc: func() ([]*model.Constant, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showConstants(ctx, "")) +} + +func TestShowMicroflows_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListMicroflowsFunc: func() ([]*microflows.Microflow, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showMicroflows(ctx, "")) +} + +func TestShowNanoflows_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListNanoflowsFunc: func() ([]*microflows.Nanoflow, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showNanoflows(ctx, "")) +} + +func TestShowPages_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListPagesFunc: func() ([]*pages.Page, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showPages(ctx, "")) +} + +func TestShowSnippets_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListSnippetsFunc: func() ([]*pages.Snippet, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showSnippets(ctx, "")) +} + +func TestShowLayouts_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + 
IsConnectedFunc: func() bool { return true }, + ListLayoutsFunc: func() ([]*pages.Layout, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showLayouts(ctx, "")) +} + +func TestShowWorkflows_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListWorkflowsFunc: func() ([]*workflows.Workflow, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showWorkflows(ctx, "")) +} + +func TestShowODataClients_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConsumedODataServicesFunc: func() ([]*model.ConsumedODataService, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showODataClients(ctx, "")) +} + +func TestShowODataServices_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListPublishedODataServicesFunc: func() ([]*model.PublishedODataService, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showODataServices(ctx, "")) +} + +func TestShowRestClients_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConsumedRestServicesFunc: func() ([]*model.ConsumedRestService, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showRestClients(ctx, "")) +} + +func TestShowPublishedRestServices_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListPublishedRestServicesFunc: func() ([]*model.PublishedRestService, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showPublishedRestServices(ctx, "")) +} + +func TestShowJavaActions_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + 
IsConnectedFunc: func() bool { return true }, + ListJavaActionsFunc: func() ([]*mpr.JavaAction, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showJavaActions(ctx, "")) +} + +func TestShowJavaScriptActions_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListJavaScriptActionsFunc: func() ([]*mpr.JavaScriptAction, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showJavaScriptActions(ctx, "")) +} + +func TestShowDatabaseConnections_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListDatabaseConnectionsFunc: func() ([]*model.DatabaseConnection, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showDatabaseConnections(ctx, "")) +} + +func TestShowImageCollections_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListImageCollectionsFunc: func() ([]*mpr.ImageCollection, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showImageCollections(ctx, "")) +} + +func TestShowJsonStructures_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListJsonStructuresFunc: func() ([]*mpr.JsonStructure, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showJsonStructures(ctx, "")) +} + +func TestShowNavigation_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetNavigationFunc: func() (*mpr.NavigationDocument, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showNavigation(ctx)) +} + +func TestShowProjectSecurity_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: 
func() bool { return true }, + GetProjectSecurityFunc: func() (*security.ProjectSecurity, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showProjectSecurity(ctx)) +} + +func TestShowModuleRoles_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModuleSecurityFunc: func() ([]*security.ModuleSecurity, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showModuleRoles(ctx, "")) +} + +func TestShowUserRoles_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetProjectSecurityFunc: func() (*security.ProjectSecurity, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showUserRoles(ctx)) +} + +func TestShowDemoUsers_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetProjectSecurityFunc: func() (*security.ProjectSecurity, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showDemoUsers(ctx)) +} + +func TestShowBusinessEventServices_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListBusinessEventServicesFunc: func() ([]*model.BusinessEventService, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showBusinessEventServices(ctx, "")) +} + +func TestShowAgentEditorModels_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListAgentEditorModelsFunc: func() ([]*agenteditor.Model, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showAgentEditorModels(ctx, "")) +} + +func TestShowAgentEditorAgents_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool 
{ return true }, + ListAgentEditorAgentsFunc: func() ([]*agenteditor.Agent, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showAgentEditorAgents(ctx, "")) +} + +func TestShowAgentEditorKnowledgeBases_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListAgentEditorKnowledgeBasesFunc: func() ([]*agenteditor.KnowledgeBase, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showAgentEditorKnowledgeBases(ctx, "")) +} + +func TestShowAgentEditorMCPServices_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListAgentEditorConsumedMCPServicesFunc: func() ([]*agenteditor.ConsumedMCPService, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showAgentEditorConsumedMCPServices(ctx, "")) +} + +func TestListDataTransformers_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListDataTransformersFunc: func() ([]*model.DataTransformer, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, listDataTransformers(ctx, "")) +} + +func TestShowExportMappings_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListExportMappingsFunc: func() ([]*model.ExportMapping, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showExportMappings(ctx, "")) +} + +func TestShowImportMappings_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListImportMappingsFunc: func() ([]*model.ImportMapping, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showImportMappings(ctx, "")) +} + +func TestShowSettings_Mock_BackendError(t 
*testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetProjectSettingsFunc: func() (*model.ProjectSettings, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, showSettings(ctx)) +} + +// Describe handler backend errors + +func TestDescribeEnumeration_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListEnumerationsFunc: func() ([]*model.Enumeration, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, describeEnumeration(ctx, ast.QualifiedName{Module: "M", Name: "E"})) +} + +func TestDescribeConstant_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConstantsFunc: func() ([]*model.Constant, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, describeConstant(ctx, ast.QualifiedName{Module: "M", Name: "C"})) +} + +func TestDescribeMicroflow_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListMicroflowsFunc: func() ([]*microflows.Microflow, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, describeMicroflow(ctx, ast.QualifiedName{Module: "M", Name: "F"})) +} + +func TestDescribeWorkflow_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListWorkflowsFunc: func() ([]*workflows.Workflow, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, describeWorkflow(ctx, ast.QualifiedName{Module: "M", Name: "W"})) +} + +func TestDescribeNavigation_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetNavigationFunc: func() (*mpr.NavigationDocument, error) { return nil, errBackend }, + } + ctx, _ := 
newMockCtx(t, withBackend(mb)) + assertError(t, describeNavigation(ctx, ast.QualifiedName{Module: "M", Name: "N"})) +} + +func TestDescribeODataClient_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConsumedODataServicesFunc: func() ([]*model.ConsumedODataService, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, describeODataClient(ctx, ast.QualifiedName{Module: "M", Name: "C"})) +} + +func TestDescribeODataService_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListPublishedODataServicesFunc: func() ([]*model.PublishedODataService, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, describeODataService(ctx, ast.QualifiedName{Module: "M", Name: "S"})) +} + +func TestDescribeRestClient_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConsumedRestServicesFunc: func() ([]*model.ConsumedRestService, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, describeRestClient(ctx, ast.QualifiedName{Module: "M", Name: "R"})) +} + +func TestDescribeImageCollection_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListImageCollectionsFunc: func() ([]*mpr.ImageCollection, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, describeImageCollection(ctx, ast.QualifiedName{Module: "M", Name: "I"})) +} + +func TestDescribeDatabaseConnection_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListDatabaseConnectionsFunc: func() ([]*model.DatabaseConnection, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, describeDatabaseConnection(ctx, 
ast.QualifiedName{Module: "M", Name: "D"})) +} + +func TestDescribeModuleRole_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModuleSecurityFunc: func() ([]*security.ModuleSecurity, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, describeModuleRole(ctx, ast.QualifiedName{Module: "M", Name: "R"})) +} + +func TestDescribeUserRole_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetProjectSecurityFunc: func() (*security.ProjectSecurity, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, describeUserRole(ctx, ast.QualifiedName{Module: "", Name: "Admin"})) +} + +func TestDescribeDemoUser_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetProjectSecurityFunc: func() (*security.ProjectSecurity, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, describeDemoUser(ctx, "demo")) +} + +// Write handler backend errors + +func TestExecCreateModule_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModulesFunc: func() ([]*model.Module, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, execCreateModule(ctx, &ast.CreateModuleStmt{Name: "M"})) +} + +func TestExecCreateEnumeration_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListEnumerationsFunc: func() ([]*model.Enumeration, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, execCreateEnumeration(ctx, &ast.CreateEnumerationStmt{ + Name: ast.QualifiedName{Module: "M", Name: "E"}, + })) +} + +func TestExecDropMicroflow_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ 
+ IsConnectedFunc: func() bool { return true }, + ListMicroflowsFunc: func() ([]*microflows.Microflow, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, execDropMicroflow(ctx, &ast.DropMicroflowStmt{ + Name: ast.QualifiedName{Module: "M", Name: "F"}, + })) +} + +func TestExecDropPage_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListPagesFunc: func() ([]*pages.Page, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, execDropPage(ctx, &ast.DropPageStmt{ + Name: ast.QualifiedName{Module: "M", Name: "P"}, + })) +} + +func TestExecDropSnippet_Mock_BackendError(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListSnippetsFunc: func() ([]*pages.Snippet, error) { return nil, errBackend }, + } + ctx, _ := newMockCtx(t, withBackend(mb)) + assertError(t, execDropSnippet(ctx, &ast.DropSnippetStmt{ + Name: ast.QualifiedName{Module: "M", Name: "S"}, + })) +} diff --git a/mdl/executor/cmd_export_mappings_mock_test.go b/mdl/executor/cmd_export_mappings_mock_test.go new file mode 100644 index 00000000..3746286d --- /dev/null +++ b/mdl/executor/cmd_export_mappings_mock_test.go @@ -0,0 +1,34 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" +) + +func TestShowExportMappings_Mock(t *testing.T) { + mod := mkModule("Integration") + em := &model.ExportMapping{ + BaseElement: model.BaseElement{ID: nextID("em")}, + ContainerID: mod.ID, + Name: "ExportOrders", + } + + h := mkHierarchy(mod) + withContainer(h, em.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListExportMappingsFunc: func() ([]*model.ExportMapping, error) { return []*model.ExportMapping{em}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), 
withHierarchy(h)) + assertNoError(t, showExportMappings(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "Export Mapping") + assertContainsStr(t, out, "Integration.ExportOrders") +} diff --git a/mdl/executor/cmd_fragments_mock_test.go b/mdl/executor/cmd_fragments_mock_test.go new file mode 100644 index 00000000..c759a4e3 --- /dev/null +++ b/mdl/executor/cmd_fragments_mock_test.go @@ -0,0 +1,31 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" +) + +func TestShowFragments_Mock(t *testing.T) { + ctx, buf := newMockCtx(t) + ctx.Fragments = map[string]*ast.DefineFragmentStmt{ + "myFrag": {Name: "myFrag"}, + } + + assertNoError(t, showFragments(ctx)) + + out := buf.String() + assertContainsStr(t, out, "myFrag") +} + +func TestShowFragments_Empty_Mock(t *testing.T) { + ctx, buf := newMockCtx(t) + ctx.Fragments = map[string]*ast.DefineFragmentStmt{} + + assertNoError(t, showFragments(ctx)) + + out := buf.String() + assertContainsStr(t, out, "No fragments defined.") +} diff --git a/mdl/executor/cmd_imagecollections_mock_test.go b/mdl/executor/cmd_imagecollections_mock_test.go new file mode 100644 index 00000000..9b5f7548 --- /dev/null +++ b/mdl/executor/cmd_imagecollections_mock_test.go @@ -0,0 +1,61 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/mpr" +) + +func TestShowImageCollections_Mock(t *testing.T) { + mod := mkModule("Icons") + ic := &mpr.ImageCollection{ + BaseElement: model.BaseElement{ID: nextID("ic")}, + ContainerID: mod.ID, + Name: "AppIcons", + ExportLevel: "Hidden", + } + + h := mkHierarchy(mod) + withContainer(h, ic.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListImageCollectionsFunc: func() 
([]*mpr.ImageCollection, error) { return []*mpr.ImageCollection{ic}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showImageCollections(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "Image Collection") + assertContainsStr(t, out, "Icons.AppIcons") +} + +func TestDescribeImageCollection_Mock(t *testing.T) { + mod := mkModule("Icons") + ic := &mpr.ImageCollection{ + BaseElement: model.BaseElement{ID: nextID("ic")}, + ContainerID: mod.ID, + Name: "AppIcons", + ExportLevel: "Hidden", + } + + h := mkHierarchy(mod) + withContainer(h, ic.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListImageCollectionsFunc: func() ([]*mpr.ImageCollection, error) { return []*mpr.ImageCollection{ic}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeImageCollection(ctx, ast.QualifiedName{Module: "Icons", Name: "AppIcons"})) + + out := buf.String() + assertContainsStr(t, out, "CREATE OR REPLACE IMAGE COLLECTION") +} diff --git a/mdl/executor/cmd_import_mappings_mock_test.go b/mdl/executor/cmd_import_mappings_mock_test.go new file mode 100644 index 00000000..1d9eeabd --- /dev/null +++ b/mdl/executor/cmd_import_mappings_mock_test.go @@ -0,0 +1,34 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" +) + +func TestShowImportMappings_Mock(t *testing.T) { + mod := mkModule("Integration") + im := &model.ImportMapping{ + BaseElement: model.BaseElement{ID: nextID("im")}, + ContainerID: mod.ID, + Name: "ImportOrders", + } + + h := mkHierarchy(mod) + withContainer(h, im.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListImportMappingsFunc: func() ([]*model.ImportMapping, error) { return []*model.ImportMapping{im}, nil }, + } + + ctx, buf := newMockCtx(t, 
withBackend(mb), withHierarchy(h)) + assertNoError(t, showImportMappings(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "Import Mapping") + assertContainsStr(t, out, "Integration.ImportOrders") +} diff --git a/mdl/executor/cmd_javaactions_mock_test.go b/mdl/executor/cmd_javaactions_mock_test.go new file mode 100644 index 00000000..bd331139 --- /dev/null +++ b/mdl/executor/cmd_javaactions_mock_test.go @@ -0,0 +1,58 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/javaactions" + "github.com/mendixlabs/mxcli/sdk/mpr" +) + +func TestShowJavaActions_Mock(t *testing.T) { + mod := mkModule("MyModule") + ja := &mpr.JavaAction{ + BaseElement: model.BaseElement{ID: nextID("ja")}, + ContainerID: mod.ID, + Name: "DoSomething", + } + + h := mkHierarchy(mod) + withContainer(h, ja.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListJavaActionsFunc: func() ([]*mpr.JavaAction, error) { return []*mpr.JavaAction{ja}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showJavaActions(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "Qualified Name") + assertContainsStr(t, out, "MyModule.DoSomething") +} + +func TestDescribeJavaAction_Mock(t *testing.T) { + mod := mkModule("MyModule") + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ReadJavaActionByNameFunc: func(qn string) (*javaactions.JavaAction, error) { + return &javaactions.JavaAction{ + BaseElement: model.BaseElement{ID: nextID("ja")}, + ContainerID: mod.ID, + Name: "DoSomething", + }, nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, describeJavaAction(ctx, ast.QualifiedName{Module: "MyModule", Name: "DoSomething"})) + + out := buf.String() + 
assertContainsStr(t, out, "CREATE JAVA ACTION") +} diff --git a/mdl/executor/cmd_javascript_actions_mock_test.go b/mdl/executor/cmd_javascript_actions_mock_test.go new file mode 100644 index 00000000..dd47c049 --- /dev/null +++ b/mdl/executor/cmd_javascript_actions_mock_test.go @@ -0,0 +1,59 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/mpr" +) + +func TestShowJavaScriptActions_Mock(t *testing.T) { + mod := mkModule("WebMod") + jsa := &mpr.JavaScriptAction{ + BaseElement: model.BaseElement{ID: nextID("jsa")}, + ContainerID: mod.ID, + Name: "ShowAlert", + Platform: "Web", + } + + h := mkHierarchy(mod) + withContainer(h, jsa.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListJavaScriptActionsFunc: func() ([]*mpr.JavaScriptAction, error) { return []*mpr.JavaScriptAction{jsa}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showJavaScriptActions(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "Qualified Name") + assertContainsStr(t, out, "WebMod.ShowAlert") +} + +func TestDescribeJavaScriptAction_Mock(t *testing.T) { + mod := mkModule("WebMod") + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ReadJavaScriptActionByNameFunc: func(qn string) (*mpr.JavaScriptAction, error) { + return &mpr.JavaScriptAction{ + BaseElement: model.BaseElement{ID: nextID("jsa")}, + ContainerID: mod.ID, + Name: "ShowAlert", + Platform: "Web", + }, nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, describeJavaScriptAction(ctx, ast.QualifiedName{Module: "WebMod", Name: "ShowAlert"})) + + out := buf.String() + assertContainsStr(t, out, "CREATE JAVASCRIPT ACTION") +} diff --git a/mdl/executor/cmd_json_mock_test.go 
b/mdl/executor/cmd_json_mock_test.go new file mode 100644 index 00000000..9986fe53 --- /dev/null +++ b/mdl/executor/cmd_json_mock_test.go @@ -0,0 +1,528 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/agenteditor" + "github.com/mendixlabs/mxcli/sdk/microflows" + "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/sdk/pages" + "github.com/mendixlabs/mxcli/sdk/security" + "github.com/mendixlabs/mxcli/sdk/workflows" +) + +func TestShowEnumerations_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + enum := mkEnumeration(mod.ID, "Status", "Active", "Inactive") + withContainer(h, enum.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListEnumerationsFunc: func() ([]*model.Enumeration, error) { return []*model.Enumeration{enum}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showEnumerations(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "Status") +} + +func TestShowConstants_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + c := mkConstant(mod.ID, "Timeout", "Integer", "30") + withContainer(h, c.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConstantsFunc: func() ([]*model.Constant, error) { return []*model.Constant{c}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showConstants(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "Timeout") +} + +func TestShowMicroflows_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + mf := mkMicroflow(mod.ID, "ACT_DoStuff") + withContainer(h, 
mf.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListMicroflowsFunc: func() ([]*microflows.Microflow, error) { return []*microflows.Microflow{mf}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showMicroflows(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "ACT_DoStuff") +} + +func TestShowNanoflows_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + nf := mkNanoflow(mod.ID, "NF_Validate") + withContainer(h, nf.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListNanoflowsFunc: func() ([]*microflows.Nanoflow, error) { return []*microflows.Nanoflow{nf}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showNanoflows(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "NF_Validate") +} + +func TestShowPages_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + pg := mkPage(mod.ID, "Page_Home") + withContainer(h, pg.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListPagesFunc: func() ([]*pages.Page, error) { return []*pages.Page{pg}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showPages(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "Page_Home") +} + +func TestShowSnippets_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + snp := mkSnippet(mod.ID, "Snippet_Header") + withContainer(h, snp.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListSnippetsFunc: func() ([]*pages.Snippet, error) { return []*pages.Snippet{snp}, nil }, + } + + ctx, buf := newMockCtx(t, 
withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showSnippets(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "Snippet_Header") +} + +func TestShowLayouts_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + lay := mkLayout(mod.ID, "Layout_Main") + withContainer(h, lay.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListLayoutsFunc: func() ([]*pages.Layout, error) { return []*pages.Layout{lay}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showLayouts(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "Layout_Main") +} + +func TestShowWorkflows_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + wf := mkWorkflow(mod.ID, "WF_Approve") + withContainer(h, wf.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListWorkflowsFunc: func() ([]*workflows.Workflow, error) { return []*workflows.Workflow{wf}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showWorkflows(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "WF_Approve") +} + +func TestShowODataClients_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + svc := &model.ConsumedODataService{ + BaseElement: model.BaseElement{ID: nextID("cos")}, + ContainerID: mod.ID, + Name: "ExtService", + } + withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConsumedODataServicesFunc: func() ([]*model.ConsumedODataService, error) { return []*model.ConsumedODataService{svc}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, 
showODataClients(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "ExtService") +} + +func TestShowODataServices_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + svc := &model.PublishedODataService{ + BaseElement: model.BaseElement{ID: nextID("pos")}, + ContainerID: mod.ID, + Name: "PubOData", + } + withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListPublishedODataServicesFunc: func() ([]*model.PublishedODataService, error) { return []*model.PublishedODataService{svc}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showODataServices(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "PubOData") +} + +func TestShowRestClients_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + svc := &model.ConsumedRestService{ + BaseElement: model.BaseElement{ID: nextID("crs")}, + ContainerID: mod.ID, + Name: "RestClient1", + } + withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConsumedRestServicesFunc: func() ([]*model.ConsumedRestService, error) { return []*model.ConsumedRestService{svc}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showRestClients(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "RestClient1") +} + +func TestShowPublishedRestServices_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + svc := &model.PublishedRestService{ + BaseElement: model.BaseElement{ID: nextID("prs")}, + ContainerID: mod.ID, + Name: "PubRest1", + } + withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListPublishedRestServicesFunc: func() 
([]*model.PublishedRestService, error) { return []*model.PublishedRestService{svc}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showPublishedRestServices(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "PubRest1") +} + +func TestShowJavaActions_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + ja := &mpr.JavaAction{ + BaseElement: model.BaseElement{ID: nextID("ja")}, + ContainerID: mod.ID, + Name: "MyJavaAction", + } + withContainer(h, ja.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListJavaActionsFunc: func() ([]*mpr.JavaAction, error) { return []*mpr.JavaAction{ja}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showJavaActions(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "MyJavaAction") +} + +func TestShowJavaScriptActions_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + jsa := &mpr.JavaScriptAction{ + BaseElement: model.BaseElement{ID: nextID("jsa")}, + ContainerID: mod.ID, + Name: "MyJSAction", + } + withContainer(h, jsa.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListJavaScriptActionsFunc: func() ([]*mpr.JavaScriptAction, error) { return []*mpr.JavaScriptAction{jsa}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showJavaScriptActions(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "MyJSAction") +} + +func TestShowDatabaseConnections_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + dc := &model.DatabaseConnection{ + BaseElement: model.BaseElement{ID: nextID("dc")}, + ContainerID: mod.ID, + Name: "MyDB", + } + withContainer(h, 
dc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListDatabaseConnectionsFunc: func() ([]*model.DatabaseConnection, error) { return []*model.DatabaseConnection{dc}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showDatabaseConnections(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "MyDB") +} + +func TestShowImageCollections_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + ic := &mpr.ImageCollection{ + BaseElement: model.BaseElement{ID: nextID("ic")}, + ContainerID: mod.ID, + Name: "Icons", + } + withContainer(h, ic.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListImageCollectionsFunc: func() ([]*mpr.ImageCollection, error) { return []*mpr.ImageCollection{ic}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showImageCollections(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "Icons") +} + +func TestShowJsonStructures_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + js := &mpr.JsonStructure{ + BaseElement: model.BaseElement{ID: nextID("js")}, + ContainerID: mod.ID, + Name: "OrderSchema", + } + withContainer(h, js.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListJsonStructuresFunc: func() ([]*mpr.JsonStructure, error) { return []*mpr.JsonStructure{js}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showJsonStructures(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "OrderSchema") +} + +func TestShowUserRoles_Mock_JSON(t *testing.T) { + ps := &security.ProjectSecurity{ + BaseElement: model.BaseElement{ID: nextID("ps")}, + UserRoles: 
[]*security.UserRole{ + {Name: "Administrator"}, + }, + } + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetProjectSecurityFunc: func() (*security.ProjectSecurity, error) { return ps, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON)) + assertNoError(t, showUserRoles(ctx)) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "Administrator") +} + +func TestShowModuleRoles_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + ms := &security.ModuleSecurity{ + BaseElement: model.BaseElement{ID: nextID("ms")}, + ContainerID: mod.ID, + ModuleRoles: []*security.ModuleRole{ + {Name: "User"}, + }, + } + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModuleSecurityFunc: func() ([]*security.ModuleSecurity, error) { return []*security.ModuleSecurity{ms}, nil }, + } + + h := mkHierarchy(mod) + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showModuleRoles(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "User") +} + +func TestShowDemoUsers_Mock_JSON(t *testing.T) { + ps := &security.ProjectSecurity{ + BaseElement: model.BaseElement{ID: nextID("ps")}, + EnableDemoUsers: true, + DemoUsers: []*security.DemoUser{ + {UserName: "demo_admin"}, + }, + } + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetProjectSecurityFunc: func() (*security.ProjectSecurity, error) { return ps, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON)) + assertNoError(t, showDemoUsers(ctx)) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "demo_admin") +} + +func TestShowBusinessEventServices_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + svc := &model.BusinessEventService{ + BaseElement: model.BaseElement{ID: nextID("bes")}, + ContainerID: mod.ID, + Name: "OrderEvents", + } + 
withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListBusinessEventServicesFunc: func() ([]*model.BusinessEventService, error) { return []*model.BusinessEventService{svc}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showBusinessEventServices(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "OrderEvents") +} + +func TestShowAgentEditorModels_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + m1 := &agenteditor.Model{ + BaseElement: model.BaseElement{ID: nextID("aem")}, + ContainerID: mod.ID, + Name: "GPT4o", + } + withContainer(h, m1.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListAgentEditorModelsFunc: func() ([]*agenteditor.Model, error) { return []*agenteditor.Model{m1}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showAgentEditorModels(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "GPT4o") +} + +func TestShowAgentEditorAgents_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + a1 := &agenteditor.Agent{ + BaseElement: model.BaseElement{ID: nextID("aea")}, + ContainerID: mod.ID, + Name: "Helper", + } + withContainer(h, a1.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListAgentEditorAgentsFunc: func() ([]*agenteditor.Agent, error) { return []*agenteditor.Agent{a1}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showAgentEditorAgents(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "Helper") +} + +func TestShowAgentEditorKnowledgeBases_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := 
mkHierarchy(mod) + kb := &agenteditor.KnowledgeBase{ + BaseElement: model.BaseElement{ID: nextID("aek")}, + ContainerID: mod.ID, + Name: "FAQ", + } + withContainer(h, kb.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListAgentEditorKnowledgeBasesFunc: func() ([]*agenteditor.KnowledgeBase, error) { return []*agenteditor.KnowledgeBase{kb}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showAgentEditorKnowledgeBases(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "FAQ") +} + +func TestShowAgentEditorMCPServices_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + svc := &agenteditor.ConsumedMCPService{ + BaseElement: model.BaseElement{ID: nextID("aes")}, + ContainerID: mod.ID, + Name: "ToolSvc", + } + withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListAgentEditorConsumedMCPServicesFunc: func() ([]*agenteditor.ConsumedMCPService, error) { return []*agenteditor.ConsumedMCPService{svc}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, showAgentEditorConsumedMCPServices(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "ToolSvc") +} + +func TestListDataTransformers_Mock_JSON(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + dt := &model.DataTransformer{ + BaseElement: model.BaseElement{ID: nextID("dt")}, + ContainerID: mod.ID, + Name: "Transform1", + } + withContainer(h, dt.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListDataTransformersFunc: func() ([]*model.DataTransformer, error) { return []*model.DataTransformer{dt}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) + assertNoError(t, 
listDataTransformers(ctx, "")) + assertValidJSON(t, buf.String()) + assertContainsStr(t, buf.String(), "Transform1") +} diff --git a/mdl/executor/cmd_jsonstructures_mock_test.go b/mdl/executor/cmd_jsonstructures_mock_test.go new file mode 100644 index 00000000..44409735 --- /dev/null +++ b/mdl/executor/cmd_jsonstructures_mock_test.go @@ -0,0 +1,35 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/mpr" +) + +func TestShowJsonStructures_Mock(t *testing.T) { + mod := mkModule("API") + js := &mpr.JsonStructure{ + BaseElement: model.BaseElement{ID: nextID("js")}, + ContainerID: mod.ID, + Name: "OrderSchema", + } + + h := mkHierarchy(mod) + withContainer(h, js.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListJsonStructuresFunc: func() ([]*mpr.JsonStructure, error) { return []*mpr.JsonStructure{js}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showJsonStructures(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "JSON Structure") + assertContainsStr(t, out, "API.OrderSchema") +} diff --git a/mdl/executor/cmd_mermaid_mock_test.go b/mdl/executor/cmd_mermaid_mock_test.go new file mode 100644 index 00000000..cb0af0d8 --- /dev/null +++ b/mdl/executor/cmd_mermaid_mock_test.go @@ -0,0 +1,52 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/domainmodel" +) + +func TestDescribeMermaid_DomainModel_Mock(t *testing.T) { + mod := mkModule("MyModule") + + // Build domain model first to get its ID, then create entities with dm as container. 
+ dm := &domainmodel.DomainModel{ + BaseElement: model.BaseElement{ID: nextID("dm")}, + ContainerID: mod.ID, + } + ent1 := mkEntity(dm.ID, "Customer") + ent2 := mkEntity(dm.ID, "Order") + dm.Entities = []*domainmodel.Entity{ent1, ent2} + dm.Associations = []*domainmodel.Association{ + mkAssociation(mod.ID, "Order_Customer", ent2.ID, ent1.ID), + } + + // Hierarchy: entity.ContainerID (dm.ID) -> mod.ID (module) + // Entities are contained by the domain model; the domain model is contained by the module. + h := mkHierarchy(mod) + withContainer(h, dm.ID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModulesFunc: func() ([]*model.Module, error) { return []*model.Module{mod}, nil }, + GetDomainModelFunc: func(moduleID model.ID) (*domainmodel.DomainModel, error) { + return dm, nil + }, + ListDomainModelsFunc: func() ([]*domainmodel.DomainModel, error) { + return []*domainmodel.DomainModel{dm}, nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeMermaid(ctx, "DOMAINMODEL", "MyModule")) + + out := buf.String() + assertContainsStr(t, out, "erDiagram") + assertContainsStr(t, out, "Customer") + assertContainsStr(t, out, "Order") + assertContainsStr(t, out, "Order_Customer") +} diff --git a/mdl/executor/cmd_microflows_mock_test.go b/mdl/executor/cmd_microflows_mock_test.go new file mode 100644 index 00000000..89eef61b --- /dev/null +++ b/mdl/executor/cmd_microflows_mock_test.go @@ -0,0 +1,113 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/domainmodel" + "github.com/mendixlabs/mxcli/sdk/microflows" +) + +func TestShowMicroflows_Mock(t *testing.T) { + mod := mkModule("MyModule") + mf := mkMicroflow(mod.ID, "ACT_CreateOrder") + + h := mkHierarchy(mod) + withContainer(h, 
mf.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListMicroflowsFunc: func() ([]*microflows.Microflow, error) { return []*microflows.Microflow{mf}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showMicroflows(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "MyModule.ACT_CreateOrder") + assertContainsStr(t, out, "(1 microflows)") +} + +func TestShowMicroflows_Mock_FilterByModule(t *testing.T) { + mod1 := mkModule("Sales") + mod2 := mkModule("HR") + mf1 := mkMicroflow(mod1.ID, "ACT_Sell") + mf2 := mkMicroflow(mod2.ID, "ACT_Hire") + + h := mkHierarchy(mod1, mod2) + withContainer(h, mf1.ContainerID, mod1.ID) + withContainer(h, mf2.ContainerID, mod2.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListMicroflowsFunc: func() ([]*microflows.Microflow, error) { return []*microflows.Microflow{mf1, mf2}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showMicroflows(ctx, "HR")) + + out := buf.String() + assertNotContainsStr(t, out, "Sales.ACT_Sell") + assertContainsStr(t, out, "HR.ACT_Hire") +} + +func TestShowNanoflows_Mock(t *testing.T) { + mod := mkModule("MyModule") + nf := mkNanoflow(mod.ID, "NF_Validate") + + h := mkHierarchy(mod) + withContainer(h, nf.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListNanoflowsFunc: func() ([]*microflows.Nanoflow, error) { return []*microflows.Nanoflow{nf}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showNanoflows(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "MyModule.NF_Validate") + assertContainsStr(t, out, "(1 nanoflows)") +} + +func TestDescribeMicroflow_Mock_Minimal(t *testing.T) { + mod := mkModule("MyModule") + mf := mkMicroflow(mod.ID, "ACT_DoSomething") + + h := mkHierarchy(mod) + withContainer(h, mf.ContainerID, 
mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListMicroflowsFunc: func() ([]*microflows.Microflow, error) { return []*microflows.Microflow{mf}, nil }, + ListDomainModelsFunc: func() ([]*domainmodel.DomainModel, error) { return nil, nil }, + ListModulesFunc: func() ([]*model.Module, error) { return []*model.Module{mod}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeMicroflow(ctx, ast.QualifiedName{Module: "MyModule", Name: "ACT_DoSomething"})) + + out := buf.String() + assertContainsStr(t, out, "CREATE OR MODIFY MICROFLOW MyModule.ACT_DoSomething") +} + +func TestDescribeMicroflow_Mock_NotFound(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListMicroflowsFunc: func() ([]*microflows.Microflow, error) { return nil, nil }, + ListDomainModelsFunc: func() ([]*domainmodel.DomainModel, error) { return nil, nil }, + ListModulesFunc: func() ([]*model.Module, error) { return []*model.Module{mod}, nil }, + } + + ctx, _ := newMockCtx(t, withBackend(mb), withHierarchy(h)) + err := describeMicroflow(ctx, ast.QualifiedName{Module: "MyModule", Name: "Missing"}) + assertError(t, err) +} diff --git a/mdl/executor/cmd_misc_mock_test.go b/mdl/executor/cmd_misc_mock_test.go new file mode 100644 index 00000000..7ade8376 --- /dev/null +++ b/mdl/executor/cmd_misc_mock_test.go @@ -0,0 +1,35 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/sdk/mpr/version" +) + +func TestShowVersion_Mock(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ProjectVersionFunc: func() *version.ProjectVersion { + return &version.ProjectVersion{ + ProductVersion: "10.18.0", + BuildVersion: "10.18.0.12345", + FormatVersion: 2, + SchemaHash: "abc123def456", + } 
+ }, + } + + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, showVersion(ctx)) + + out := buf.String() + assertContainsStr(t, out, "Mendix Version") + assertContainsStr(t, out, "10.18.0") + assertContainsStr(t, out, "Build Version") + assertContainsStr(t, out, "MPR Format") + assertContainsStr(t, out, "Schema Hash") + assertContainsStr(t, out, "abc123def456") +} diff --git a/mdl/executor/cmd_modules_mock_test.go b/mdl/executor/cmd_modules_mock_test.go new file mode 100644 index 00000000..39119b1d --- /dev/null +++ b/mdl/executor/cmd_modules_mock_test.go @@ -0,0 +1,46 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/domainmodel" + "github.com/mendixlabs/mxcli/sdk/mpr" +) + +func TestShowModules_Mock(t *testing.T) { + mod1 := mkModule("MyModule") + mod2 := mkModule("System") + + // showModules uses ListUnits to count documents per module. + // Provide a unit belonging to mod1 so the count is non-zero. + unitID := nextID("unit") + units := []*mpr.UnitInfo{{ID: unitID, ContainerID: mod1.ID}} + + // Need a hierarchy for getHierarchy — provide modules + units + folders + h := mkHierarchy(mod1, mod2) + withContainer(h, unitID, mod1.ID) + + // Provide one domain model for mod1 with one entity + ent := mkEntity(mod1.ID, "Customer") + dm := mkDomainModel(mod1.ID, ent) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModulesFunc: func() ([]*model.Module, error) { return []*model.Module{mod1, mod2}, nil }, + ListUnitsFunc: func() ([]*mpr.UnitInfo, error) { return units, nil }, + ListDomainModelsFunc: func() ([]*domainmodel.DomainModel, error) { return []*domainmodel.DomainModel{dm}, nil }, + // All other list functions return nil (zero counts) via MockBackend defaults. 
+ } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showModules(ctx)) + + out := buf.String() + assertContainsStr(t, out, "MyModule") + assertContainsStr(t, out, "System") + assertContainsStr(t, out, "(2 modules)") +} diff --git a/mdl/executor/cmd_navigation_mock_test.go b/mdl/executor/cmd_navigation_mock_test.go new file mode 100644 index 00000000..322e4adc --- /dev/null +++ b/mdl/executor/cmd_navigation_mock_test.go @@ -0,0 +1,92 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/sdk/mpr" +) + +func TestShowNavigation_Mock(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetNavigationFunc: func() (*mpr.NavigationDocument, error) { + return &mpr.NavigationDocument{ + Profiles: []*mpr.NavigationProfile{{ + Name: "Responsive", + Kind: "Responsive", + MenuItems: []*mpr.NavMenuItem{ + {Caption: "Home"}, + {Caption: "Admin"}, + {Caption: "Settings"}, + }, + }}, + }, nil + }, + } + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, showNavigation(ctx)) + + out := buf.String() + assertContainsStr(t, out, "Profile") + assertContainsStr(t, out, "Responsive") +} + +func TestShowNavigationMenu_Mock(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetNavigationFunc: func() (*mpr.NavigationDocument, error) { + return &mpr.NavigationDocument{ + Profiles: []*mpr.NavigationProfile{{ + Name: "Responsive", + Kind: "Responsive", + MenuItems: []*mpr.NavMenuItem{ + {Caption: "Dashboard", Page: "MyModule.Dashboard"}, + }, + }}, + }, nil + }, + } + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, showNavigationMenu(ctx, nil)) + assertContainsStr(t, buf.String(), "Dashboard") +} + +func TestShowNavigationHomes_Mock(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool 
{ return true }, + GetNavigationFunc: func() (*mpr.NavigationDocument, error) { + return &mpr.NavigationDocument{ + Profiles: []*mpr.NavigationProfile{{ + Name: "Responsive", + Kind: "Responsive", + HomePage: &mpr.NavHomePage{Page: "MyModule.Home"}, + }}, + }, nil + }, + } + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, showNavigationHomes(ctx)) + assertContainsStr(t, buf.String(), "Default Home:") +} + +func TestDescribeNavigation_Mock(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetNavigationFunc: func() (*mpr.NavigationDocument, error) { + return &mpr.NavigationDocument{ + Profiles: []*mpr.NavigationProfile{{ + Name: "Responsive", + Kind: "Responsive", + HomePage: &mpr.NavHomePage{Page: "MyModule.Home"}, + }}, + }, nil + }, + } + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, describeNavigation(ctx, ast.QualifiedName{Name: "Responsive"})) + assertContainsStr(t, buf.String(), "CREATE OR REPLACE NAVIGATION") +} diff --git a/mdl/executor/cmd_notconnected_mock_test.go b/mdl/executor/cmd_notconnected_mock_test.go new file mode 100644 index 00000000..ea8b3e1f --- /dev/null +++ b/mdl/executor/cmd_notconnected_mock_test.go @@ -0,0 +1,181 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" +) + +// disconnectedBackend returns a MockBackend that reports not connected. 
+func disconnectedBackend() *mock.MockBackend { + return &mock.MockBackend{ + IsConnectedFunc: func() bool { return false }, + } +} + +func TestShowModules_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, showModules(ctx)) +} + +func TestShowSettings_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, showSettings(ctx)) +} + +func TestShowVersion_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, showVersion(ctx)) +} + +func TestShowExportMappings_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, showExportMappings(ctx, "")) +} + +func TestShowImportMappings_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, showImportMappings(ctx, "")) +} + +func TestShowBusinessEventServices_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, showBusinessEventServices(ctx, "")) +} + +func TestShowAgentEditorModels_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, showAgentEditorModels(ctx, "")) +} + +func TestShowAgentEditorAgents_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, showAgentEditorAgents(ctx, "")) +} + +func TestShowAgentEditorKnowledgeBases_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, showAgentEditorKnowledgeBases(ctx, "")) +} + +func TestShowAgentEditorMCPServices_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, showAgentEditorConsumedMCPServices(ctx, "")) +} + +func TestDescribeAgentEditorModel_Mock_NotConnected(t *testing.T) { + ctx, _ := 
newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, describeAgentEditorModel(ctx, ast.QualifiedName{Module: "M", Name: "X"})) +} + +func TestDescribeAgentEditorAgent_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, describeAgentEditorAgent(ctx, ast.QualifiedName{Module: "M", Name: "X"})) +} + +func TestDescribeAgentEditorKnowledgeBase_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, describeAgentEditorKnowledgeBase(ctx, ast.QualifiedName{Module: "M", Name: "X"})) +} + +func TestDescribeAgentEditorMCPService_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, describeAgentEditorConsumedMCPService(ctx, ast.QualifiedName{Module: "M", Name: "X"})) +} + +func TestDescribeMermaid_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, describeMermaid(ctx, "domainmodel", "MyModule")) +} + +func TestDescribeSettings_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, describeSettings(ctx)) +} + +func TestDescribeBusinessEventService_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, describeBusinessEventService(ctx, ast.QualifiedName{Module: "M", Name: "S"})) +} + +func TestDescribeDataTransformer_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, describeDataTransformer(ctx, ast.QualifiedName{Module: "M", Name: "D"})) +} + +func TestDescribePublishedRestService_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, describePublishedRestService(ctx, ast.QualifiedName{Module: "M", Name: "R"})) +} + +func TestExecCreateModule_Mock_NotConnected(t *testing.T) { + ctx, _ := 
newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, execCreateModule(ctx, &ast.CreateModuleStmt{Name: "M"})) +} + +func TestExecCreateEnumeration_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, execCreateEnumeration(ctx, &ast.CreateEnumerationStmt{ + Name: ast.QualifiedName{Module: "M", Name: "E"}, + })) +} + +func TestExecDropEnumeration_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, execDropEnumeration(ctx, &ast.DropEnumerationStmt{ + Name: ast.QualifiedName{Module: "M", Name: "E"}, + })) +} + +func TestExecDropEntity_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, execDropEntity(ctx, &ast.DropEntityStmt{ + Name: ast.QualifiedName{Module: "M", Name: "E"}, + })) +} + +func TestExecDropMicroflow_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, execDropMicroflow(ctx, &ast.DropMicroflowStmt{ + Name: ast.QualifiedName{Module: "M", Name: "F"}, + })) +} + +func TestExecDropPage_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, execDropPage(ctx, &ast.DropPageStmt{ + Name: ast.QualifiedName{Module: "M", Name: "P"}, + })) +} + +func TestExecDropSnippet_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, execDropSnippet(ctx, &ast.DropSnippetStmt{ + Name: ast.QualifiedName{Module: "M", Name: "S"}, + })) +} + +func TestExecDropAssociation_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, execDropAssociation(ctx, &ast.DropAssociationStmt{ + Name: ast.QualifiedName{Module: "M", Name: "A"}, + })) +} + +func TestExecDropJavaAction_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + 
assertError(t, execDropJavaAction(ctx, &ast.DropJavaActionStmt{ + Name: ast.QualifiedName{Module: "M", Name: "J"}, + })) +} + +func TestExecDropFolder_Mock_NotConnected(t *testing.T) { + ctx, _ := newMockCtx(t, withBackend(disconnectedBackend())) + assertError(t, execDropFolder(ctx, &ast.DropFolderStmt{ + FolderPath: "Resources/Images", + Module: "M", + })) +} diff --git a/mdl/executor/cmd_odata_mock_test.go b/mdl/executor/cmd_odata_mock_test.go new file mode 100644 index 00000000..a8fc5923 --- /dev/null +++ b/mdl/executor/cmd_odata_mock_test.go @@ -0,0 +1,126 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" +) + +func TestShowODataClients_Mock(t *testing.T) { + mod := mkModule("MyModule") + svc := &model.ConsumedODataService{ + BaseElement: model.BaseElement{ID: nextID("cos")}, + ContainerID: mod.ID, + Name: "PetStoreClient", + MetadataUrl: "https://example.com/$metadata", + Version: "1.0", + ODataVersion: "4.0", + } + h := mkHierarchy(mod) + withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConsumedODataServicesFunc: func() ([]*model.ConsumedODataService, error) { + return []*model.ConsumedODataService{svc}, nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showODataClients(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "QualifiedName") + assertContainsStr(t, out, "MyModule.PetStoreClient") +} + +func TestShowODataServices_Mock(t *testing.T) { + mod := mkModule("MyModule") + svc := &model.PublishedODataService{ + BaseElement: model.BaseElement{ID: nextID("pos")}, + ContainerID: mod.ID, + Name: "CatalogService", + Path: "/odata/v1", + Version: "1.0", + ODataVersion: "4.0", + } + h := mkHierarchy(mod) + withContainer(h, svc.ContainerID, mod.ID) + + mb := 
&mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListPublishedODataServicesFunc: func() ([]*model.PublishedODataService, error) { + return []*model.PublishedODataService{svc}, nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showODataServices(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "QualifiedName") + assertContainsStr(t, out, "MyModule.CatalogService") +} + +func TestDescribeODataClient_Mock(t *testing.T) { + mod := mkModule("MyModule") + svc := &model.ConsumedODataService{ + BaseElement: model.BaseElement{ID: nextID("cos")}, + ContainerID: mod.ID, + Name: "PetStoreClient", + MetadataUrl: "https://example.com/$metadata", + Version: "2.0", + ODataVersion: "4.0", + } + h := mkHierarchy(mod) + withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConsumedODataServicesFunc: func() ([]*model.ConsumedODataService, error) { + return []*model.ConsumedODataService{svc}, nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeODataClient(ctx, ast.QualifiedName{Module: "MyModule", Name: "PetStoreClient"})) + + out := buf.String() + assertContainsStr(t, out, "CREATE ODATA CLIENT") + assertContainsStr(t, out, "MyModule.PetStoreClient") + assertContainsStr(t, out, "https://example.com/$metadata") + assertContainsStr(t, out, "2.0") +} + +func TestDescribeODataService_Mock(t *testing.T) { + mod := mkModule("MyModule") + svc := &model.PublishedODataService{ + BaseElement: model.BaseElement{ID: nextID("pos")}, + ContainerID: mod.ID, + Name: "CatalogService", + Path: "/odata/v1", + Version: "1.0", + ODataVersion: "4.0", + Namespace: "MyApp", + } + h := mkHierarchy(mod) + withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListPublishedODataServicesFunc: func() ([]*model.PublishedODataService, error) { + return 
[]*model.PublishedODataService{svc}, nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeODataService(ctx, ast.QualifiedName{Module: "MyModule", Name: "CatalogService"})) + + out := buf.String() + assertContainsStr(t, out, "CREATE ODATA SERVICE") + assertContainsStr(t, out, "MyModule.CatalogService") +} diff --git a/mdl/executor/cmd_pages_mock_test.go b/mdl/executor/cmd_pages_mock_test.go new file mode 100644 index 00000000..9f45671d --- /dev/null +++ b/mdl/executor/cmd_pages_mock_test.go @@ -0,0 +1,93 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/sdk/pages" +) + +func TestShowPages_Mock(t *testing.T) { + mod := mkModule("MyModule") + pg := mkPage(mod.ID, "Home") + + h := mkHierarchy(mod) + withContainer(h, pg.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListPagesFunc: func() ([]*pages.Page, error) { return []*pages.Page{pg}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showPages(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "MyModule.Home") + assertContainsStr(t, out, "(1 pages)") +} + +func TestShowPages_Mock_FilterByModule(t *testing.T) { + mod1 := mkModule("Sales") + mod2 := mkModule("HR") + pg1 := mkPage(mod1.ID, "OrderList") + pg2 := mkPage(mod2.ID, "EmployeeList") + + h := mkHierarchy(mod1, mod2) + withContainer(h, pg1.ContainerID, mod1.ID) + withContainer(h, pg2.ContainerID, mod2.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListPagesFunc: func() ([]*pages.Page, error) { return []*pages.Page{pg1, pg2}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showPages(ctx, "HR")) + + out := buf.String() + assertNotContainsStr(t, out, "Sales.OrderList") + assertContainsStr(t, out, 
"HR.EmployeeList") +} + +func TestShowSnippets_Mock(t *testing.T) { + mod := mkModule("MyModule") + snp := mkSnippet(mod.ID, "Header") + + h := mkHierarchy(mod) + withContainer(h, snp.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListSnippetsFunc: func() ([]*pages.Snippet, error) { return []*pages.Snippet{snp}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showSnippets(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "MyModule.Header") + assertContainsStr(t, out, "(1 snippets)") +} + +func TestShowLayouts_Mock(t *testing.T) { + mod := mkModule("MyModule") + lay := mkLayout(mod.ID, "Atlas_Default") + + h := mkHierarchy(mod) + withContainer(h, lay.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListLayoutsFunc: func() ([]*pages.Layout, error) { return []*pages.Layout{lay}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showLayouts(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "MyModule.Atlas_Default") + assertContainsStr(t, out, "(1 layouts)") +} diff --git a/mdl/executor/cmd_published_rest_mock_test.go b/mdl/executor/cmd_published_rest_mock_test.go new file mode 100644 index 00000000..93423396 --- /dev/null +++ b/mdl/executor/cmd_published_rest_mock_test.go @@ -0,0 +1,70 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" +) + +func TestShowPublishedRestServices_Mock(t *testing.T) { + mod := mkModule("MyModule") + svc := &model.PublishedRestService{ + BaseElement: model.BaseElement{ID: nextID("prs")}, + ContainerID: mod.ID, + Name: "OrderAPI", + Path: "/rest/orders/v1", + Version: "1.0", + Resources: []*model.PublishedRestResource{ + {Name: "Orders"}, + {Name: "Items"}, + }, + 
} + h := mkHierarchy(mod) + withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListPublishedRestServicesFunc: func() ([]*model.PublishedRestService, error) { + return []*model.PublishedRestService{svc}, nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showPublishedRestServices(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "QualifiedName") + assertContainsStr(t, out, "MyModule.OrderAPI") + assertContainsStr(t, out, "(1 published REST services)") +} + +func TestDescribePublishedRestService_Mock(t *testing.T) { + mod := mkModule("MyModule") + svc := &model.PublishedRestService{ + BaseElement: model.BaseElement{ID: nextID("prs")}, + ContainerID: mod.ID, + Name: "OrderAPI", + Path: "/rest/orders/v1", + Version: "1.0", + } + h := mkHierarchy(mod) + withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListPublishedRestServicesFunc: func() ([]*model.PublishedRestService, error) { + return []*model.PublishedRestService{svc}, nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describePublishedRestService(ctx, ast.QualifiedName{Module: "MyModule", Name: "OrderAPI"})) + + out := buf.String() + assertContainsStr(t, out, "CREATE PUBLISHED REST SERVICE") + assertContainsStr(t, out, "MyModule.OrderAPI") +} diff --git a/mdl/executor/cmd_rest_clients_mock_test.go b/mdl/executor/cmd_rest_clients_mock_test.go new file mode 100644 index 00000000..6670f581 --- /dev/null +++ b/mdl/executor/cmd_rest_clients_mock_test.go @@ -0,0 +1,63 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" +) + +func TestShowRestClients_Mock(t *testing.T) { + mod := mkModule("MyModule") + svc := 
&model.ConsumedRestService{ + BaseElement: model.BaseElement{ID: nextID("crs")}, + ContainerID: mod.ID, + Name: "WeatherAPI", + BaseUrl: "https://api.weather.com", + } + h := mkHierarchy(mod) + withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConsumedRestServicesFunc: func() ([]*model.ConsumedRestService, error) { + return []*model.ConsumedRestService{svc}, nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showRestClients(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "QualifiedName") + assertContainsStr(t, out, "MyModule.WeatherAPI") +} + +func TestDescribeRestClient_Mock(t *testing.T) { + mod := mkModule("MyModule") + svc := &model.ConsumedRestService{ + BaseElement: model.BaseElement{ID: nextID("crs")}, + ContainerID: mod.ID, + Name: "WeatherAPI", + BaseUrl: "https://api.weather.com", + } + h := mkHierarchy(mod) + withContainer(h, svc.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListConsumedRestServicesFunc: func() ([]*model.ConsumedRestService, error) { + return []*model.ConsumedRestService{svc}, nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeRestClient(ctx, ast.QualifiedName{Module: "MyModule", Name: "WeatherAPI"})) + + out := buf.String() + assertContainsStr(t, out, "CREATE REST CLIENT") + assertContainsStr(t, out, "MyModule.WeatherAPI") +} diff --git a/mdl/executor/cmd_security_mock_test.go b/mdl/executor/cmd_security_mock_test.go new file mode 100644 index 00000000..75f23fd9 --- /dev/null +++ b/mdl/executor/cmd_security_mock_test.go @@ -0,0 +1,174 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/sdk/security" +) + +func TestShowProjectSecurity_Mock(t 
*testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetProjectSecurityFunc: func() (*security.ProjectSecurity, error) { + return &security.ProjectSecurity{ + SecurityLevel: "CheckEverything", + EnableDemoUsers: true, + AdminUserName: "MxAdmin", + UserRoles: []*security.UserRole{{Name: "Admin"}, {Name: "User"}}, + DemoUsers: []*security.DemoUser{{UserName: "demo_admin"}}, + PasswordPolicy: &security.PasswordPolicy{MinimumLength: 8}, + }, nil + }, + } + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, showProjectSecurity(ctx)) + + out := buf.String() + assertContainsStr(t, out, "Security Level:") + assertContainsStr(t, out, "MxAdmin") + assertContainsStr(t, out, "Demo Users Enabled:") +} + +func TestShowModuleRoles_Mock(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModuleSecurityFunc: func() ([]*security.ModuleSecurity, error) { + return []*security.ModuleSecurity{{ + ContainerID: mod.ID, + ModuleRoles: []*security.ModuleRole{ + {Name: "Admin"}, + {Name: "User"}, + }, + }}, nil + }, + } + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showModuleRoles(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "Qualified Name") + assertContainsStr(t, out, "Role") + assertContainsStr(t, out, "Admin") + assertContainsStr(t, out, "User") +} + +func TestShowUserRoles_Mock(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetProjectSecurityFunc: func() (*security.ProjectSecurity, error) { + return &security.ProjectSecurity{ + UserRoles: []*security.UserRole{ + {Name: "Administrator", ModuleRoles: []string{"MyModule.Admin"}}, + {Name: "NormalUser", ModuleRoles: []string{"MyModule.User"}}, + }, + }, nil + }, + } + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, showUserRoles(ctx)) + + out := buf.String() + assertContainsStr(t, 
out, "Name") + assertContainsStr(t, out, "Module Roles") + assertContainsStr(t, out, "Administrator") + assertContainsStr(t, out, "NormalUser") +} + +func TestShowDemoUsers_Mock(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetProjectSecurityFunc: func() (*security.ProjectSecurity, error) { + return &security.ProjectSecurity{ + EnableDemoUsers: true, + DemoUsers: []*security.DemoUser{ + {UserName: "demo_admin", UserRoles: []string{"Administrator"}}, + }, + }, nil + }, + } + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, showDemoUsers(ctx)) + + out := buf.String() + assertContainsStr(t, out, "User Name") + assertContainsStr(t, out, "demo_admin") +} + +func TestShowDemoUsers_Disabled_Mock(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetProjectSecurityFunc: func() (*security.ProjectSecurity, error) { + return &security.ProjectSecurity{ + EnableDemoUsers: false, + }, nil + }, + } + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, showDemoUsers(ctx)) + assertContainsStr(t, buf.String(), "Demo users are disabled.") +} + +func TestDescribeModuleRole_Mock(t *testing.T) { + mod := mkModule("MyModule") + h := mkHierarchy(mod) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModuleSecurityFunc: func() ([]*security.ModuleSecurity, error) { + return []*security.ModuleSecurity{{ + ContainerID: mod.ID, + ModuleRoles: []*security.ModuleRole{{Name: "Admin", Description: "Full access"}}, + }}, nil + }, + GetProjectSecurityFunc: func() (*security.ProjectSecurity, error) { + return &security.ProjectSecurity{ + UserRoles: []*security.UserRole{ + {Name: "Administrator", ModuleRoles: []string{"MyModule.Admin"}}, + }, + }, nil + }, + } + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeModuleRole(ctx, ast.QualifiedName{Module: "MyModule", Name: "Admin"})) + assertContainsStr(t, buf.String(), 
"CREATE MODULE ROLE") +} + +func TestDescribeUserRole_Mock(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetProjectSecurityFunc: func() (*security.ProjectSecurity, error) { + return &security.ProjectSecurity{ + UserRoles: []*security.UserRole{ + {Name: "Administrator", ModuleRoles: []string{"MyModule.Admin"}}, + }, + }, nil + }, + } + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, describeUserRole(ctx, ast.QualifiedName{Name: "Administrator"})) + assertContainsStr(t, buf.String(), "CREATE USER ROLE") +} + +func TestDescribeDemoUser_Mock(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetProjectSecurityFunc: func() (*security.ProjectSecurity, error) { + return &security.ProjectSecurity{ + EnableDemoUsers: true, + DemoUsers: []*security.DemoUser{ + {UserName: "demo_admin", UserRoles: []string{"Administrator"}}, + }, + }, nil + }, + } + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, describeDemoUser(ctx, "demo_admin")) + assertContainsStr(t, buf.String(), "CREATE DEMO USER") +} diff --git a/mdl/executor/cmd_settings_mock_test.go b/mdl/executor/cmd_settings_mock_test.go new file mode 100644 index 00000000..22d57881 --- /dev/null +++ b/mdl/executor/cmd_settings_mock_test.go @@ -0,0 +1,48 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" +) + +func TestShowSettings_Mock(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetProjectSettingsFunc: func() (*model.ProjectSettings, error) { + return &model.ProjectSettings{ + Model: &model.ModelSettings{ + HashAlgorithm: "BCrypt", + JavaVersion: "17", + }, + }, nil + }, + } + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, showSettings(ctx)) + + out := buf.String() + assertContainsStr(t, out, "Section") + assertContainsStr(t, out, 
"Key Values") +} + +func TestDescribeSettings_Mock(t *testing.T) { + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + GetProjectSettingsFunc: func() (*model.ProjectSettings, error) { + return &model.ProjectSettings{ + Model: &model.ModelSettings{ + HashAlgorithm: "BCrypt", + JavaVersion: "17", + RoundingMode: "HalfUp", + }, + }, nil + }, + } + ctx, buf := newMockCtx(t, withBackend(mb)) + assertNoError(t, describeSettings(ctx)) + assertContainsStr(t, buf.String(), "ALTER SETTINGS") +} diff --git a/mdl/executor/cmd_workflows_mock_test.go b/mdl/executor/cmd_workflows_mock_test.go new file mode 100644 index 00000000..42437ab6 --- /dev/null +++ b/mdl/executor/cmd_workflows_mock_test.go @@ -0,0 +1,52 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/sdk/workflows" +) + +func TestShowWorkflows_Mock(t *testing.T) { + mod := mkModule("Sales") + wf := mkWorkflow(mod.ID, "ApproveOrder") + + h := mkHierarchy(mod) + withContainer(h, wf.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListWorkflowsFunc: func() ([]*workflows.Workflow, error) { return []*workflows.Workflow{wf}, nil }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, showWorkflows(ctx, "")) + + out := buf.String() + assertContainsStr(t, out, "Qualified Name") + assertContainsStr(t, out, "Sales.ApproveOrder") +} + +func TestDescribeWorkflow_Mock(t *testing.T) { + mod := mkModule("Sales") + wf := mkWorkflow(mod.ID, "ApproveOrder") + wf.Parameter = &workflows.WorkflowParameter{EntityRef: "Sales.Order"} + + h := mkHierarchy(mod) + withContainer(h, wf.ContainerID, mod.ID) + + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListWorkflowsFunc: func() ([]*workflows.Workflow, error) { return []*workflows.Workflow{wf}, nil }, + 
} + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + assertNoError(t, describeWorkflow(ctx, ast.QualifiedName{Module: "Sales", Name: "ApproveOrder"})) + + out := buf.String() + assertContainsStr(t, out, "WORKFLOW") + assertContainsStr(t, out, "Sales.ApproveOrder") +} diff --git a/mdl/executor/cmd_write_handlers_mock_test.go b/mdl/executor/cmd_write_handlers_mock_test.go new file mode 100644 index 00000000..6d1130a9 --- /dev/null +++ b/mdl/executor/cmd_write_handlers_mock_test.go @@ -0,0 +1,328 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/domainmodel" + "github.com/mendixlabs/mxcli/sdk/microflows" + "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/sdk/pages" +) + +func TestExecCreateModule_Mock(t *testing.T) { + called := false + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModulesFunc: func() ([]*model.Module, error) { + return nil, nil // no existing modules + }, + CreateModuleFunc: func(m *model.Module) error { + called = true + return nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb)) + err := execCreateModule(ctx, &ast.CreateModuleStmt{Name: "NewModule"}) + assertNoError(t, err) + assertContainsStr(t, buf.String(), "Created module: NewModule") + if !called { + t.Fatal("CreateModuleFunc was not called") + } +} + +func TestExecDropEnumeration_Mock(t *testing.T) { + mod := mkModule("MyModule") + enum := mkEnumeration(mod.ID, "Status", "Active", "Inactive") + + called := false + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListEnumerationsFunc: func() ([]*model.Enumeration, error) { + return []*model.Enumeration{enum}, nil + }, + ListModulesFunc: func() ([]*model.Module, error) { + return []*model.Module{mod}, nil + }, + DeleteEnumerationFunc: func(id 
model.ID) error { + called = true + return nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb)) + err := execDropEnumeration(ctx, &ast.DropEnumerationStmt{ + Name: ast.QualifiedName{Module: "MyModule", Name: "Status"}, + }) + assertNoError(t, err) + assertContainsStr(t, buf.String(), "Dropped enumeration:") + if !called { + t.Fatal("DeleteEnumerationFunc was not called") + } +} + +func TestExecCreateEnumeration_Mock(t *testing.T) { + mod := mkModule("MyModule") + + h := mkHierarchy(mod) + + called := false + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModulesFunc: func() ([]*model.Module, error) { + return []*model.Module{mod}, nil + }, + ListEnumerationsFunc: func() ([]*model.Enumeration, error) { + return nil, nil // no duplicates + }, + CreateEnumerationFunc: func(e *model.Enumeration) error { + called = true + return nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + err := execCreateEnumeration(ctx, &ast.CreateEnumerationStmt{ + Name: ast.QualifiedName{Module: "MyModule", Name: "Color"}, + Values: []ast.EnumValue{{Name: "Red", Caption: "Red"}, {Name: "Blue", Caption: "Blue"}}, + }) + assertNoError(t, err) + assertContainsStr(t, buf.String(), "Created enumeration:") + if !called { + t.Fatal("CreateEnumerationFunc was not called") + } +} + +func TestExecDropEntity_Mock(t *testing.T) { + mod := mkModule("MyModule") + ent := mkEntity(mod.ID, "Customer") + dm := mkDomainModel(mod.ID, ent) + + called := false + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModulesFunc: func() ([]*model.Module, error) { + return []*model.Module{mod}, nil + }, + GetDomainModelFunc: func(moduleID model.ID) (*domainmodel.DomainModel, error) { + return dm, nil + }, + DeleteEntityFunc: func(domainModelID model.ID, entityID model.ID) error { + called = true + return nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb)) + err := execDropEntity(ctx, &ast.DropEntityStmt{ + Name: 
ast.QualifiedName{Module: "MyModule", Name: "Customer"}, + }) + assertNoError(t, err) + assertContainsStr(t, buf.String(), "Dropped entity:") + if !called { + t.Fatal("DeleteEntityFunc was not called") + } +} + +func TestExecDropMicroflow_Mock(t *testing.T) { + mod := mkModule("MyModule") + mf := mkMicroflow(mod.ID, "DoSomething") + + h := mkHierarchy(mod) + withContainer(h, mf.ContainerID, mod.ID) + + called := false + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListMicroflowsFunc: func() ([]*microflows.Microflow, error) { + return []*microflows.Microflow{mf}, nil + }, + DeleteMicroflowFunc: func(id model.ID) error { + called = true + return nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + err := execDropMicroflow(ctx, &ast.DropMicroflowStmt{ + Name: ast.QualifiedName{Module: "MyModule", Name: "DoSomething"}, + }) + assertNoError(t, err) + assertContainsStr(t, buf.String(), "Dropped microflow:") + if !called { + t.Fatal("DeleteMicroflowFunc was not called") + } +} + +func TestExecDropPage_Mock(t *testing.T) { + mod := mkModule("MyModule") + pg := mkPage(mod.ID, "HomePage") + + h := mkHierarchy(mod) + withContainer(h, pg.ContainerID, mod.ID) + + called := false + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListPagesFunc: func() ([]*pages.Page, error) { + return []*pages.Page{pg}, nil + }, + DeletePageFunc: func(id model.ID) error { + called = true + return nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + err := execDropPage(ctx, &ast.DropPageStmt{ + Name: ast.QualifiedName{Module: "MyModule", Name: "HomePage"}, + }) + assertNoError(t, err) + assertContainsStr(t, buf.String(), "Dropped page") + if !called { + t.Fatal("DeletePageFunc was not called") + } +} + +func TestExecDropSnippet_Mock(t *testing.T) { + mod := mkModule("MyModule") + snp := mkSnippet(mod.ID, "HeaderSnippet") + + h := mkHierarchy(mod) + withContainer(h, snp.ContainerID, 
mod.ID) + + called := false + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListSnippetsFunc: func() ([]*pages.Snippet, error) { + return []*pages.Snippet{snp}, nil + }, + DeleteSnippetFunc: func(id model.ID) error { + called = true + return nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + err := execDropSnippet(ctx, &ast.DropSnippetStmt{ + Name: ast.QualifiedName{Module: "MyModule", Name: "HeaderSnippet"}, + }) + assertNoError(t, err) + assertContainsStr(t, buf.String(), "Dropped snippet") + if !called { + t.Fatal("DeleteSnippetFunc was not called") + } +} + +func TestExecDropAssociation_Mock(t *testing.T) { + mod := mkModule("MyModule") + ent1 := mkEntity(mod.ID, "Order") + ent2 := mkEntity(mod.ID, "Customer") + assoc := mkAssociation(mod.ID, "Order_Customer", ent1.ID, ent2.ID) + + dm := mkDomainModel(mod.ID, ent1, ent2) + dm.Associations = []*domainmodel.Association{assoc} + + called := false + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModulesFunc: func() ([]*model.Module, error) { + return []*model.Module{mod}, nil + }, + GetDomainModelFunc: func(moduleID model.ID) (*domainmodel.DomainModel, error) { + return dm, nil + }, + DeleteAssociationFunc: func(domainModelID model.ID, assocID model.ID) error { + called = true + return nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb)) + err := execDropAssociation(ctx, &ast.DropAssociationStmt{ + Name: ast.QualifiedName{Module: "MyModule", Name: "Order_Customer"}, + }) + assertNoError(t, err) + assertContainsStr(t, buf.String(), "Dropped association:") + if !called { + t.Fatal("DeleteAssociationFunc was not called") + } +} + +func TestExecDropJavaAction_Mock(t *testing.T) { + mod := mkModule("MyModule") + jaID := nextID("ja") + ja := &mpr.JavaAction{ + BaseElement: model.BaseElement{ID: jaID}, + ContainerID: mod.ID, + Name: "MyAction", + } + + h := mkHierarchy(mod) + + called := false + mb := &mock.MockBackend{ + 
IsConnectedFunc: func() bool { return true }, + ListJavaActionsFunc: func() ([]*mpr.JavaAction, error) { + return []*mpr.JavaAction{ja}, nil + }, + DeleteJavaActionFunc: func(id model.ID) error { + called = true + return nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + err := execDropJavaAction(ctx, &ast.DropJavaActionStmt{ + Name: ast.QualifiedName{Module: "MyModule", Name: "MyAction"}, + }) + assertNoError(t, err) + assertContainsStr(t, buf.String(), "Dropped java action:") + if !called { + t.Fatal("DeleteJavaActionFunc was not called") + } +} + +func TestExecDropFolder_Mock(t *testing.T) { + mod := mkModule("MyModule") + folderID := nextID("folder") + folder := &mpr.FolderInfo{ + ID: folderID, + ContainerID: mod.ID, + Name: "Resources", + } + + h := mkHierarchy(mod) + withContainer(h, folderID, mod.ID) + + called := false + mb := &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + ListModulesFunc: func() ([]*model.Module, error) { + return []*model.Module{mod}, nil + }, + ListFoldersFunc: func() ([]*mpr.FolderInfo, error) { + return []*mpr.FolderInfo{folder}, nil + }, + DeleteFolderFunc: func(id model.ID) error { + called = true + return nil + }, + } + + ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) + err := execDropFolder(ctx, &ast.DropFolderStmt{ + FolderPath: "Resources", + Module: "MyModule", + }) + assertNoError(t, err) + assertContainsStr(t, buf.String(), "Dropped folder:") + if !called { + t.Fatal("DeleteFolderFunc was not called") + } +} diff --git a/mdl/executor/mock_test_helpers_test.go b/mdl/executor/mock_test_helpers_test.go new file mode 100644 index 00000000..288290d1 --- /dev/null +++ b/mdl/executor/mock_test_helpers_test.go @@ -0,0 +1,273 @@ +// SPDX-License-Identifier: Apache-2.0 + +package executor + +import ( + "bytes" + "context" + "encoding/json" + "strconv" + "strings" + "sync/atomic" + "testing" + + "github.com/mendixlabs/mxcli/mdl/backend/mock" + 
"github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/domainmodel" + "github.com/mendixlabs/mxcli/sdk/microflows" + "github.com/mendixlabs/mxcli/sdk/pages" + "github.com/mendixlabs/mxcli/sdk/workflows" +) + +// --- Context construction --- + +type mockCtxOption func(*ExecContext) + +// newMockCtx creates an ExecContext backed by a MockBackend with a bytes.Buffer +// as output. Returns the context and the buffer for output assertions. +// The default format is FormatTable. Pass options to override. +func newMockCtx(t *testing.T, opts ...mockCtxOption) (*ExecContext, *bytes.Buffer) { + t.Helper() + var buf bytes.Buffer + ctx := &ExecContext{ + Context: context.Background(), + Backend: &mock.MockBackend{ + IsConnectedFunc: func() bool { return true }, + }, + Output: &buf, + Format: FormatTable, + } + for _, opt := range opts { + opt(ctx) + } + return ctx, &buf +} + +func withBackend(b *mock.MockBackend) mockCtxOption { + return func(ctx *ExecContext) { ctx.Backend = b } +} + +func withFormat(f OutputFormat) mockCtxOption { + return func(ctx *ExecContext) { ctx.Format = f } +} + +func withQuiet() mockCtxOption { + return func(ctx *ExecContext) { ctx.Quiet = true } +} + +func withCache(c *executorCache) mockCtxOption { + return func(ctx *ExecContext) { ctx.Cache = c } +} + +func withHierarchy(h *ContainerHierarchy) mockCtxOption { + return func(ctx *ExecContext) { + if ctx.Cache == nil { + ctx.Cache = &executorCache{} + } + ctx.Cache.hierarchy = h + } +} + +func withMprPath(p string) mockCtxOption { + return func(ctx *ExecContext) { ctx.MprPath = p } +} + +func withSettings(s map[string]any) mockCtxOption { + return func(ctx *ExecContext) { ctx.Settings = s } +} + +// --- Hierarchy construction --- + +// mkHierarchy builds a ContainerHierarchy from modules. After creation, use +// withContainer to register container-parent links for documents (entities, +// enumerations, etc.) so that FindModuleID can walk up to the owning module. 
+func mkHierarchy(modules ...*model.Module) *ContainerHierarchy { + h := &ContainerHierarchy{ + moduleIDs: make(map[model.ID]bool), + moduleNames: make(map[model.ID]string), + containerParent: make(map[model.ID]model.ID), + folderNames: make(map[model.ID]string), + } + for _, m := range modules { + h.moduleIDs[m.ID] = true + h.moduleNames[m.ID] = m.Name + } + return h +} + +// withContainer registers a container-parent link in the hierarchy so that +// ContainerHierarchy.FindModuleID can walk from containerID up to a module. +// In production, FindModuleID is always called with an element's ContainerID +// field (not the element's own ID). For elements whose ContainerID is already +// a module ID, this call is technically redundant (the module is found directly +// in moduleIDs), but it keeps test setup explicit about parentage. For +// intermediate containers (folders, units) this call is required. +func withContainer(h *ContainerHierarchy, containerID, parentID model.ID) { + h.containerParent[containerID] = parentID +} + +// --- Model factories --- + +// idCounter generates unique IDs across all tests in the package. IDs are +// non-deterministic across runs (depend on test execution order), which is +// fine for string-contains assertions but would break exact-value assertions. 
+var idCounter atomic.Int64 + +func nextID(prefix string) model.ID { + n := idCounter.Add(1) + return model.ID(prefix + "-" + strconv.FormatInt(n, 10)) +} + +func mkModule(name string) *model.Module { + return &model.Module{ + BaseElement: model.BaseElement{ID: nextID("mod")}, + Name: name, + } +} + +func mkEnumeration(containerID model.ID, name string, values ...string) *model.Enumeration { + e := &model.Enumeration{ + BaseElement: model.BaseElement{ID: nextID("enum")}, + ContainerID: containerID, + Name: name, + } + for _, v := range values { + e.Values = append(e.Values, model.EnumerationValue{ + BaseElement: model.BaseElement{ID: nextID("ev")}, + Name: v, + }) + } + return e +} + +func mkConstant(containerID model.ID, name string, typ string, defaultVal string) *model.Constant { + return &model.Constant{ + BaseElement: model.BaseElement{ID: nextID("const")}, + ContainerID: containerID, + Name: name, + Type: model.ConstantDataType{Kind: typ}, + DefaultValue: defaultVal, + } +} + +func mkEntity(containerID model.ID, name string) *domainmodel.Entity { + return &domainmodel.Entity{ + BaseElement: model.BaseElement{ID: nextID("ent")}, + ContainerID: containerID, + Name: name, + Persistable: true, + } +} + +func mkDomainModel(containerID model.ID, entities ...*domainmodel.Entity) *domainmodel.DomainModel { + return &domainmodel.DomainModel{ + BaseElement: model.BaseElement{ID: nextID("dm")}, + ContainerID: containerID, + Entities: entities, + } +} + +func mkAssociation(containerID model.ID, name string, parentID, childID model.ID) *domainmodel.Association { + return &domainmodel.Association{ + BaseElement: model.BaseElement{ID: nextID("assoc")}, + ContainerID: containerID, + Name: name, + ParentID: parentID, + ChildID: childID, + Type: "Reference", + Owner: "Default", + } +} + +func mkMicroflow(containerID model.ID, name string) *microflows.Microflow { + return &microflows.Microflow{ + BaseElement: model.BaseElement{ID: nextID("mf")}, + ContainerID: containerID, + Name: 
name, + } +} + +func mkNanoflow(containerID model.ID, name string) *microflows.Nanoflow { + return &microflows.Nanoflow{ + BaseElement: model.BaseElement{ID: nextID("nf")}, + ContainerID: containerID, + Name: name, + } +} + +func mkPage(containerID model.ID, name string) *pages.Page { + return &pages.Page{ + BaseElement: model.BaseElement{ID: nextID("pg")}, + ContainerID: containerID, + Name: name, + } +} + +func mkSnippet(containerID model.ID, name string) *pages.Snippet { + return &pages.Snippet{ + BaseElement: model.BaseElement{ID: nextID("snp")}, + ContainerID: containerID, + Name: name, + } +} + +func mkLayout(containerID model.ID, name string) *pages.Layout { + return &pages.Layout{ + BaseElement: model.BaseElement{ID: nextID("lay")}, + ContainerID: containerID, + Name: name, + } +} + +func mkWorkflow(containerID model.ID, name string) *workflows.Workflow { + return &workflows.Workflow{ + BaseElement: model.BaseElement{ID: nextID("wf")}, + ContainerID: containerID, + Name: name, + } +} + +// --- Assertion helpers --- + +func assertNoError(t *testing.T, err error) { + t.Helper() + if err != nil { + t.Fatalf("unexpected error: %v", err) + } +} + +func assertError(t *testing.T, err error) { + t.Helper() + if err == nil { + t.Fatal("expected error, got nil") + } +} + +func assertContainsStr(t *testing.T, got, want string) { + t.Helper() + if !strings.Contains(got, want) { + t.Errorf("output should contain %q, got:\n%s", want, got) + } +} + +func assertNotContainsStr(t *testing.T, got, unwanted string) { + t.Helper() + if strings.Contains(got, unwanted) { + t.Errorf("output should NOT contain %q, got:\n%s", unwanted, got) + } +} + +// assertValidJSON checks that s is valid JSON starting with '{' or '['. +// Unlike json.Valid alone, this rejects scalar JSON values (true, 123, null) +// which would not be valid handler output. 
+func assertValidJSON(t *testing.T, s string) { + t.Helper() + trimmed := strings.TrimSpace(s) + if len(trimmed) == 0 || (trimmed[0] != '{' && trimmed[0] != '[') { + t.Errorf("expected JSON array or object, got:\n%s", s) + return + } + if !json.Valid([]byte(trimmed)) { + t.Errorf("expected valid JSON, got:\n%s", s) + } +} diff --git a/mdl/visitor/visitor_agenteditor.go b/mdl/visitor/visitor_agenteditor.go index d095f0a4..156c8740 100644 --- a/mdl/visitor/visitor_agenteditor.go +++ b/mdl/visitor/visitor_agenteditor.go @@ -282,7 +282,6 @@ func (b *Builder) ExitCreateAgentStatement(ctx *parser.CreateAgentStatementConte b.statements = append(b.statements, stmt) } - // parseQualifiedNameString splits "Module.Name" into a QualifiedName. func parseQualifiedNameString(s string) ast.QualifiedName { parts := strings.SplitN(s, ".", 2) From 1c7435b39057b93832eda19d96a073652792494a Mon Sep 17 00:00:00 2001 From: Andrew Vasilyev Date: Sun, 19 Apr 2026 15:33:50 +0200 Subject: [PATCH 02/16] refactor: extract shared types and utility functions to mdl/types Create mdl/types/ package with WASM-safe shared types extracted from sdk/mpr: domain model types, ID utilities, EDMX/AsyncAPI parsing, JSON formatting helpers. Migrate all executor handlers to use mdl/types directly, removing type aliases from sdk/mpr/reader_types.go. 
- Extract 16+ domain types to mdl/types/ (infrastructure, java, navigation, mapping) - Extract GenerateID, BlobToUUID, ValidateID to mdl/types/id.go - Extract ParseEdmx, ParseAsyncAPI to mdl/types/edmx.go, asyncapi.go - Extract PrettyPrintJSON, BuildJsonElementsFromSnippet to json_utils.go - Migrate 30+ executor handler files off sdk/mpr type references - sdk/mpr retains thin delegation wrappers for backward compatibility --- cmd/mxcli/project_tree.go | 3 +- mdl/backend/connection.go | 9 +- mdl/backend/doc.go | 7 +- mdl/backend/infrastructure.go | 16 +- mdl/backend/java.go | 8 +- mdl/backend/mapping.go | 8 +- mdl/backend/mock/backend.go | 47 +- mdl/backend/mock/mock_connection.go | 9 +- mdl/backend/mock/mock_infrastructure.go | 16 +- mdl/backend/mock/mock_java.go | 8 +- mdl/backend/mock/mock_mapping.go | 8 +- mdl/backend/mock/mock_module.go | 4 +- mdl/backend/mock/mock_navigation.go | 8 +- mdl/backend/mock/mock_security.go | 4 +- mdl/backend/mock/mock_workflow.go | 6 +- mdl/backend/mpr/backend.go | 86 +-- mdl/backend/mpr/convert.go | 457 +++++++++++++++ mdl/backend/navigation.go | 8 +- mdl/backend/security.go | 6 +- mdl/backend/workflow.go | 6 +- mdl/catalog/builder.go | 12 +- mdl/catalog/builder_contract.go | 6 +- mdl/catalog/builder_navigation.go | 6 +- mdl/catalog/builder_references.go | 4 +- mdl/executor/cmd_businessevents.go | 4 +- mdl/executor/cmd_contract.go | 68 +-- mdl/executor/cmd_entities.go | 32 +- mdl/executor/cmd_error_mock_test.go | 16 +- mdl/executor/cmd_export_mappings.go | 5 +- mdl/executor/cmd_folders.go | 4 +- mdl/executor/cmd_imagecollections.go | 8 +- .../cmd_imagecollections_mock_test.go | 10 +- mdl/executor/cmd_import_mappings.go | 7 +- mdl/executor/cmd_javaactions.go | 54 +- mdl/executor/cmd_javaactions_mock_test.go | 6 +- .../cmd_javascript_actions_mock_test.go | 10 +- mdl/executor/cmd_json_mock_test.go | 18 +- mdl/executor/cmd_jsonstructures.go | 16 +- mdl/executor/cmd_jsonstructures_mock_test.go | 6 +- 
.../cmd_microflows_builder_actions.go | 92 +-- .../cmd_microflows_builder_annotations.go | 12 +- mdl/executor/cmd_microflows_builder_calls.go | 120 ++-- .../cmd_microflows_builder_control.go | 20 +- mdl/executor/cmd_microflows_builder_flows.go | 16 +- mdl/executor/cmd_microflows_builder_graph.go | 8 +- .../cmd_microflows_builder_workflow.go | 38 +- mdl/executor/cmd_microflows_create.go | 6 +- mdl/executor/cmd_misc_mock_test.go | 6 +- mdl/executor/cmd_modules_mock_test.go | 6 +- mdl/executor/cmd_navigation.go | 22 +- mdl/executor/cmd_navigation_mock_test.go | 34 +- mdl/executor/cmd_odata.go | 8 +- mdl/executor/cmd_pages_builder.go | 9 +- mdl/executor/cmd_pages_builder_v3.go | 68 +-- mdl/executor/cmd_pages_builder_v3_layout.go | 36 +- mdl/executor/cmd_pages_builder_v3_widgets.go | 68 +-- mdl/executor/cmd_rename.go | 10 +- mdl/executor/cmd_security_write.go | 16 +- mdl/executor/cmd_workflows_write.go | 16 +- mdl/executor/cmd_write_handlers_mock_test.go | 14 +- mdl/executor/executor.go | 5 +- mdl/executor/helpers.go | 8 +- mdl/executor/widget_engine.go | 5 +- mdl/executor/widget_operations.go | 3 +- mdl/executor/widget_templates.go | 4 +- mdl/linter/rules/page_navigation_security.go | 3 +- mdl/types/asyncapi.go | 205 +++++++ mdl/types/doc.go | 7 + mdl/types/edmx.go | 541 +++++++++++++++++ mdl/types/id.go | 103 ++++ mdl/types/infrastructure.go | 93 +++ mdl/types/java.go | 54 ++ mdl/types/json_utils.go | 374 ++++++++++++ mdl/types/mapping.go | 65 +++ mdl/types/navigation.go | 85 +++ sdk/mpr/asyncapi.go | 203 +------ sdk/mpr/edmx.go | 548 +----------------- sdk/mpr/parser_misc.go | 51 +- sdk/mpr/reader.go | 15 +- sdk/mpr/reader_types.go | 249 +------- sdk/mpr/utils.go | 48 +- sdk/mpr/writer_core.go | 64 +- sdk/mpr/writer_imagecollection.go | 5 +- sdk/mpr/writer_imagecollection_test.go | 5 +- sdk/mpr/writer_jsonstructure.go | 376 +----------- 85 files changed, 2706 insertions(+), 2054 deletions(-) create mode 100644 mdl/backend/mpr/convert.go create mode 100644 
mdl/types/asyncapi.go create mode 100644 mdl/types/doc.go create mode 100644 mdl/types/edmx.go create mode 100644 mdl/types/id.go create mode 100644 mdl/types/infrastructure.go create mode 100644 mdl/types/java.go create mode 100644 mdl/types/json_utils.go create mode 100644 mdl/types/mapping.go create mode 100644 mdl/types/navigation.go diff --git a/cmd/mxcli/project_tree.go b/cmd/mxcli/project_tree.go index 4238e8aa..86cb5859 100644 --- a/cmd/mxcli/project_tree.go +++ b/cmd/mxcli/project_tree.go @@ -9,6 +9,7 @@ import ( "sort" "github.com/mendixlabs/mxcli/mdl/executor" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/mpr" "github.com/spf13/cobra" @@ -951,7 +952,7 @@ func buildDatabaseConnectionChildren(dbc *model.DatabaseConnection) []*TreeNode } // buildMenuTreeNodes recursively builds tree nodes from navigation menu items. -func buildMenuTreeNodes(parent *TreeNode, items []*mpr.NavMenuItem) { +func buildMenuTreeNodes(parent *TreeNode, items []*types.NavMenuItem) { for _, item := range items { label := item.Caption if label == "" { diff --git a/mdl/backend/connection.go b/mdl/backend/connection.go index 0bf5aa61..c1d3594a 100644 --- a/mdl/backend/connection.go +++ b/mdl/backend/connection.go @@ -3,9 +3,8 @@ package backend import ( + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" - "github.com/mendixlabs/mxcli/sdk/mpr/version" ) // ConnectionBackend manages the lifecycle of a backend connection. @@ -22,9 +21,9 @@ type ConnectionBackend interface { // Path returns the path of the connected project, or "" if not connected. Path() string // Version returns the MPR format version. - Version() mpr.MPRVersion + Version() types.MPRVersion // ProjectVersion returns the Mendix project version. - ProjectVersion() *version.ProjectVersion + ProjectVersion() *types.ProjectVersion // GetMendixVersion returns the Mendix version string. 
GetMendixVersion() (string, error) } @@ -42,7 +41,7 @@ type ModuleBackend interface { // FolderBackend provides folder operations. type FolderBackend interface { - ListFolders() ([]*mpr.FolderInfo, error) + ListFolders() ([]*types.FolderInfo, error) CreateFolder(folder *model.Folder) error DeleteFolder(id model.ID) error MoveFolder(id model.ID, newContainerID model.ID) error diff --git a/mdl/backend/doc.go b/mdl/backend/doc.go index 7e316b39..475f526a 100644 --- a/mdl/backend/doc.go +++ b/mdl/backend/doc.go @@ -4,8 +4,7 @@ // executor from concrete storage (e.g. .mpr files). Each interface // groups related read/write operations by domain concept. // -// Several method signatures currently reference types from sdk/mpr -// (e.g. NavigationDocument, FolderInfo, ImageCollection, JsonStructure, -// JavaAction, EntityMemberAccess, RenameHit). These should eventually be -// extracted into a shared types package to remove the mpr dependency. +// Shared value types live in mdl/types to keep this package free of +// sdk/mpr dependencies. Conversion between types.* and sdk/mpr.* +// structs is handled inside mdl/backend/mpr (MprBackend). package backend diff --git a/mdl/backend/infrastructure.go b/mdl/backend/infrastructure.go index a5e153f1..779a2b19 100644 --- a/mdl/backend/infrastructure.go +++ b/mdl/backend/infrastructure.go @@ -3,15 +3,15 @@ package backend import ( + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/agenteditor" - "github.com/mendixlabs/mxcli/sdk/mpr" ) // RenameBackend provides cross-cutting rename and reference-update operations. 
type RenameBackend interface { UpdateQualifiedNameInAllUnits(oldName, newName string) (int, error) - RenameReferences(oldName, newName string, dryRun bool) ([]mpr.RenameHit, error) + RenameReferences(oldName, newName string, dryRun bool) ([]types.RenameHit, error) RenameDocumentByName(moduleName, oldName, newName string) error } @@ -20,9 +20,9 @@ type RenameBackend interface { type RawUnitBackend interface { GetRawUnit(id model.ID) (map[string]any, error) GetRawUnitBytes(id model.ID) ([]byte, error) - ListRawUnitsByType(typePrefix string) ([]*mpr.RawUnit, error) - ListRawUnits(objectType string) ([]*mpr.RawUnitInfo, error) - GetRawUnitByName(objectType, qualifiedName string) (*mpr.RawUnitInfo, error) + ListRawUnitsByType(typePrefix string) ([]*types.RawUnit, error) + ListRawUnits(objectType string) ([]*types.RawUnitInfo, error) + GetRawUnitByName(objectType, qualifiedName string) (*types.RawUnitInfo, error) GetRawMicroflowByName(qualifiedName string) ([]byte, error) UpdateRawUnit(unitID string, contents []byte) error } @@ -30,7 +30,7 @@ type RawUnitBackend interface { // MetadataBackend provides project-level metadata and introspection. type MetadataBackend interface { ListAllUnitIDs() ([]string, error) - ListUnits() ([]*mpr.UnitInfo, error) + ListUnits() ([]*types.UnitInfo, error) GetUnitTypes() (map[string]int, error) GetProjectRootID() (string, error) ContentsDir() string @@ -40,8 +40,8 @@ type MetadataBackend interface { // WidgetBackend provides widget introspection operations. type WidgetBackend interface { - FindCustomWidgetType(widgetID string) (*mpr.RawCustomWidgetType, error) - FindAllCustomWidgetTypes(widgetID string) ([]*mpr.RawCustomWidgetType, error) + FindCustomWidgetType(widgetID string) (*types.RawCustomWidgetType, error) + FindAllCustomWidgetTypes(widgetID string) ([]*types.RawCustomWidgetType, error) } // AgentEditorBackend provides agent editor document operations. 
diff --git a/mdl/backend/java.go b/mdl/backend/java.go index 4908f2c4..cc4935fd 100644 --- a/mdl/backend/java.go +++ b/mdl/backend/java.go @@ -3,18 +3,18 @@ package backend import ( + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/javaactions" - "github.com/mendixlabs/mxcli/sdk/mpr" ) // JavaBackend provides Java and JavaScript action operations. type JavaBackend interface { - ListJavaActions() ([]*mpr.JavaAction, error) + ListJavaActions() ([]*types.JavaAction, error) ListJavaActionsFull() ([]*javaactions.JavaAction, error) - ListJavaScriptActions() ([]*mpr.JavaScriptAction, error) + ListJavaScriptActions() ([]*types.JavaScriptAction, error) ReadJavaActionByName(qualifiedName string) (*javaactions.JavaAction, error) - ReadJavaScriptActionByName(qualifiedName string) (*mpr.JavaScriptAction, error) + ReadJavaScriptActionByName(qualifiedName string) (*types.JavaScriptAction, error) CreateJavaAction(ja *javaactions.JavaAction) error UpdateJavaAction(ja *javaactions.JavaAction) error DeleteJavaAction(id model.ID) error diff --git a/mdl/backend/mapping.go b/mdl/backend/mapping.go index 50008b2d..9bbf1efe 100644 --- a/mdl/backend/mapping.go +++ b/mdl/backend/mapping.go @@ -3,8 +3,8 @@ package backend import ( + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" ) // MappingBackend provides import/export mapping and JSON structure operations. 
@@ -23,8 +23,8 @@ type MappingBackend interface { DeleteExportMapping(id model.ID) error MoveExportMapping(em *model.ExportMapping) error - ListJsonStructures() ([]*mpr.JsonStructure, error) - GetJsonStructureByQualifiedName(moduleName, name string) (*mpr.JsonStructure, error) - CreateJsonStructure(js *mpr.JsonStructure) error + ListJsonStructures() ([]*types.JsonStructure, error) + GetJsonStructureByQualifiedName(moduleName, name string) (*types.JsonStructure, error) + CreateJsonStructure(js *types.JsonStructure) error DeleteJsonStructure(id string) error } diff --git a/mdl/backend/mock/backend.go b/mdl/backend/mock/backend.go index b6dc8a39..e2f78a0c 100644 --- a/mdl/backend/mock/backend.go +++ b/mdl/backend/mock/backend.go @@ -12,8 +12,7 @@ import ( "github.com/mendixlabs/mxcli/sdk/domainmodel" "github.com/mendixlabs/mxcli/sdk/javaactions" "github.com/mendixlabs/mxcli/sdk/microflows" - "github.com/mendixlabs/mxcli/sdk/mpr" - "github.com/mendixlabs/mxcli/sdk/mpr/version" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/pages" "github.com/mendixlabs/mxcli/sdk/security" "github.com/mendixlabs/mxcli/sdk/workflows" @@ -31,8 +30,8 @@ type MockBackend struct { CommitFunc func() error IsConnectedFunc func() bool PathFunc func() string - VersionFunc func() mpr.MPRVersion - ProjectVersionFunc func() *version.ProjectVersion + VersionFunc func() types.MPRVersion + ProjectVersionFunc func() *types.ProjectVersion GetMendixVersionFunc func() (string, error) // ModuleBackend @@ -45,7 +44,7 @@ type MockBackend struct { DeleteModuleWithCleanupFunc func(id model.ID, moduleName string) error // FolderBackend - ListFoldersFunc func() ([]*mpr.FolderInfo, error) + ListFoldersFunc func() ([]*types.FolderInfo, error) CreateFolderFunc func(folder *model.Folder) error DeleteFolderFunc func(id model.ID) error MoveFolderFunc func(id model.ID, newContainerID model.ID) error @@ -144,14 +143,14 @@ type MockBackend struct { RemoveFromAllowedRolesFunc func(unitID 
model.ID, roleName string) (bool, error) AddEntityAccessRuleFunc func(params backend.EntityAccessRuleParams) error RemoveEntityAccessRuleFunc func(unitID model.ID, entityName string, roleNames []string) (int, error) - RevokeEntityMemberAccessFunc func(unitID model.ID, entityName string, roleNames []string, revocation mpr.EntityAccessRevocation) (int, error) + RevokeEntityMemberAccessFunc func(unitID model.ID, entityName string, roleNames []string, revocation types.EntityAccessRevocation) (int, error) RemoveRoleFromAllEntitiesFunc func(unitID model.ID, roleName string) (int, error) ReconcileMemberAccessesFunc func(unitID model.ID, moduleName string) (int, error) // NavigationBackend - ListNavigationDocumentsFunc func() ([]*mpr.NavigationDocument, error) - GetNavigationFunc func() (*mpr.NavigationDocument, error) - UpdateNavigationProfileFunc func(navDocID model.ID, profileName string, spec mpr.NavigationProfileSpec) error + ListNavigationDocumentsFunc func() ([]*types.NavigationDocument, error) + GetNavigationFunc func() (*types.NavigationDocument, error) + UpdateNavigationProfileFunc func(navDocID model.ID, profileName string, spec types.NavigationProfileSpec) error // ServiceBackend ListConsumedODataServicesFunc func() ([]*model.ConsumedODataService, error) @@ -196,17 +195,17 @@ type MockBackend struct { UpdateExportMappingFunc func(em *model.ExportMapping) error DeleteExportMappingFunc func(id model.ID) error MoveExportMappingFunc func(em *model.ExportMapping) error - ListJsonStructuresFunc func() ([]*mpr.JsonStructure, error) - GetJsonStructureByQualifiedNameFunc func(moduleName, name string) (*mpr.JsonStructure, error) - CreateJsonStructureFunc func(js *mpr.JsonStructure) error + ListJsonStructuresFunc func() ([]*types.JsonStructure, error) + GetJsonStructureByQualifiedNameFunc func(moduleName, name string) (*types.JsonStructure, error) + CreateJsonStructureFunc func(js *types.JsonStructure) error DeleteJsonStructureFunc func(id string) error // JavaBackend - 
ListJavaActionsFunc func() ([]*mpr.JavaAction, error) + ListJavaActionsFunc func() ([]*types.JavaAction, error) ListJavaActionsFullFunc func() ([]*javaactions.JavaAction, error) - ListJavaScriptActionsFunc func() ([]*mpr.JavaScriptAction, error) + ListJavaScriptActionsFunc func() ([]*types.JavaScriptAction, error) ReadJavaActionByNameFunc func(qualifiedName string) (*javaactions.JavaAction, error) - ReadJavaScriptActionByNameFunc func(qualifiedName string) (*mpr.JavaScriptAction, error) + ReadJavaScriptActionByNameFunc func(qualifiedName string) (*types.JavaScriptAction, error) CreateJavaActionFunc func(ja *javaactions.JavaAction) error UpdateJavaActionFunc func(ja *javaactions.JavaAction) error DeleteJavaActionFunc func(id model.ID) error @@ -224,8 +223,8 @@ type MockBackend struct { UpdateProjectSettingsFunc func(ps *model.ProjectSettings) error // ImageBackend - ListImageCollectionsFunc func() ([]*mpr.ImageCollection, error) - CreateImageCollectionFunc func(ic *mpr.ImageCollection) error + ListImageCollectionsFunc func() ([]*types.ImageCollection, error) + CreateImageCollectionFunc func(ic *types.ImageCollection) error DeleteImageCollectionFunc func(id string) error // ScheduledEventBackend @@ -234,21 +233,21 @@ type MockBackend struct { // RenameBackend UpdateQualifiedNameInAllUnitsFunc func(oldName, newName string) (int, error) - RenameReferencesFunc func(oldName, newName string, dryRun bool) ([]mpr.RenameHit, error) + RenameReferencesFunc func(oldName, newName string, dryRun bool) ([]types.RenameHit, error) RenameDocumentByNameFunc func(moduleName, oldName, newName string) error // RawUnitBackend GetRawUnitFunc func(id model.ID) (map[string]any, error) GetRawUnitBytesFunc func(id model.ID) ([]byte, error) - ListRawUnitsByTypeFunc func(typePrefix string) ([]*mpr.RawUnit, error) - ListRawUnitsFunc func(objectType string) ([]*mpr.RawUnitInfo, error) - GetRawUnitByNameFunc func(objectType, qualifiedName string) (*mpr.RawUnitInfo, error) + ListRawUnitsByTypeFunc 
func(typePrefix string) ([]*types.RawUnit, error) + ListRawUnitsFunc func(objectType string) ([]*types.RawUnitInfo, error) + GetRawUnitByNameFunc func(objectType, qualifiedName string) (*types.RawUnitInfo, error) GetRawMicroflowByNameFunc func(qualifiedName string) ([]byte, error) UpdateRawUnitFunc func(unitID string, contents []byte) error // MetadataBackend ListAllUnitIDsFunc func() ([]string, error) - ListUnitsFunc func() ([]*mpr.UnitInfo, error) + ListUnitsFunc func() ([]*types.UnitInfo, error) GetUnitTypesFunc func() (map[string]int, error) GetProjectRootIDFunc func() (string, error) ContentsDirFunc func() string @@ -256,8 +255,8 @@ type MockBackend struct { InvalidateCacheFunc func() // WidgetBackend - FindCustomWidgetTypeFunc func(widgetID string) (*mpr.RawCustomWidgetType, error) - FindAllCustomWidgetTypesFunc func(widgetID string) ([]*mpr.RawCustomWidgetType, error) + FindCustomWidgetTypeFunc func(widgetID string) (*types.RawCustomWidgetType, error) + FindAllCustomWidgetTypesFunc func(widgetID string) ([]*types.RawCustomWidgetType, error) // AgentEditorBackend ListAgentEditorModelsFunc func() ([]*agenteditor.Model, error) diff --git a/mdl/backend/mock/mock_connection.go b/mdl/backend/mock/mock_connection.go index 54ebdefd..7c7d6a7f 100644 --- a/mdl/backend/mock/mock_connection.go +++ b/mdl/backend/mock/mock_connection.go @@ -3,8 +3,7 @@ package mock import ( - "github.com/mendixlabs/mxcli/sdk/mpr" - "github.com/mendixlabs/mxcli/sdk/mpr/version" + "github.com/mendixlabs/mxcli/mdl/types" ) func (m *MockBackend) Connect(path string) error { @@ -42,15 +41,15 @@ func (m *MockBackend) Path() string { return "" } -func (m *MockBackend) Version() mpr.MPRVersion { +func (m *MockBackend) Version() types.MPRVersion { if m.VersionFunc != nil { return m.VersionFunc() } - var zero mpr.MPRVersion + var zero types.MPRVersion return zero } -func (m *MockBackend) ProjectVersion() *version.ProjectVersion { +func (m *MockBackend) ProjectVersion() *types.ProjectVersion { if 
m.ProjectVersionFunc != nil { return m.ProjectVersionFunc() } diff --git a/mdl/backend/mock/mock_infrastructure.go b/mdl/backend/mock/mock_infrastructure.go index f0b6f9bc..0e033f08 100644 --- a/mdl/backend/mock/mock_infrastructure.go +++ b/mdl/backend/mock/mock_infrastructure.go @@ -5,7 +5,7 @@ package mock import ( "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/agenteditor" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) // --------------------------------------------------------------------------- @@ -19,7 +19,7 @@ func (m *MockBackend) UpdateQualifiedNameInAllUnits(oldName, newName string) (in return 0, nil } -func (m *MockBackend) RenameReferences(oldName, newName string, dryRun bool) ([]mpr.RenameHit, error) { +func (m *MockBackend) RenameReferences(oldName, newName string, dryRun bool) ([]types.RenameHit, error) { if m.RenameReferencesFunc != nil { return m.RenameReferencesFunc(oldName, newName, dryRun) } @@ -51,21 +51,21 @@ func (m *MockBackend) GetRawUnitBytes(id model.ID) ([]byte, error) { return nil, nil } -func (m *MockBackend) ListRawUnitsByType(typePrefix string) ([]*mpr.RawUnit, error) { +func (m *MockBackend) ListRawUnitsByType(typePrefix string) ([]*types.RawUnit, error) { if m.ListRawUnitsByTypeFunc != nil { return m.ListRawUnitsByTypeFunc(typePrefix) } return nil, nil } -func (m *MockBackend) ListRawUnits(objectType string) ([]*mpr.RawUnitInfo, error) { +func (m *MockBackend) ListRawUnits(objectType string) ([]*types.RawUnitInfo, error) { if m.ListRawUnitsFunc != nil { return m.ListRawUnitsFunc(objectType) } return nil, nil } -func (m *MockBackend) GetRawUnitByName(objectType, qualifiedName string) (*mpr.RawUnitInfo, error) { +func (m *MockBackend) GetRawUnitByName(objectType, qualifiedName string) (*types.RawUnitInfo, error) { if m.GetRawUnitByNameFunc != nil { return m.GetRawUnitByNameFunc(objectType, qualifiedName) } @@ -97,7 +97,7 @@ func (m *MockBackend) ListAllUnitIDs() ([]string, 
error) { return nil, nil } -func (m *MockBackend) ListUnits() ([]*mpr.UnitInfo, error) { +func (m *MockBackend) ListUnits() ([]*types.UnitInfo, error) { if m.ListUnitsFunc != nil { return m.ListUnitsFunc() } @@ -142,14 +142,14 @@ func (m *MockBackend) InvalidateCache() { // WidgetBackend // --------------------------------------------------------------------------- -func (m *MockBackend) FindCustomWidgetType(widgetID string) (*mpr.RawCustomWidgetType, error) { +func (m *MockBackend) FindCustomWidgetType(widgetID string) (*types.RawCustomWidgetType, error) { if m.FindCustomWidgetTypeFunc != nil { return m.FindCustomWidgetTypeFunc(widgetID) } return nil, nil } -func (m *MockBackend) FindAllCustomWidgetTypes(widgetID string) ([]*mpr.RawCustomWidgetType, error) { +func (m *MockBackend) FindAllCustomWidgetTypes(widgetID string) ([]*types.RawCustomWidgetType, error) { if m.FindAllCustomWidgetTypesFunc != nil { return m.FindAllCustomWidgetTypesFunc(widgetID) } diff --git a/mdl/backend/mock/mock_java.go b/mdl/backend/mock/mock_java.go index 776c6e80..c44ce357 100644 --- a/mdl/backend/mock/mock_java.go +++ b/mdl/backend/mock/mock_java.go @@ -5,10 +5,10 @@ package mock import ( "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/javaactions" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) -func (m *MockBackend) ListJavaActions() ([]*mpr.JavaAction, error) { +func (m *MockBackend) ListJavaActions() ([]*types.JavaAction, error) { if m.ListJavaActionsFunc != nil { return m.ListJavaActionsFunc() } @@ -22,7 +22,7 @@ func (m *MockBackend) ListJavaActionsFull() ([]*javaactions.JavaAction, error) { return nil, nil } -func (m *MockBackend) ListJavaScriptActions() ([]*mpr.JavaScriptAction, error) { +func (m *MockBackend) ListJavaScriptActions() ([]*types.JavaScriptAction, error) { if m.ListJavaScriptActionsFunc != nil { return m.ListJavaScriptActionsFunc() } @@ -36,7 +36,7 @@ func (m *MockBackend) ReadJavaActionByName(qualifiedName 
string) (*javaactions.J return nil, nil } -func (m *MockBackend) ReadJavaScriptActionByName(qualifiedName string) (*mpr.JavaScriptAction, error) { +func (m *MockBackend) ReadJavaScriptActionByName(qualifiedName string) (*types.JavaScriptAction, error) { if m.ReadJavaScriptActionByNameFunc != nil { return m.ReadJavaScriptActionByNameFunc(qualifiedName) } diff --git a/mdl/backend/mock/mock_mapping.go b/mdl/backend/mock/mock_mapping.go index 6bdd1592..27b7b0bc 100644 --- a/mdl/backend/mock/mock_mapping.go +++ b/mdl/backend/mock/mock_mapping.go @@ -4,7 +4,7 @@ package mock import ( "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) func (m *MockBackend) ListImportMappings() ([]*model.ImportMapping, error) { @@ -91,21 +91,21 @@ func (m *MockBackend) MoveExportMapping(em *model.ExportMapping) error { return nil } -func (m *MockBackend) ListJsonStructures() ([]*mpr.JsonStructure, error) { +func (m *MockBackend) ListJsonStructures() ([]*types.JsonStructure, error) { if m.ListJsonStructuresFunc != nil { return m.ListJsonStructuresFunc() } return nil, nil } -func (m *MockBackend) GetJsonStructureByQualifiedName(moduleName, name string) (*mpr.JsonStructure, error) { +func (m *MockBackend) GetJsonStructureByQualifiedName(moduleName, name string) (*types.JsonStructure, error) { if m.GetJsonStructureByQualifiedNameFunc != nil { return m.GetJsonStructureByQualifiedNameFunc(moduleName, name) } return nil, nil } -func (m *MockBackend) CreateJsonStructure(js *mpr.JsonStructure) error { +func (m *MockBackend) CreateJsonStructure(js *types.JsonStructure) error { if m.CreateJsonStructureFunc != nil { return m.CreateJsonStructureFunc(js) } diff --git a/mdl/backend/mock/mock_module.go b/mdl/backend/mock/mock_module.go index c8bc4087..ea99426a 100644 --- a/mdl/backend/mock/mock_module.go +++ b/mdl/backend/mock/mock_module.go @@ -3,8 +3,8 @@ package mock import ( + "github.com/mendixlabs/mxcli/mdl/types" 
"github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" ) // --------------------------------------------------------------------------- @@ -64,7 +64,7 @@ func (m *MockBackend) DeleteModuleWithCleanup(id model.ID, moduleName string) er // FolderBackend // --------------------------------------------------------------------------- -func (m *MockBackend) ListFolders() ([]*mpr.FolderInfo, error) { +func (m *MockBackend) ListFolders() ([]*types.FolderInfo, error) { if m.ListFoldersFunc != nil { return m.ListFoldersFunc() } diff --git a/mdl/backend/mock/mock_navigation.go b/mdl/backend/mock/mock_navigation.go index f03c36eb..e993bc80 100644 --- a/mdl/backend/mock/mock_navigation.go +++ b/mdl/backend/mock/mock_navigation.go @@ -4,24 +4,24 @@ package mock import ( "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) -func (m *MockBackend) ListNavigationDocuments() ([]*mpr.NavigationDocument, error) { +func (m *MockBackend) ListNavigationDocuments() ([]*types.NavigationDocument, error) { if m.ListNavigationDocumentsFunc != nil { return m.ListNavigationDocumentsFunc() } return nil, nil } -func (m *MockBackend) GetNavigation() (*mpr.NavigationDocument, error) { +func (m *MockBackend) GetNavigation() (*types.NavigationDocument, error) { if m.GetNavigationFunc != nil { return m.GetNavigationFunc() } return nil, nil } -func (m *MockBackend) UpdateNavigationProfile(navDocID model.ID, profileName string, spec mpr.NavigationProfileSpec) error { +func (m *MockBackend) UpdateNavigationProfile(navDocID model.ID, profileName string, spec types.NavigationProfileSpec) error { if m.UpdateNavigationProfileFunc != nil { return m.UpdateNavigationProfileFunc(navDocID, profileName, spec) } diff --git a/mdl/backend/mock/mock_security.go b/mdl/backend/mock/mock_security.go index f8d03bff..214f907e 100644 --- a/mdl/backend/mock/mock_security.go +++ b/mdl/backend/mock/mock_security.go @@ -5,7 +5,7 @@ package 
mock import ( "github.com/mendixlabs/mxcli/mdl/backend" "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/security" ) @@ -135,7 +135,7 @@ func (m *MockBackend) RemoveEntityAccessRule(unitID model.ID, entityName string, return 0, nil } -func (m *MockBackend) RevokeEntityMemberAccess(unitID model.ID, entityName string, roleNames []string, revocation mpr.EntityAccessRevocation) (int, error) { +func (m *MockBackend) RevokeEntityMemberAccess(unitID model.ID, entityName string, roleNames []string, revocation types.EntityAccessRevocation) (int, error) { if m.RevokeEntityMemberAccessFunc != nil { return m.RevokeEntityMemberAccessFunc(unitID, entityName, roleNames, revocation) } diff --git a/mdl/backend/mock/mock_workflow.go b/mdl/backend/mock/mock_workflow.go index 5f2a1298..73ea9fe0 100644 --- a/mdl/backend/mock/mock_workflow.go +++ b/mdl/backend/mock/mock_workflow.go @@ -4,7 +4,7 @@ package mock import ( "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/workflows" ) @@ -62,14 +62,14 @@ func (m *MockBackend) UpdateProjectSettings(ps *model.ProjectSettings) error { // ImageBackend // --------------------------------------------------------------------------- -func (m *MockBackend) ListImageCollections() ([]*mpr.ImageCollection, error) { +func (m *MockBackend) ListImageCollections() ([]*types.ImageCollection, error) { if m.ListImageCollectionsFunc != nil { return m.ListImageCollectionsFunc() } return nil, nil } -func (m *MockBackend) CreateImageCollection(ic *mpr.ImageCollection) error { +func (m *MockBackend) CreateImageCollection(ic *types.ImageCollection) error { if m.CreateImageCollectionFunc != nil { return m.CreateImageCollectionFunc(ic) } diff --git a/mdl/backend/mpr/backend.go b/mdl/backend/mpr/backend.go index ca93c87f..2dc66ecf 100644 --- 
a/mdl/backend/mpr/backend.go +++ b/mdl/backend/mpr/backend.go @@ -7,13 +7,13 @@ package mprbackend import ( "github.com/mendixlabs/mxcli/mdl/backend" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/agenteditor" "github.com/mendixlabs/mxcli/sdk/domainmodel" "github.com/mendixlabs/mxcli/sdk/javaactions" "github.com/mendixlabs/mxcli/sdk/microflows" "github.com/mendixlabs/mxcli/sdk/mpr" - "github.com/mendixlabs/mxcli/sdk/mpr/version" "github.com/mendixlabs/mxcli/sdk/pages" "github.com/mendixlabs/mxcli/sdk/security" "github.com/mendixlabs/mxcli/sdk/workflows" @@ -75,9 +75,9 @@ func (b *MprBackend) Disconnect() error { func (b *MprBackend) IsConnected() bool { return b.writer != nil } func (b *MprBackend) Path() string { return b.path } -func (b *MprBackend) Version() mpr.MPRVersion { return b.reader.Version() } -func (b *MprBackend) ProjectVersion() *version.ProjectVersion { return b.reader.ProjectVersion() } -func (b *MprBackend) GetMendixVersion() (string, error) { return b.reader.GetMendixVersion() } +func (b *MprBackend) Version() types.MPRVersion { return convertMPRVersion(b.reader.Version()) } +func (b *MprBackend) ProjectVersion() *types.ProjectVersion { return convertProjectVersion(b.reader.ProjectVersion()) } +func (b *MprBackend) GetMendixVersion() (string, error) { return b.reader.GetMendixVersion() } // Commit is a no-op — the MPR writer auto-commits on each write operation. 
func (b *MprBackend) Commit() error { return nil } @@ -102,7 +102,7 @@ func (b *MprBackend) DeleteModuleWithCleanup(id model.ID, moduleName string) err // FolderBackend // --------------------------------------------------------------------------- -func (b *MprBackend) ListFolders() ([]*mpr.FolderInfo, error) { return b.reader.ListFolders() } +func (b *MprBackend) ListFolders() ([]*types.FolderInfo, error) { return convertFolderInfoSlice(b.reader.ListFolders()) } func (b *MprBackend) CreateFolder(folder *model.Folder) error { return b.writer.CreateFolder(folder) } func (b *MprBackend) DeleteFolder(id model.ID) error { return b.writer.DeleteFolder(id) } func (b *MprBackend) MoveFolder(id model.ID, newContainerID model.ID) error { @@ -354,13 +354,13 @@ func (b *MprBackend) RemoveFromAllowedRoles(unitID model.ID, roleName string) (b return b.writer.RemoveFromAllowedRoles(unitID, roleName) } func (b *MprBackend) AddEntityAccessRule(params backend.EntityAccessRuleParams) error { - return b.writer.AddEntityAccessRule(params.UnitID, params.EntityName, params.RoleNames, params.AllowCreate, params.AllowDelete, params.DefaultMemberAccess, params.XPathConstraint, params.MemberAccesses) + return b.writer.AddEntityAccessRule(params.UnitID, params.EntityName, params.RoleNames, params.AllowCreate, params.AllowDelete, params.DefaultMemberAccess, params.XPathConstraint, unconvertEntityMemberAccessSlice(params.MemberAccesses)) } func (b *MprBackend) RemoveEntityAccessRule(unitID model.ID, entityName string, roleNames []string) (int, error) { return b.writer.RemoveEntityAccessRule(unitID, entityName, roleNames) } -func (b *MprBackend) RevokeEntityMemberAccess(unitID model.ID, entityName string, roleNames []string, revocation mpr.EntityAccessRevocation) (int, error) { - return b.writer.RevokeEntityMemberAccess(unitID, entityName, roleNames, revocation) +func (b *MprBackend) RevokeEntityMemberAccess(unitID model.ID, entityName string, roleNames []string, revocation 
types.EntityAccessRevocation) (int, error) { + return b.writer.RevokeEntityMemberAccess(unitID, entityName, roleNames, unconvertEntityAccessRevocation(revocation)) } func (b *MprBackend) RemoveRoleFromAllEntities(unitID model.ID, roleName string) (int, error) { return b.writer.RemoveRoleFromAllEntities(unitID, roleName) @@ -373,14 +373,14 @@ func (b *MprBackend) ReconcileMemberAccesses(unitID model.ID, moduleName string) // NavigationBackend // --------------------------------------------------------------------------- -func (b *MprBackend) ListNavigationDocuments() ([]*mpr.NavigationDocument, error) { - return b.reader.ListNavigationDocuments() +func (b *MprBackend) ListNavigationDocuments() ([]*types.NavigationDocument, error) { + return convertNavDocSlice(b.reader.ListNavigationDocuments()) } -func (b *MprBackend) GetNavigation() (*mpr.NavigationDocument, error) { - return b.reader.GetNavigation() +func (b *MprBackend) GetNavigation() (*types.NavigationDocument, error) { + return convertNavDocPtr(b.reader.GetNavigation()) } -func (b *MprBackend) UpdateNavigationProfile(navDocID model.ID, profileName string, spec mpr.NavigationProfileSpec) error { - return b.writer.UpdateNavigationProfile(navDocID, profileName, spec) +func (b *MprBackend) UpdateNavigationProfile(navDocID model.ID, profileName string, spec types.NavigationProfileSpec) error { + return b.writer.UpdateNavigationProfile(navDocID, profileName, unconvertNavProfileSpec(spec)) } // --------------------------------------------------------------------------- @@ -518,14 +518,14 @@ func (b *MprBackend) MoveExportMapping(em *model.ExportMapping) error { return b.writer.MoveExportMapping(em) } -func (b *MprBackend) ListJsonStructures() ([]*mpr.JsonStructure, error) { - return b.reader.ListJsonStructures() +func (b *MprBackend) ListJsonStructures() ([]*types.JsonStructure, error) { + return convertJsonStructureSlice(b.reader.ListJsonStructures()) } -func (b *MprBackend) 
GetJsonStructureByQualifiedName(moduleName, name string) (*mpr.JsonStructure, error) { - return b.reader.GetJsonStructureByQualifiedName(moduleName, name) +func (b *MprBackend) GetJsonStructureByQualifiedName(moduleName, name string) (*types.JsonStructure, error) { + return convertJsonStructurePtr(b.reader.GetJsonStructureByQualifiedName(moduleName, name)) } -func (b *MprBackend) CreateJsonStructure(js *mpr.JsonStructure) error { - return b.writer.CreateJsonStructure(js) +func (b *MprBackend) CreateJsonStructure(js *types.JsonStructure) error { + return b.writer.CreateJsonStructure(unconvertJsonStructure(js)) } func (b *MprBackend) DeleteJsonStructure(id string) error { return b.writer.DeleteJsonStructure(id) @@ -535,20 +535,20 @@ func (b *MprBackend) DeleteJsonStructure(id string) error { // JavaBackend // --------------------------------------------------------------------------- -func (b *MprBackend) ListJavaActions() ([]*mpr.JavaAction, error) { - return b.reader.ListJavaActions() +func (b *MprBackend) ListJavaActions() ([]*types.JavaAction, error) { + return convertJavaActionSlice(b.reader.ListJavaActions()) } func (b *MprBackend) ListJavaActionsFull() ([]*javaactions.JavaAction, error) { return b.reader.ListJavaActionsFull() } -func (b *MprBackend) ListJavaScriptActions() ([]*mpr.JavaScriptAction, error) { - return b.reader.ListJavaScriptActions() +func (b *MprBackend) ListJavaScriptActions() ([]*types.JavaScriptAction, error) { + return convertJavaScriptActionSlice(b.reader.ListJavaScriptActions()) } func (b *MprBackend) ReadJavaActionByName(qualifiedName string) (*javaactions.JavaAction, error) { return b.reader.ReadJavaActionByName(qualifiedName) } -func (b *MprBackend) ReadJavaScriptActionByName(qualifiedName string) (*mpr.JavaScriptAction, error) { - return b.reader.ReadJavaScriptActionByName(qualifiedName) +func (b *MprBackend) ReadJavaScriptActionByName(qualifiedName string) (*types.JavaScriptAction, error) { + return 
convertJavaScriptActionPtr(b.reader.ReadJavaScriptActionByName(qualifiedName)) } func (b *MprBackend) CreateJavaAction(ja *javaactions.JavaAction) error { return b.writer.CreateJavaAction(ja) @@ -600,11 +600,11 @@ func (b *MprBackend) UpdateProjectSettings(ps *model.ProjectSettings) error { // ImageBackend // --------------------------------------------------------------------------- -func (b *MprBackend) ListImageCollections() ([]*mpr.ImageCollection, error) { - return b.reader.ListImageCollections() +func (b *MprBackend) ListImageCollections() ([]*types.ImageCollection, error) { + return convertImageCollectionSlice(b.reader.ListImageCollections()) } -func (b *MprBackend) CreateImageCollection(ic *mpr.ImageCollection) error { - return b.writer.CreateImageCollection(ic) +func (b *MprBackend) CreateImageCollection(ic *types.ImageCollection) error { + return b.writer.CreateImageCollection(unconvertImageCollection(ic)) } func (b *MprBackend) DeleteImageCollection(id string) error { return b.writer.DeleteImageCollection(id) @@ -628,8 +628,8 @@ func (b *MprBackend) GetScheduledEvent(id model.ID) (*model.ScheduledEvent, erro func (b *MprBackend) UpdateQualifiedNameInAllUnits(oldName, newName string) (int, error) { return b.writer.UpdateQualifiedNameInAllUnits(oldName, newName) } -func (b *MprBackend) RenameReferences(oldName, newName string, dryRun bool) ([]mpr.RenameHit, error) { - return b.writer.RenameReferences(oldName, newName, dryRun) +func (b *MprBackend) RenameReferences(oldName, newName string, dryRun bool) ([]types.RenameHit, error) { + return convertRenameHitSlice(b.writer.RenameReferences(oldName, newName, dryRun)) } func (b *MprBackend) RenameDocumentByName(moduleName, oldName, newName string) error { return b.writer.RenameDocumentByName(moduleName, oldName, newName) @@ -645,14 +645,14 @@ func (b *MprBackend) GetRawUnit(id model.ID) (map[string]any, error) { func (b *MprBackend) GetRawUnitBytes(id model.ID) ([]byte, error) { return 
b.reader.GetRawUnitBytes(id) } -func (b *MprBackend) ListRawUnitsByType(typePrefix string) ([]*mpr.RawUnit, error) { - return b.reader.ListRawUnitsByType(typePrefix) +func (b *MprBackend) ListRawUnitsByType(typePrefix string) ([]*types.RawUnit, error) { + return convertRawUnitSlice(b.reader.ListRawUnitsByType(typePrefix)) } -func (b *MprBackend) ListRawUnits(objectType string) ([]*mpr.RawUnitInfo, error) { - return b.reader.ListRawUnits(objectType) +func (b *MprBackend) ListRawUnits(objectType string) ([]*types.RawUnitInfo, error) { + return convertRawUnitInfoSlice(b.reader.ListRawUnits(objectType)) } -func (b *MprBackend) GetRawUnitByName(objectType, qualifiedName string) (*mpr.RawUnitInfo, error) { - return b.reader.GetRawUnitByName(objectType, qualifiedName) +func (b *MprBackend) GetRawUnitByName(objectType, qualifiedName string) (*types.RawUnitInfo, error) { + return convertRawUnitInfoPtr(b.reader.GetRawUnitByName(objectType, qualifiedName)) } func (b *MprBackend) GetRawMicroflowByName(qualifiedName string) ([]byte, error) { return b.reader.GetRawMicroflowByName(qualifiedName) @@ -666,7 +666,7 @@ func (b *MprBackend) UpdateRawUnit(unitID string, contents []byte) error { // --------------------------------------------------------------------------- func (b *MprBackend) ListAllUnitIDs() ([]string, error) { return b.reader.ListAllUnitIDs() } -func (b *MprBackend) ListUnits() ([]*mpr.UnitInfo, error) { return b.reader.ListUnits() } +func (b *MprBackend) ListUnits() ([]*types.UnitInfo, error) { return convertUnitInfoSlice(b.reader.ListUnits()) } func (b *MprBackend) GetUnitTypes() (map[string]int, error) { return b.reader.GetUnitTypes() } func (b *MprBackend) GetProjectRootID() (string, error) { return b.reader.GetProjectRootID() } func (b *MprBackend) ContentsDir() string { return b.reader.ContentsDir() } @@ -677,11 +677,11 @@ func (b *MprBackend) InvalidateCache() { b.reader.Invalidat // WidgetBackend // 
--------------------------------------------------------------------------- -func (b *MprBackend) FindCustomWidgetType(widgetID string) (*mpr.RawCustomWidgetType, error) { - return b.reader.FindCustomWidgetType(widgetID) +func (b *MprBackend) FindCustomWidgetType(widgetID string) (*types.RawCustomWidgetType, error) { + return convertRawCustomWidgetTypePtr(b.reader.FindCustomWidgetType(widgetID)) } -func (b *MprBackend) FindAllCustomWidgetTypes(widgetID string) ([]*mpr.RawCustomWidgetType, error) { - return b.reader.FindAllCustomWidgetTypes(widgetID) +func (b *MprBackend) FindAllCustomWidgetTypes(widgetID string) ([]*types.RawCustomWidgetType, error) { + return convertRawCustomWidgetTypeSlice(b.reader.FindAllCustomWidgetTypes(widgetID)) } // --------------------------------------------------------------------------- diff --git a/mdl/backend/mpr/convert.go b/mdl/backend/mpr/convert.go new file mode 100644 index 00000000..1cfd39ca --- /dev/null +++ b/mdl/backend/mpr/convert.go @@ -0,0 +1,457 @@ +// SPDX-License-Identifier: Apache-2.0 + +package mprbackend + +import ( + "github.com/mendixlabs/mxcli/mdl/types" + "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/sdk/mpr/version" +) + +// --------------------------------------------------------------------------- +// Conversion helpers: sdk/mpr -> mdl/types +// --------------------------------------------------------------------------- + +func convertMPRVersion(v mpr.MPRVersion) types.MPRVersion { return types.MPRVersion(v) } + +func convertProjectVersion(v *version.ProjectVersion) *types.ProjectVersion { + if v == nil { + return nil + } + return &types.ProjectVersion{ + ProductVersion: v.ProductVersion, + BuildVersion: v.BuildVersion, + FormatVersion: v.FormatVersion, + SchemaHash: v.SchemaHash, + MajorVersion: v.MajorVersion, + MinorVersion: v.MinorVersion, + PatchVersion: v.PatchVersion, + } +} + +func convertFolderInfoSlice(in []*types.FolderInfo, err error) ([]*types.FolderInfo, error) { + if err 
!= nil || in == nil { + return nil, err + } + out := make([]*types.FolderInfo, len(in)) + for i, f := range in { + out[i] = &types.FolderInfo{ID: f.ID, ContainerID: f.ContainerID, Name: f.Name} + } + return out, nil +} + +func convertUnitInfoSlice(in []*types.UnitInfo, err error) ([]*types.UnitInfo, error) { + if err != nil || in == nil { + return nil, err + } + out := make([]*types.UnitInfo, len(in)) + for i, u := range in { + out[i] = &types.UnitInfo{ + ID: u.ID, ContainerID: u.ContainerID, + ContainmentName: u.ContainmentName, Type: u.Type, + } + } + return out, nil +} + +func convertRenameHitSlice(in []mpr.RenameHit, err error) ([]types.RenameHit, error) { + if err != nil || in == nil { + return nil, err + } + out := make([]types.RenameHit, len(in)) + for i, h := range in { + out[i] = types.RenameHit{UnitID: h.UnitID, UnitType: h.UnitType, Name: h.Name, Count: h.Count} + } + return out, nil +} + +func convertRawUnitSlice(in []*types.RawUnit, err error) ([]*types.RawUnit, error) { + if err != nil || in == nil { + return nil, err + } + out := make([]*types.RawUnit, len(in)) + for i, r := range in { + out[i] = &types.RawUnit{ + ID: r.ID, ContainerID: r.ContainerID, Type: r.Type, Contents: r.Contents, + } + } + return out, nil +} + +func convertRawUnitInfoSlice(in []*mpr.RawUnitInfo, err error) ([]*types.RawUnitInfo, error) { + if err != nil || in == nil { + return nil, err + } + out := make([]*types.RawUnitInfo, len(in)) + for i, r := range in { + out[i] = &types.RawUnitInfo{ + ID: r.ID, QualifiedName: r.QualifiedName, Type: r.Type, + ModuleName: r.ModuleName, Contents: r.Contents, + } + } + return out, nil +} + +func convertRawUnitInfoPtr(in *mpr.RawUnitInfo, err error) (*types.RawUnitInfo, error) { + if err != nil || in == nil { + return nil, err + } + return &types.RawUnitInfo{ + ID: in.ID, QualifiedName: in.QualifiedName, Type: in.Type, + ModuleName: in.ModuleName, Contents: in.Contents, + }, nil +} + +func convertRawCustomWidgetTypePtr(in 
*mpr.RawCustomWidgetType, err error) (*types.RawCustomWidgetType, error) { + if err != nil || in == nil { + return nil, err + } + return &types.RawCustomWidgetType{ + WidgetID: in.WidgetID, RawType: in.RawType, RawObject: in.RawObject, + UnitID: in.UnitID, UnitName: in.UnitName, WidgetName: in.WidgetName, + }, nil +} + +func convertRawCustomWidgetTypeSlice(in []*mpr.RawCustomWidgetType, err error) ([]*types.RawCustomWidgetType, error) { + if err != nil || in == nil { + return nil, err + } + out := make([]*types.RawCustomWidgetType, len(in)) + for i, w := range in { + out[i] = &types.RawCustomWidgetType{ + WidgetID: w.WidgetID, RawType: w.RawType, RawObject: w.RawObject, + UnitID: w.UnitID, UnitName: w.UnitName, WidgetName: w.WidgetName, + } + } + return out, nil +} + +func convertJavaActionSlice(in []*types.JavaAction, err error) ([]*types.JavaAction, error) { + if err != nil || in == nil { + return nil, err + } + out := make([]*types.JavaAction, len(in)) + for i, ja := range in { + out[i] = &types.JavaAction{ + BaseElement: ja.BaseElement, + ContainerID: ja.ContainerID, + Name: ja.Name, + Documentation: ja.Documentation, + } + } + return out, nil +} + +func convertJavaScriptActionSlice(in []*types.JavaScriptAction, err error) ([]*types.JavaScriptAction, error) { + if err != nil || in == nil { + return nil, err + } + out := make([]*types.JavaScriptAction, len(in)) + for i, jsa := range in { + out[i] = convertJavaScriptAction(jsa) + } + return out, nil +} + +func convertJavaScriptActionPtr(in *types.JavaScriptAction, err error) (*types.JavaScriptAction, error) { + if err != nil || in == nil { + return nil, err + } + return convertJavaScriptAction(in), nil +} + +func convertJavaScriptAction(in *types.JavaScriptAction) *types.JavaScriptAction { + return &types.JavaScriptAction{ + BaseElement: in.BaseElement, + ContainerID: in.ContainerID, + Name: in.Name, + Documentation: in.Documentation, + Platform: in.Platform, + Excluded: in.Excluded, + ExportLevel: 
in.ExportLevel, + ActionDefaultReturnName: in.ActionDefaultReturnName, + ReturnType: in.ReturnType, + Parameters: in.Parameters, + TypeParameters: in.TypeParameters, + MicroflowActionInfo: in.MicroflowActionInfo, + } +} + +func convertNavDocSlice(in []*types.NavigationDocument, err error) ([]*types.NavigationDocument, error) { + if err != nil || in == nil { + return nil, err + } + out := make([]*types.NavigationDocument, len(in)) + for i, nd := range in { + out[i] = convertNavDoc(nd) + } + return out, nil +} + +func convertNavDocPtr(in *types.NavigationDocument, err error) (*types.NavigationDocument, error) { + if err != nil || in == nil { + return nil, err + } + return convertNavDoc(in), nil +} + +func convertNavDoc(in *types.NavigationDocument) *types.NavigationDocument { + nd := &types.NavigationDocument{ + BaseElement: in.BaseElement, + ContainerID: in.ContainerID, + Name: in.Name, + } + if in.Profiles != nil { + nd.Profiles = make([]*types.NavigationProfile, len(in.Profiles)) + for i, p := range in.Profiles { + nd.Profiles[i] = convertNavProfile(p) + } + } + return nd +} + +func convertNavProfile(in *types.NavigationProfile) *types.NavigationProfile { + p := &types.NavigationProfile{ + Name: in.Name, + Kind: in.Kind, + IsNative: in.IsNative, + LoginPage: in.LoginPage, + NotFoundPage: in.NotFoundPage, + } + if in.HomePage != nil { + p.HomePage = &types.NavHomePage{Page: in.HomePage.Page, Microflow: in.HomePage.Microflow} + } + if in.RoleBasedHomePages != nil { + p.RoleBasedHomePages = make([]*types.NavRoleBasedHome, len(in.RoleBasedHomePages)) + for i, rbh := range in.RoleBasedHomePages { + p.RoleBasedHomePages[i] = &types.NavRoleBasedHome{ + UserRole: rbh.UserRole, Page: rbh.Page, Microflow: rbh.Microflow, + } + } + } + if in.MenuItems != nil { + p.MenuItems = make([]*types.NavMenuItem, len(in.MenuItems)) + for i, mi := range in.MenuItems { + p.MenuItems[i] = convertNavMenuItem(mi) + } + } + if in.OfflineEntities != nil { + p.OfflineEntities = 
make([]*types.NavOfflineEntity, len(in.OfflineEntities)) + for i, oe := range in.OfflineEntities { + p.OfflineEntities[i] = &types.NavOfflineEntity{ + Entity: oe.Entity, SyncMode: oe.SyncMode, Constraint: oe.Constraint, + } + } + } + return p +} + +func convertNavMenuItem(in *types.NavMenuItem) *types.NavMenuItem { + mi := &types.NavMenuItem{ + Caption: in.Caption, Page: in.Page, Microflow: in.Microflow, ActionType: in.ActionType, + } + if in.Items != nil { + mi.Items = make([]*types.NavMenuItem, len(in.Items)) + for i, sub := range in.Items { + mi.Items[i] = convertNavMenuItem(sub) + } + } + return mi +} + +// --------------------------------------------------------------------------- +// Conversion helpers: mdl/types -> sdk/mpr (for write methods) +// --------------------------------------------------------------------------- + +func unconvertNavProfileSpec(s types.NavigationProfileSpec) mpr.NavigationProfileSpec { + out := mpr.NavigationProfileSpec{ + LoginPage: s.LoginPage, + NotFoundPage: s.NotFoundPage, + HasMenu: s.HasMenu, + } + if s.HomePages != nil { + out.HomePages = make([]mpr.NavHomePageSpec, len(s.HomePages)) + for i, hp := range s.HomePages { + out.HomePages[i] = mpr.NavHomePageSpec{IsPage: hp.IsPage, Target: hp.Target, ForRole: hp.ForRole} + } + } + if s.MenuItems != nil { + out.MenuItems = make([]mpr.NavMenuItemSpec, len(s.MenuItems)) + for i, mi := range s.MenuItems { + out.MenuItems[i] = unconvertNavMenuItemSpec(mi) + } + } + return out +} + +func unconvertNavMenuItemSpec(in types.NavMenuItemSpec) mpr.NavMenuItemSpec { + out := mpr.NavMenuItemSpec{Caption: in.Caption, Page: in.Page, Microflow: in.Microflow} + if in.Items != nil { + out.Items = make([]mpr.NavMenuItemSpec, len(in.Items)) + for i, sub := range in.Items { + out.Items[i] = unconvertNavMenuItemSpec(sub) + } + } + return out +} + +func unconvertEntityMemberAccessSlice(in []types.EntityMemberAccess) []mpr.EntityMemberAccess { + if in == nil { + return nil + } + out := 
make([]mpr.EntityMemberAccess, len(in)) + for i, ma := range in { + out[i] = mpr.EntityMemberAccess{ + AttributeRef: ma.AttributeRef, AssociationRef: ma.AssociationRef, AccessRights: ma.AccessRights, + } + } + return out +} + +func unconvertEntityAccessRevocation(in types.EntityAccessRevocation) mpr.EntityAccessRevocation { + return mpr.EntityAccessRevocation{ + RevokeCreate: in.RevokeCreate, + RevokeDelete: in.RevokeDelete, + RevokeReadMembers: in.RevokeReadMembers, + RevokeWriteMembers: in.RevokeWriteMembers, + RevokeReadAll: in.RevokeReadAll, + RevokeWriteAll: in.RevokeWriteAll, + } +} + +func convertJsonStructureSlice(in []*types.JsonStructure, err error) ([]*types.JsonStructure, error) { + if err != nil || in == nil { + return nil, err + } + out := make([]*types.JsonStructure, len(in)) + for i, js := range in { + out[i] = convertJsonStructure(js) + } + return out, nil +} + +func convertJsonStructurePtr(in *types.JsonStructure, err error) (*types.JsonStructure, error) { + if err != nil || in == nil { + return nil, err + } + return convertJsonStructure(in), nil +} + +func convertJsonStructure(in *types.JsonStructure) *types.JsonStructure { + js := &types.JsonStructure{ + BaseElement: in.BaseElement, + ContainerID: in.ContainerID, + Name: in.Name, + Documentation: in.Documentation, + JsonSnippet: in.JsonSnippet, + Excluded: in.Excluded, + ExportLevel: in.ExportLevel, + } + if in.Elements != nil { + js.Elements = make([]*types.JsonElement, len(in.Elements)) + for i, e := range in.Elements { + js.Elements[i] = convertJsonElement(e) + } + } + return js +} + +func convertJsonElement(in *types.JsonElement) *types.JsonElement { + e := &types.JsonElement{ + ExposedName: in.ExposedName, ExposedItemName: in.ExposedItemName, + Path: in.Path, ElementType: in.ElementType, PrimitiveType: in.PrimitiveType, + MinOccurs: in.MinOccurs, MaxOccurs: in.MaxOccurs, Nillable: in.Nillable, + IsDefaultType: in.IsDefaultType, MaxLength: in.MaxLength, + FractionDigits: in.FractionDigits, 
TotalDigits: in.TotalDigits, + OriginalValue: in.OriginalValue, + } + if in.Children != nil { + e.Children = make([]*types.JsonElement, len(in.Children)) + for i, c := range in.Children { + e.Children[i] = convertJsonElement(c) + } + } + return e +} + +func unconvertJsonStructure(in *types.JsonStructure) *types.JsonStructure { + js := &types.JsonStructure{ + BaseElement: in.BaseElement, + ContainerID: in.ContainerID, + Name: in.Name, + Documentation: in.Documentation, + JsonSnippet: in.JsonSnippet, + Excluded: in.Excluded, + ExportLevel: in.ExportLevel, + } + if in.Elements != nil { + js.Elements = make([]*types.JsonElement, len(in.Elements)) + for i, e := range in.Elements { + js.Elements[i] = unconvertJsonElement(e) + } + } + return js +} + +func unconvertJsonElement(in *types.JsonElement) *types.JsonElement { + e := &types.JsonElement{ + ExposedName: in.ExposedName, ExposedItemName: in.ExposedItemName, + Path: in.Path, ElementType: in.ElementType, PrimitiveType: in.PrimitiveType, + MinOccurs: in.MinOccurs, MaxOccurs: in.MaxOccurs, Nillable: in.Nillable, + IsDefaultType: in.IsDefaultType, MaxLength: in.MaxLength, + FractionDigits: in.FractionDigits, TotalDigits: in.TotalDigits, + OriginalValue: in.OriginalValue, + } + if in.Children != nil { + e.Children = make([]*types.JsonElement, len(in.Children)) + for i, c := range in.Children { + e.Children[i] = unconvertJsonElement(c) + } + } + return e +} + +func convertImageCollectionSlice(in []*types.ImageCollection, err error) ([]*types.ImageCollection, error) { + if err != nil || in == nil { + return nil, err + } + out := make([]*types.ImageCollection, len(in)) + for i, ic := range in { + out[i] = convertImageCollection(ic) + } + return out, nil +} + +func convertImageCollection(in *types.ImageCollection) *types.ImageCollection { + ic := &types.ImageCollection{ + BaseElement: in.BaseElement, + ContainerID: in.ContainerID, + Name: in.Name, + ExportLevel: in.ExportLevel, + Documentation: in.Documentation, + } + if 
in.Images != nil { + ic.Images = make([]types.Image, len(in.Images)) + for i, img := range in.Images { + ic.Images[i] = types.Image{ID: img.ID, Name: img.Name, Data: img.Data, Format: img.Format} + } + } + return ic +} + +func unconvertImageCollection(in *types.ImageCollection) *types.ImageCollection { + ic := &types.ImageCollection{ + BaseElement: in.BaseElement, + ContainerID: in.ContainerID, + Name: in.Name, + ExportLevel: in.ExportLevel, + Documentation: in.Documentation, + } + if in.Images != nil { + ic.Images = make([]types.Image, len(in.Images)) + for i, img := range in.Images { + ic.Images[i] = types.Image{ID: img.ID, Name: img.Name, Data: img.Data, Format: img.Format} + } + } + return ic +} diff --git a/mdl/backend/navigation.go b/mdl/backend/navigation.go index 185beaad..3c47cc0d 100644 --- a/mdl/backend/navigation.go +++ b/mdl/backend/navigation.go @@ -3,13 +3,13 @@ package backend import ( + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" ) // NavigationBackend provides navigation document operations. 
type NavigationBackend interface { - ListNavigationDocuments() ([]*mpr.NavigationDocument, error) - GetNavigation() (*mpr.NavigationDocument, error) - UpdateNavigationProfile(navDocID model.ID, profileName string, spec mpr.NavigationProfileSpec) error + ListNavigationDocuments() ([]*types.NavigationDocument, error) + GetNavigation() (*types.NavigationDocument, error) + UpdateNavigationProfile(navDocID model.ID, profileName string, spec types.NavigationProfileSpec) error } diff --git a/mdl/backend/security.go b/mdl/backend/security.go index 14086cc6..1f383249 100644 --- a/mdl/backend/security.go +++ b/mdl/backend/security.go @@ -3,8 +3,8 @@ package backend import ( + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" "github.com/mendixlabs/mxcli/sdk/security" ) @@ -52,7 +52,7 @@ type EntityAccessRuleParams struct { AllowDelete bool DefaultMemberAccess string XPathConstraint string - MemberAccesses []mpr.EntityMemberAccess + MemberAccesses []types.EntityMemberAccess } // EntityAccessBackend manages entity-level access rules and role assignments. 
@@ -62,7 +62,7 @@ type EntityAccessBackend interface { RemoveFromAllowedRoles(unitID model.ID, roleName string) (bool, error) AddEntityAccessRule(params EntityAccessRuleParams) error RemoveEntityAccessRule(unitID model.ID, entityName string, roleNames []string) (int, error) - RevokeEntityMemberAccess(unitID model.ID, entityName string, roleNames []string, revocation mpr.EntityAccessRevocation) (int, error) + RevokeEntityMemberAccess(unitID model.ID, entityName string, roleNames []string, revocation types.EntityAccessRevocation) (int, error) RemoveRoleFromAllEntities(unitID model.ID, roleName string) (int, error) ReconcileMemberAccesses(unitID model.ID, moduleName string) (int, error) } diff --git a/mdl/backend/workflow.go b/mdl/backend/workflow.go index b985170e..87ce241d 100644 --- a/mdl/backend/workflow.go +++ b/mdl/backend/workflow.go @@ -3,8 +3,8 @@ package backend import ( + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" "github.com/mendixlabs/mxcli/sdk/workflows" ) @@ -24,8 +24,8 @@ type SettingsBackend interface { // ImageBackend provides image collection operations. 
type ImageBackend interface { - ListImageCollections() ([]*mpr.ImageCollection, error) - CreateImageCollection(ic *mpr.ImageCollection) error + ListImageCollections() ([]*types.ImageCollection, error) + CreateImageCollection(ic *types.ImageCollection) error DeleteImageCollection(id string) error } diff --git a/mdl/catalog/builder.go b/mdl/catalog/builder.go index 56c62dc9..433fba80 100644 --- a/mdl/catalog/builder.go +++ b/mdl/catalog/builder.go @@ -12,7 +12,7 @@ import ( "github.com/mendixlabs/mxcli/sdk/domainmodel" "github.com/mendixlabs/mxcli/sdk/javaactions" "github.com/mendixlabs/mxcli/sdk/microflows" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/pages" "github.com/mendixlabs/mxcli/sdk/security" "github.com/mendixlabs/mxcli/sdk/workflows" @@ -23,9 +23,9 @@ import ( type CatalogReader interface { // Infrastructure GetRawUnit(id model.ID) (map[string]any, error) - ListRawUnitsByType(typePrefix string) ([]*mpr.RawUnit, error) - ListUnits() ([]*mpr.UnitInfo, error) - ListFolders() ([]*mpr.FolderInfo, error) + ListRawUnitsByType(typePrefix string) ([]*types.RawUnit, error) + ListUnits() ([]*types.UnitInfo, error) + ListFolders() ([]*types.FolderInfo, error) // Modules ListModules() ([]*model.Module, error) @@ -33,7 +33,7 @@ type CatalogReader interface { // Settings & security GetProjectSettings() (*model.ProjectSettings, error) GetProjectSecurity() (*security.ProjectSecurity, error) - GetNavigation() (*mpr.NavigationDocument, error) + GetNavigation() (*types.NavigationDocument, error) // Domain models & enumerations ListDomainModels() ([]*domainmodel.DomainModel, error) @@ -66,7 +66,7 @@ type CatalogReader interface { // Mappings & JSON structures ListImportMappings() ([]*model.ImportMapping, error) ListExportMappings() ([]*model.ExportMapping, error) - ListJsonStructures() ([]*mpr.JsonStructure, error) + ListJsonStructures() ([]*types.JsonStructure, error) } // DescribeFunc generates MDL source 
for a given object type and qualified name. diff --git a/mdl/catalog/builder_contract.go b/mdl/catalog/builder_contract.go index 6cce9ea0..f20cfae6 100644 --- a/mdl/catalog/builder_contract.go +++ b/mdl/catalog/builder_contract.go @@ -7,7 +7,7 @@ import ( "fmt" "strings" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) // buildContractEntities parses cached $metadata from consumed OData services @@ -55,7 +55,7 @@ func (b *Builder) buildContractEntities() error { moduleName := b.hierarchy.getModuleName(moduleID) svcQN := moduleName + "." + svc.Name - doc, err := mpr.ParseEdmx(svc.Metadata) + doc, err := types.ParseEdmx(svc.Metadata) if err != nil { continue // skip services with unparseable metadata } @@ -161,7 +161,7 @@ func (b *Builder) buildContractMessages() error { moduleName := b.hierarchy.getModuleName(moduleID) svcQN := moduleName + "." + svc.Name - doc, err := mpr.ParseAsyncAPI(svc.Document) + doc, err := types.ParseAsyncAPI(svc.Document) if err != nil { continue } diff --git a/mdl/catalog/builder_navigation.go b/mdl/catalog/builder_navigation.go index a5bceb93..82bde2ee 100644 --- a/mdl/catalog/builder_navigation.go +++ b/mdl/catalog/builder_navigation.go @@ -6,7 +6,7 @@ import ( "database/sql" "fmt" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) func (b *Builder) buildNavigation() error { @@ -129,7 +129,7 @@ func (b *Builder) buildNavigation() error { } // countMenuItems recursively counts all menu items. -func countMenuItems(items []*mpr.NavMenuItem) int { +func countMenuItems(items []*types.NavMenuItem) int { count := len(items) for _, item := range items { count += countMenuItems(item.Items) @@ -138,7 +138,7 @@ func countMenuItems(items []*mpr.NavMenuItem) int { } // insertMenuItems recursively inserts menu items with hierarchical path encoding. 
-func insertMenuItems(stmt *sql.Stmt, profileName string, items []*mpr.NavMenuItem, parentPath string, depth int, projectID, snapshotID string) int { +func insertMenuItems(stmt *sql.Stmt, profileName string, items []*types.NavMenuItem, parentPath string, depth int, projectID, snapshotID string) int { count := 0 for i, item := range items { itemPath := fmt.Sprintf("%d", i) diff --git a/mdl/catalog/builder_references.go b/mdl/catalog/builder_references.go index 2d397db3..3fce000a 100644 --- a/mdl/catalog/builder_references.go +++ b/mdl/catalog/builder_references.go @@ -6,9 +6,9 @@ import ( "database/sql" "strings" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/microflows" - "github.com/mendixlabs/mxcli/sdk/mpr" "github.com/mendixlabs/mxcli/sdk/pages" "github.com/mendixlabs/mxcli/sdk/workflows" ) @@ -310,7 +310,7 @@ func (b *Builder) buildReferences() error { } // extractMenuItemRefs extracts page and microflow references from menu items recursively. -func (b *Builder) extractMenuItemRefs(stmt *sql.Stmt, items []*mpr.NavMenuItem, sourceName, projectID, snapshotID string) int { +func (b *Builder) extractMenuItemRefs(stmt *sql.Stmt, items []*types.NavMenuItem, sourceName, projectID, snapshotID string) int { refCount := 0 for _, item := range items { if item.Page != "" { diff --git a/mdl/executor/cmd_businessevents.go b/mdl/executor/cmd_businessevents.go index 294c8908..28fe7eaa 100644 --- a/mdl/executor/cmd_businessevents.go +++ b/mdl/executor/cmd_businessevents.go @@ -9,7 +9,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) // showBusinessEventServices displays a table of all business event service documents. 
@@ -407,7 +407,7 @@ func dropBusinessEventService(ctx *ExecContext, stmt *ast.DropBusinessEventServi // generateChannelName generates a hex channel name (similar to Mendix Studio Pro). func generateChannelName() string { // Generate a UUID-like hex string - uuid := mpr.GenerateID() + uuid := types.GenerateID() return strings.ReplaceAll(uuid, "-", "") } diff --git a/mdl/executor/cmd_contract.go b/mdl/executor/cmd_contract.go index e8185c49..f94da438 100644 --- a/mdl/executor/cmd_contract.go +++ b/mdl/executor/cmd_contract.go @@ -9,9 +9,9 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/domainmodel" - "github.com/mendixlabs/mxcli/sdk/mpr" ) // showContractEntities handles SHOW CONTRACT ENTITIES FROM Module.Service. @@ -226,7 +226,7 @@ func describeContractAction(ctx *ExecContext, name ast.QualifiedName, format str return err } - var action *mpr.EdmAction + var action *types.EdmAction for _, a := range doc.Actions { if strings.EqualFold(a.Name, actionName) { action = a @@ -263,7 +263,7 @@ func describeContractAction(ctx *ExecContext, name ast.QualifiedName, format str } // outputContractEntityMDL outputs a CREATE EXTERNAL ENTITY statement from contract metadata. -func outputContractEntityMDL(ctx *ExecContext, et *mpr.EdmEntityType, svcQN string, doc *mpr.EdmxDocument) error { +func outputContractEntityMDL(ctx *ExecContext, et *types.EdmEntityType, svcQN string, doc *types.EdmxDocument) error { // Find entity set name entitySetName := et.Name + "s" // fallback for _, es := range doc.EntitySets { @@ -315,7 +315,7 @@ func outputContractEntityMDL(ctx *ExecContext, et *mpr.EdmEntityType, svcQN stri } // parseServiceContract finds a consumed OData service by name and parses its cached $metadata. 
-func parseServiceContract(ctx *ExecContext, name ast.QualifiedName) (*mpr.EdmxDocument, string, error) { +func parseServiceContract(ctx *ExecContext, name ast.QualifiedName) (*types.EdmxDocument, string, error) { services, err := ctx.Backend.ListConsumedODataServices() if err != nil { return nil, "", mdlerrors.NewBackend("list consumed OData services", err) @@ -340,7 +340,7 @@ func parseServiceContract(ctx *ExecContext, name ast.QualifiedName) (*mpr.EdmxDo return nil, svcQN, mdlerrors.NewValidationf("no cached contract metadata for %s (MetadataUrl: %s). The service metadata has not been downloaded yet", svcQN, svc.MetadataUrl) } - doc, err := mpr.ParseEdmx(svc.Metadata) + doc, err := types.ParseEdmx(svc.Metadata) if err != nil { return nil, svcQN, mdlerrors.NewBackend(fmt.Sprintf("parse contract metadata for %s", svcQN), err) } @@ -371,7 +371,7 @@ func splitContractRef(name ast.QualifiedName) (ast.QualifiedName, string, error) } // formatEdmType returns a human-readable type string for a property. -func formatEdmType(p *mpr.EdmProperty) string { +func formatEdmType(p *types.EdmProperty) string { t := p.Type if p.MaxLength != "" { t += "(" + p.MaxLength + ")" @@ -398,7 +398,7 @@ func shortenEdmType(t string) string { } // edmToMendixType maps an Edm type to a Mendix attribute type string for MDL output. 
-func edmToMendixType(p *mpr.EdmProperty) string { +func edmToMendixType(p *types.EdmProperty) string { switch p.Type { case "Edm.String": if p.MaxLength != "" && p.MaxLength != "max" { @@ -457,7 +457,7 @@ func createExternalEntities(ctx *ExecContext, s *ast.CreateExternalEntitiesStmt) // Build entity set lookup: entity type qualified name → entity set name esMap := make(map[string]string) - esByType := make(map[string]*mpr.EdmEntitySet) + esByType := make(map[string]*types.EdmEntitySet) for _, es := range doc.EntitySets { esMap[es.EntityType] = es.Name esByType[es.EntityType] = es @@ -491,7 +491,7 @@ func createExternalEntities(ctx *ExecContext, s *ast.CreateExternalEntitiesStmt) } // Build a global type lookup so we can resolve BaseType references across schemas. - typeByQualified := make(map[string]*mpr.EdmEntityType) + typeByQualified := make(map[string]*types.EdmEntityType) for _, schema := range doc.Schemas { for _, et := range schema.EntityTypes { typeByQualified[schema.Namespace+"."+et.Name] = et @@ -533,7 +533,7 @@ func createExternalEntities(ctx *ExecContext, s *ast.CreateExternalEntitiesStmt) // Build key parts from the resolved key (root entity in the chain) var keyParts []*domainmodel.RemoteKeyPart for _, keyName := range keyProps { - var keyProp *mpr.EdmProperty + var keyProp *types.EdmProperty for _, p := range mergedProps { if p.Name == keyName { keyProp = p @@ -621,7 +621,7 @@ func createExternalEntities(ctx *ExecContext, s *ast.CreateExternalEntitiesStmt) Creatable: creatable, Updatable: updatable, } - attr.ID = model.ID(mpr.GenerateID()) + attr.ID = model.ID(types.GenerateID()) attrs = append(attrs, attr) } @@ -646,7 +646,7 @@ func createExternalEntities(ctx *ExecContext, s *ast.CreateExternalEntitiesStmt) Name: mendixName, Location: location, } - newEntity.ID = model.ID(mpr.GenerateID()) + newEntity.ID = model.ID(types.GenerateID()) applyExternalEntityFields(newEntity, et, isTopLevel, serviceRef, entitySet, keyParts, attrs) if err := 
ctx.Backend.CreateEntity(dm.ID, newEntity); err != nil { fmt.Fprintf(ctx.Output, " FAILED: %s.%s — %v\n", targetModule, mendixName, err) @@ -699,8 +699,8 @@ type assocKey struct { func createPrimitiveCollectionNPEs( ctx *ExecContext, dm *domainmodel.DomainModel, - doc *mpr.EdmxDocument, - typeByQualified map[string]*mpr.EdmEntityType, + doc *types.EdmxDocument, + typeByQualified map[string]*types.EdmEntityType, esMap map[string]string, serviceRef string, ) int { @@ -759,7 +759,7 @@ func createPrimitiveCollectionNPEs( // Build the inner attribute type from the element type innerType := p.Type[len("Collection(") : len(p.Type)-1] - innerProp := &mpr.EdmProperty{ + innerProp := &types.EdmProperty{ Name: singular(p.Name), Type: innerType, MaxLength: p.MaxLength, @@ -773,7 +773,7 @@ func createPrimitiveCollectionNPEs( RemoteType: primitiveCollectionRemoteType(innerType, p.Nullable), IsPrimitiveCollection: true, } - attr.ID = model.ID(mpr.GenerateID()) + attr.ID = model.ID(types.GenerateID()) npe := &domainmodel.Entity{ Name: npeName, @@ -783,7 +783,7 @@ func createPrimitiveCollectionNPEs( Source: "Rest$ODataPrimitiveCollectionEntitySource", RemoteServiceName: serviceRef, } - npe.ID = model.ID(mpr.GenerateID()) + npe.ID = model.ID(types.GenerateID()) if err := ctx.Backend.CreateEntity(dm.ID, npe); err != nil { fmt.Fprintf(ctx.Output, " NPE FAILED: %s — %v\n", npeName, err) @@ -805,7 +805,7 @@ func createPrimitiveCollectionNPEs( StorageFormat: domainmodel.StorageFormatColumn, Source: "Rest$ODataPrimitiveCollectionAssociationSource", } - assoc.ID = model.ID(mpr.GenerateID()) + assoc.ID = model.ID(types.GenerateID()) if err := ctx.Backend.CreateAssociation(dm.ID, assoc); err != nil { fmt.Fprintf(ctx.Output, " NPE ASSOC FAILED: %s — %v\n", assocName, err) } @@ -817,7 +817,7 @@ func createPrimitiveCollectionNPEs( // isInheritedProperty reports whether a property name comes from one of the // entity type's base types (rather than being defined on the type itself). 
-func isInheritedProperty(et *mpr.EdmEntityType, propName string, byQN map[string]*mpr.EdmEntityType) bool { +func isInheritedProperty(et *types.EdmEntityType, propName string, byQN map[string]*types.EdmEntityType) bool { for _, p := range et.Properties { if p.Name == propName { return false @@ -882,8 +882,8 @@ func singular(name string) string { func createNavigationAssociations( ctx *ExecContext, dm *domainmodel.DomainModel, - doc *mpr.EdmxDocument, - typeByQualified map[string]*mpr.EdmEntityType, + doc *types.EdmxDocument, + typeByQualified map[string]*types.EdmEntityType, esMap map[string]string, serviceRef string, ) int { @@ -895,7 +895,7 @@ func createNavigationAssociations( nonUpdatable map[string]bool } restrictionsByType := make(map[string]navRestrictions) - esByType := make(map[string]*mpr.EdmEntitySet) + esByType := make(map[string]*types.EdmEntitySet) for _, es := range doc.EntitySets { r := navRestrictions{ nonInsertable: make(map[string]bool), @@ -1027,7 +1027,7 @@ func createNavigationAssociations( CreatableFromParent: creatable, UpdatableFromParent: updatable, } - assoc.ID = model.ID(mpr.GenerateID()) + assoc.ID = model.ID(types.GenerateID()) if err := ctx.Backend.CreateAssociation(dm.ID, assoc); err != nil { fmt.Fprintf(ctx.Output, " ASSOC FAILED: %s.%s — %v\n", parentEnt.Name, assocName, err) @@ -1082,10 +1082,10 @@ func uniqueAssocName(base string, dm *domainmodel.DomainModel, existingAssocs ma // optimistic defaults. 
func applyExternalEntityFields( ent *domainmodel.Entity, - et *mpr.EdmEntityType, + et *types.EdmEntityType, isTopLevel bool, serviceRef string, - entitySet *mpr.EdmEntitySet, + entitySet *types.EdmEntitySet, keyParts []*domainmodel.RemoteKeyPart, attrs []*domainmodel.Attribute, ) { @@ -1133,20 +1133,20 @@ func applyExternalEntityFields( // mergedPropertiesWithKey walks the BaseType chain of an entity type and // returns the merged property list (base properties first, then derived) along // with the key property names from the root of the chain. -func mergedPropertiesWithKey(et *mpr.EdmEntityType, byQualified map[string]*mpr.EdmEntityType) ([]*mpr.EdmProperty, []string) { +func mergedPropertiesWithKey(et *types.EdmEntityType, byQualified map[string]*types.EdmEntityType) ([]*types.EdmProperty, []string) { // Walk to the root, collecting types in order from base → derived. - chain := []*mpr.EdmEntityType{et} + chain := []*types.EdmEntityType{et} current := et for current.BaseType != "" { parent := byQualified[current.BaseType] if parent == nil { break } - chain = append([]*mpr.EdmEntityType{parent}, chain...) + chain = append([]*types.EdmEntityType{parent}, chain...) current = parent } - var merged []*mpr.EdmProperty + var merged []*types.EdmProperty seen := make(map[string]bool) for _, t := range chain { for _, p := range t.Properties { @@ -1176,7 +1176,7 @@ func attrNameForOData(propName, entityName string) string { // edmToDomainModelAttrType converts an EDM property to a domainmodel attribute type. // isKey forces a non-zero length for string keys: Mendix forbids unlimited // strings as part of an external entity key (CE6121). -func edmToDomainModelAttrType(p *mpr.EdmProperty, isKey bool) domainmodel.AttributeType { +func edmToDomainModelAttrType(p *types.EdmProperty, isKey bool) domainmodel.AttributeType { switch p.Type { case "Edm.String": // Studio Pro stores Length=0 (unlimited) for OData strings without MaxLength. 
@@ -1208,7 +1208,7 @@ func edmToDomainModelAttrType(p *mpr.EdmProperty, isKey bool) domainmodel.Attrib } // edmToAstDataType converts an Edm property to an AST data type. -func edmToAstDataType(p *mpr.EdmProperty) ast.DataType { +func edmToAstDataType(p *types.EdmProperty) ast.DataType { switch p.Type { case "Edm.String": length := 200 @@ -1380,7 +1380,7 @@ func describeContractMessage(ctx *ExecContext, name ast.QualifiedName) error { } // parseAsyncAPIContract finds a business event service by name and parses its cached AsyncAPI document. -func parseAsyncAPIContract(ctx *ExecContext, name ast.QualifiedName) (*mpr.AsyncAPIDocument, string, error) { +func parseAsyncAPIContract(ctx *ExecContext, name ast.QualifiedName) (*types.AsyncAPIDocument, string, error) { services, err := ctx.Backend.ListBusinessEventServices() if err != nil { return nil, "", mdlerrors.NewBackend("list business event services", err) @@ -1405,7 +1405,7 @@ func parseAsyncAPIContract(ctx *ExecContext, name ast.QualifiedName) (*mpr.Async return nil, svcQN, mdlerrors.NewValidationf("no cached AsyncAPI contract for %s. This service has no Document field (it may be a publisher, not a consumer)", svcQN) } - doc, err := mpr.ParseAsyncAPI(svc.Document) + doc, err := types.ParseAsyncAPI(svc.Document) if err != nil { return nil, svcQN, mdlerrors.NewBackend(fmt.Sprintf("parse AsyncAPI contract for %s", svcQN), err) } @@ -1417,7 +1417,7 @@ func parseAsyncAPIContract(ctx *ExecContext, name ast.QualifiedName) (*mpr.Async } // asyncTypeString formats an AsyncAPI property type for display. 
-func asyncTypeString(p *mpr.AsyncAPIProperty) string { +func asyncTypeString(p *types.AsyncAPIProperty) string { if p.Format != "" { return p.Type + " (" + p.Format + ")" } diff --git a/mdl/executor/cmd_entities.go b/mdl/executor/cmd_entities.go index 8a231239..9517c446 100644 --- a/mdl/executor/cmd_entities.go +++ b/mdl/executor/cmd_entities.go @@ -9,9 +9,9 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/domainmodel" - "github.com/mendixlabs/mxcli/sdk/mpr" ) // execCreateEntity handles CREATE ENTITY statements. @@ -31,7 +31,7 @@ func buildEventHandlers(ctx *ExecContext, defs []ast.EventHandlerDef) ([]*domain } handlers = append(handlers, &domainmodel.EventHandler{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "DomainModels$EventHandler", }, Moment: domainmodel.EventMoment(d.Moment), @@ -134,7 +134,7 @@ func execCreateEntity(ctx *ExecContext, s *ast.CreateEntityStmt) error { } // Generate ID for the attribute so we can reference it in validation rules/indexes - attrID := model.ID(mpr.GenerateID()) + attrID := model.ID(types.GenerateID()) attrNameToID[a.Name] = attrID attr := &domainmodel.Attribute{ @@ -188,12 +188,12 @@ func execCreateEntity(ctx *ExecContext, s *ast.CreateEntityStmt) error { AttributeID: attrID, Type: "Required", } - vr.ID = model.ID(mpr.GenerateID()) + vr.ID = model.ID(types.GenerateID()) if a.NotNullError != "" { vr.ErrorMessage = &model.Text{ Translations: map[string]string{"en_US": a.NotNullError}, } - vr.ErrorMessage.ID = model.ID(mpr.GenerateID()) + vr.ErrorMessage.ID = model.ID(types.GenerateID()) } validationRules = append(validationRules, vr) } @@ -204,12 +204,12 @@ func execCreateEntity(ctx *ExecContext, s *ast.CreateEntityStmt) error { AttributeID: attrID, Type: "Unique", } - vr.ID = model.ID(mpr.GenerateID()) 
+ vr.ID = model.ID(types.GenerateID()) if a.UniqueError != "" { vr.ErrorMessage = &model.Text{ Translations: map[string]string{"en_US": a.UniqueError}, } - vr.ErrorMessage.ID = model.ID(mpr.GenerateID()) + vr.ErrorMessage.ID = model.ID(types.GenerateID()) } validationRules = append(validationRules, vr) } @@ -218,11 +218,11 @@ func execCreateEntity(ctx *ExecContext, s *ast.CreateEntityStmt) error { // Create indexes var indexes []*domainmodel.Index for _, idx := range s.Indexes { - idxID := model.ID(mpr.GenerateID()) + idxID := model.ID(types.GenerateID()) var indexAttrs []*domainmodel.IndexAttribute for _, col := range idx.Columns { if attrID, ok := attrNameToID[col.Name]; ok { - iaID := model.ID(mpr.GenerateID()) + iaID := model.ID(types.GenerateID()) ia := &domainmodel.IndexAttribute{ AttributeID: attrID, Ascending: !col.Descending, @@ -529,7 +529,7 @@ func execAlterEntity(ctx *ExecContext, s *ast.AlterEntityStmt) error { } } - attrID := model.ID(mpr.GenerateID()) + attrID := model.ID(types.GenerateID()) attr := &domainmodel.Attribute{ Name: a.Name, Documentation: a.Documentation, @@ -569,12 +569,12 @@ func execAlterEntity(ctx *ExecContext, s *ast.AlterEntityStmt) error { AttributeID: attrID, Type: "Required", } - vr.ID = model.ID(mpr.GenerateID()) + vr.ID = model.ID(types.GenerateID()) if a.NotNullError != "" { vr.ErrorMessage = &model.Text{ Translations: map[string]string{"en_US": a.NotNullError}, } - vr.ErrorMessage.ID = model.ID(mpr.GenerateID()) + vr.ErrorMessage.ID = model.ID(types.GenerateID()) } entity.ValidationRules = append(entity.ValidationRules, vr) } @@ -583,12 +583,12 @@ func execAlterEntity(ctx *ExecContext, s *ast.AlterEntityStmt) error { AttributeID: attrID, Type: "Unique", } - vr.ID = model.ID(mpr.GenerateID()) + vr.ID = model.ID(types.GenerateID()) if a.UniqueError != "" { vr.ErrorMessage = &model.Text{ Translations: map[string]string{"en_US": a.UniqueError}, } - vr.ErrorMessage.ID = model.ID(mpr.GenerateID()) + vr.ErrorMessage.ID = 
model.ID(types.GenerateID()) } entity.ValidationRules = append(entity.ValidationRules, vr) } @@ -808,7 +808,7 @@ func execAlterEntity(ctx *ExecContext, s *ast.AlterEntityStmt) error { for _, attr := range entity.Attributes { attrNameToID[attr.Name] = attr.ID } - idxID := model.ID(mpr.GenerateID()) + idxID := model.ID(types.GenerateID()) var indexAttrs []*domainmodel.IndexAttribute for _, col := range s.Index.Columns { if attrID, ok := attrNameToID[col.Name]; ok { @@ -816,7 +816,7 @@ func execAlterEntity(ctx *ExecContext, s *ast.AlterEntityStmt) error { AttributeID: attrID, Ascending: !col.Descending, } - ia.ID = model.ID(mpr.GenerateID()) + ia.ID = model.ID(types.GenerateID()) indexAttrs = append(indexAttrs, ia) } else { return mdlerrors.NewNotFoundMsg("attribute", col.Name, fmt.Sprintf("attribute '%s' not found for index on entity %s", col.Name, s.Name)) diff --git a/mdl/executor/cmd_error_mock_test.go b/mdl/executor/cmd_error_mock_test.go index d60fa2db..cdc959af 100644 --- a/mdl/executor/cmd_error_mock_test.go +++ b/mdl/executor/cmd_error_mock_test.go @@ -8,10 +8,10 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/agenteditor" "github.com/mendixlabs/mxcli/sdk/microflows" - "github.com/mendixlabs/mxcli/sdk/mpr" "github.com/mendixlabs/mxcli/sdk/pages" "github.com/mendixlabs/mxcli/sdk/security" "github.com/mendixlabs/mxcli/sdk/workflows" @@ -131,7 +131,7 @@ func TestShowPublishedRestServices_Mock_BackendError(t *testing.T) { func TestShowJavaActions_Mock_BackendError(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - ListJavaActionsFunc: func() ([]*mpr.JavaAction, error) { return nil, errBackend }, + ListJavaActionsFunc: func() ([]*types.JavaAction, error) { return nil, errBackend }, } ctx, _ := newMockCtx(t, withBackend(mb)) assertError(t, showJavaActions(ctx, "")) @@ 
-140,7 +140,7 @@ func TestShowJavaActions_Mock_BackendError(t *testing.T) { func TestShowJavaScriptActions_Mock_BackendError(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - ListJavaScriptActionsFunc: func() ([]*mpr.JavaScriptAction, error) { return nil, errBackend }, + ListJavaScriptActionsFunc: func() ([]*types.JavaScriptAction, error) { return nil, errBackend }, } ctx, _ := newMockCtx(t, withBackend(mb)) assertError(t, showJavaScriptActions(ctx, "")) @@ -158,7 +158,7 @@ func TestShowDatabaseConnections_Mock_BackendError(t *testing.T) { func TestShowImageCollections_Mock_BackendError(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - ListImageCollectionsFunc: func() ([]*mpr.ImageCollection, error) { return nil, errBackend }, + ListImageCollectionsFunc: func() ([]*types.ImageCollection, error) { return nil, errBackend }, } ctx, _ := newMockCtx(t, withBackend(mb)) assertError(t, showImageCollections(ctx, "")) @@ -167,7 +167,7 @@ func TestShowImageCollections_Mock_BackendError(t *testing.T) { func TestShowJsonStructures_Mock_BackendError(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - ListJsonStructuresFunc: func() ([]*mpr.JsonStructure, error) { return nil, errBackend }, + ListJsonStructuresFunc: func() ([]*types.JsonStructure, error) { return nil, errBackend }, } ctx, _ := newMockCtx(t, withBackend(mb)) assertError(t, showJsonStructures(ctx, "")) @@ -176,7 +176,7 @@ func TestShowJsonStructures_Mock_BackendError(t *testing.T) { func TestShowNavigation_Mock_BackendError(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - GetNavigationFunc: func() (*mpr.NavigationDocument, error) { return nil, errBackend }, + GetNavigationFunc: func() (*types.NavigationDocument, error) { return nil, errBackend }, } ctx, _ := newMockCtx(t, withBackend(mb)) assertError(t, showNavigation(ctx)) @@ -340,7 +340,7 @@ func 
TestDescribeWorkflow_Mock_BackendError(t *testing.T) { func TestDescribeNavigation_Mock_BackendError(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - GetNavigationFunc: func() (*mpr.NavigationDocument, error) { return nil, errBackend }, + GetNavigationFunc: func() (*types.NavigationDocument, error) { return nil, errBackend }, } ctx, _ := newMockCtx(t, withBackend(mb)) assertError(t, describeNavigation(ctx, ast.QualifiedName{Module: "M", Name: "N"})) @@ -376,7 +376,7 @@ func TestDescribeRestClient_Mock_BackendError(t *testing.T) { func TestDescribeImageCollection_Mock_BackendError(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - ListImageCollectionsFunc: func() ([]*mpr.ImageCollection, error) { return nil, errBackend }, + ListImageCollectionsFunc: func() ([]*types.ImageCollection, error) { return nil, errBackend }, } ctx, _ := newMockCtx(t, withBackend(mb)) assertError(t, describeImageCollection(ctx, ast.QualifiedName{Module: "M", Name: "I"})) diff --git a/mdl/executor/cmd_export_mappings.go b/mdl/executor/cmd_export_mappings.go index 0745cad6..2633b97c 100644 --- a/mdl/executor/cmd_export_mappings.go +++ b/mdl/executor/cmd_export_mappings.go @@ -11,6 +11,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/mpr" ) @@ -207,7 +208,7 @@ func execCreateExportMapping(ctx *ExecContext, s *ast.CreateExportMappingStmt) e } // Build a path→element info map from the JSON structure for schema alignment. 
- jsElems := map[string]*mpr.JsonElement{} + jsElems := map[string]*types.JsonElement{} if s.SchemaKind == "JSON_STRUCTURE" && s.SchemaRef.Module != "" { if js, err2 := ctx.Backend.GetJsonStructureByQualifiedName(s.SchemaRef.Module, s.SchemaRef.Name); err2 == nil { buildJsonElementPathMap(js.Elements, jsElems) @@ -232,7 +233,7 @@ func execCreateExportMapping(ctx *ExecContext, s *ast.CreateExportMappingStmt) e // buildExportMappingElementModel converts an AST element definition to a model element. // It clones properties from the matching JSON structure element and adds mapping bindings. -func buildExportMappingElementModel(moduleName string, def *ast.ExportMappingElementDef, parentEntity, parentPath string, jsElems map[string]*mpr.JsonElement, reader *mpr.Reader, isRoot bool) *model.ExportMappingElement { +func buildExportMappingElementModel(moduleName string, def *ast.ExportMappingElementDef, parentEntity, parentPath string, jsElems map[string]*types.JsonElement, reader *mpr.Reader, isRoot bool) *model.ExportMappingElement { elem := &model.ExportMappingElement{ BaseElement: model.BaseElement{ ID: model.ID(mpr.GenerateID()), diff --git a/mdl/executor/cmd_folders.go b/mdl/executor/cmd_folders.go index 5dafe8f8..15e373eb 100644 --- a/mdl/executor/cmd_folders.go +++ b/mdl/executor/cmd_folders.go @@ -10,11 +10,11 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) // findFolderByPath walks a folder path under a module and returns the folder ID. 
-func findFolderByPath(ctx *ExecContext, moduleID model.ID, folderPath string, folders []*mpr.FolderInfo) (model.ID, error) { +func findFolderByPath(ctx *ExecContext, moduleID model.ID, folderPath string, folders []*types.FolderInfo) (model.ID, error) { parts := strings.Split(folderPath, "/") currentContainerID := moduleID diff --git a/mdl/executor/cmd_imagecollections.go b/mdl/executor/cmd_imagecollections.go index fdafb88a..9521f0b6 100644 --- a/mdl/executor/cmd_imagecollections.go +++ b/mdl/executor/cmd_imagecollections.go @@ -11,7 +11,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) // execCreateImageCollection handles CREATE IMAGE COLLECTION statements. @@ -33,7 +33,7 @@ func execCreateImageCollection(ctx *ExecContext, s *ast.CreateImageCollectionStm } // Build ImageCollection - ic := &mpr.ImageCollection{ + ic := &types.ImageCollection{ ContainerID: module.ID, Name: s.Name.Name, ExportLevel: s.ExportLevel, @@ -55,7 +55,7 @@ func execCreateImageCollection(ctx *ExecContext, s *ast.CreateImageCollectionStm return mdlerrors.NewBackend(fmt.Sprintf("read image file %q", item.FilePath), err) } format := extToImageFormat(filepath.Ext(filePath)) - ic.Images = append(ic.Images, mpr.Image{ + ic.Images = append(ic.Images, types.Image{ Name: item.Name, Data: data, Format: format, @@ -232,7 +232,7 @@ func showImageCollections(ctx *ExecContext, moduleName string) error { } // findImageCollection finds an image collection by module and name. 
-func findImageCollection(ctx *ExecContext, moduleName, collectionName string) *mpr.ImageCollection { +func findImageCollection(ctx *ExecContext, moduleName, collectionName string) *types.ImageCollection { collections, err := ctx.Backend.ListImageCollections() if err != nil { return nil diff --git a/mdl/executor/cmd_imagecollections_mock_test.go b/mdl/executor/cmd_imagecollections_mock_test.go index 9b5f7548..eb7650bd 100644 --- a/mdl/executor/cmd_imagecollections_mock_test.go +++ b/mdl/executor/cmd_imagecollections_mock_test.go @@ -7,13 +7,13 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" ) func TestShowImageCollections_Mock(t *testing.T) { mod := mkModule("Icons") - ic := &mpr.ImageCollection{ + ic := &types.ImageCollection{ BaseElement: model.BaseElement{ID: nextID("ic")}, ContainerID: mod.ID, Name: "AppIcons", @@ -25,7 +25,7 @@ func TestShowImageCollections_Mock(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - ListImageCollectionsFunc: func() ([]*mpr.ImageCollection, error) { return []*mpr.ImageCollection{ic}, nil }, + ListImageCollectionsFunc: func() ([]*types.ImageCollection, error) { return []*types.ImageCollection{ic}, nil }, } ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) @@ -38,7 +38,7 @@ func TestShowImageCollections_Mock(t *testing.T) { func TestDescribeImageCollection_Mock(t *testing.T) { mod := mkModule("Icons") - ic := &mpr.ImageCollection{ + ic := &types.ImageCollection{ BaseElement: model.BaseElement{ID: nextID("ic")}, ContainerID: mod.ID, Name: "AppIcons", @@ -50,7 +50,7 @@ func TestDescribeImageCollection_Mock(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - ListImageCollectionsFunc: func() ([]*mpr.ImageCollection, error) { return []*mpr.ImageCollection{ic}, nil }, + 
ListImageCollectionsFunc: func() ([]*types.ImageCollection, error) { return []*types.ImageCollection{ic}, nil }, } ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) diff --git a/mdl/executor/cmd_import_mappings.go b/mdl/executor/cmd_import_mappings.go index a750fc3f..4837a8c3 100644 --- a/mdl/executor/cmd_import_mappings.go +++ b/mdl/executor/cmd_import_mappings.go @@ -11,6 +11,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/mpr" ) @@ -216,7 +217,7 @@ func execCreateImportMapping(ctx *ExecContext, s *ast.CreateImportMappingStmt) e } // Build path→JsonElement map from JSON structure — mapping elements clone from this - jsElementsByPath := map[string]*mpr.JsonElement{} + jsElementsByPath := map[string]*types.JsonElement{} if s.SchemaKind == "JSON_STRUCTURE" && s.SchemaRef.Module != "" { if js, err2 := ctx.Backend.GetJsonStructureByQualifiedName(s.SchemaRef.Module, s.SchemaRef.Name); err2 == nil { buildJsonElementPathMap(js.Elements, jsElementsByPath) @@ -243,7 +244,7 @@ func execCreateImportMapping(ctx *ExecContext, s *ast.CreateImportMappingStmt) e // It clones properties from the matching JSON structure element (ExposedName, JsonPath, // MaxOccurs, ElementType, etc.) and adds mapping-specific bindings (Entity, Attribute, // Association, ObjectHandling). 
-func buildImportMappingElementModel(moduleName string, def *ast.ImportMappingElementDef, parentEntity, parentPath string, reader *mpr.Reader, jsElems map[string]*mpr.JsonElement, isRoot bool) *model.ImportMappingElement { +func buildImportMappingElementModel(moduleName string, def *ast.ImportMappingElementDef, parentEntity, parentPath string, reader *mpr.Reader, jsElems map[string]*types.JsonElement, isRoot bool) *model.ImportMappingElement { elem := &model.ImportMappingElement{ BaseElement: model.BaseElement{ ID: model.ID(mpr.GenerateID()), @@ -336,7 +337,7 @@ func buildImportMappingElementModel(moduleName string, def *ast.ImportMappingEle } // buildJsonElementPathMap recursively builds a map from JSON path → JsonElement. -func buildJsonElementPathMap(elems []*mpr.JsonElement, m map[string]*mpr.JsonElement) { +func buildJsonElementPathMap(elems []*types.JsonElement, m map[string]*types.JsonElement) { for _, e := range elems { if e == nil { continue diff --git a/mdl/executor/cmd_javaactions.go b/mdl/executor/cmd_javaactions.go index eb19cbd8..395d1010 100644 --- a/mdl/executor/cmd_javaactions.go +++ b/mdl/executor/cmd_javaactions.go @@ -12,9 +12,9 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/javaactions" - "github.com/mendixlabs/mxcli/sdk/mpr" ) // showJavaActions handles SHOW JAVA ACTIONS command. 
@@ -325,7 +325,7 @@ func execCreateJavaAction(ctx *ExecContext, s *ast.CreateJavaActionStmt) error { // Create the Java action ja := &javaactions.JavaAction{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "JavaActions$JavaAction", }, ContainerID: containerID, @@ -339,7 +339,7 @@ func execCreateJavaAction(ctx *ExecContext, s *ast.CreateJavaActionStmt) error { for _, tpName := range s.TypeParameters { tpDef := &javaactions.TypeParameterDef{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), }, Name: tpName, } @@ -360,7 +360,7 @@ func execCreateJavaAction(ctx *ExecContext, s *ast.CreateJavaActionStmt) error { for _, param := range s.Parameters { jaParam := &javaactions.JavaActionParameter{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "JavaActions$JavaActionParameter", }, Name: param.Name, @@ -370,7 +370,7 @@ func execCreateJavaAction(ctx *ExecContext, s *ast.CreateJavaActionStmt) error { // Explicit ENTITY → EntityTypeParameterType (entity type selector) tpName := param.Type.TypeParamName jaParam.ParameterType = &javaactions.EntityTypeParameterType{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, TypeParameterID: typeParamNameToID[tpName], TypeParameterName: tpName, } @@ -378,7 +378,7 @@ func execCreateJavaAction(ctx *ExecContext, s *ast.CreateJavaActionStmt) error { // Bare name matching a type parameter → TypeParameter (ParameterizedEntityType) tpName := getTypeParamRefName(param.Type) jaParam.ParameterType = &javaactions.TypeParameter{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, TypeParameterID: typeParamNameToID[tpName], TypeParameter: tpName, } @@ -392,7 +392,7 @@ func execCreateJavaAction(ctx *ExecContext, s 
*ast.CreateJavaActionStmt) error { if isTypeParamRef(s.ReturnType, typeParamNames) { tpName := getTypeParamRefName(s.ReturnType) ja.ReturnType = &javaactions.TypeParameter{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, TypeParameterID: typeParamNameToID[tpName], TypeParameter: tpName, } @@ -403,7 +403,7 @@ func execCreateJavaAction(ctx *ExecContext, s *ast.CreateJavaActionStmt) error { // Build MicroflowActionInfo if EXPOSED AS clause is present if s.ExposedCaption != "" { ja.MicroflowActionInfo = &javaactions.MicroflowActionInfo{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Caption: s.ExposedCaption, Category: s.ExposedCategory, } @@ -434,42 +434,42 @@ func astDataTypeToJavaActionParamType(dt ast.DataType) javaactions.CodeActionPar case ast.TypeBoolean: return &javaactions.BooleanType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$BooleanType", }, } case ast.TypeInteger: return &javaactions.IntegerType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$IntegerType", }, } case ast.TypeLong: return &javaactions.LongType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$LongType", }, } case ast.TypeDecimal: return &javaactions.DecimalType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$DecimalType", }, } case ast.TypeString: return &javaactions.StringType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$StringType", }, } case ast.TypeDateTime, ast.TypeDate: return &javaactions.DateTimeType{ BaseElement: 
model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$DateTimeType", }, } @@ -485,7 +485,7 @@ func astDataTypeToJavaActionParamType(dt ast.DataType) javaactions.CodeActionPar } return &javaactions.EntityType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$EntityType", }, Entity: entityName, @@ -497,7 +497,7 @@ func astDataTypeToJavaActionParamType(dt ast.DataType) javaactions.CodeActionPar } return &javaactions.ListType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$ListType", }, Entity: entityName, @@ -506,7 +506,7 @@ func astDataTypeToJavaActionParamType(dt ast.DataType) javaactions.CodeActionPar // Default to String type for unknown kinds return &javaactions.StringType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$StringType", }, } @@ -519,49 +519,49 @@ func astDataTypeToJavaActionReturnType(dt ast.DataType) javaactions.CodeActionRe case ast.TypeVoid: return &javaactions.VoidType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$VoidType", }, } case ast.TypeBoolean: return &javaactions.BooleanType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$BooleanType", }, } case ast.TypeInteger: return &javaactions.IntegerType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$IntegerType", }, } case ast.TypeLong: return &javaactions.LongType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$LongType", }, } case ast.TypeDecimal: return &javaactions.DecimalType{ BaseElement: 
model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$DecimalType", }, } case ast.TypeString: return &javaactions.StringType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$StringType", }, } case ast.TypeDateTime, ast.TypeDate: return &javaactions.DateTimeType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$DateTimeType", }, } @@ -576,7 +576,7 @@ func astDataTypeToJavaActionReturnType(dt ast.DataType) javaactions.CodeActionRe } return &javaactions.EntityType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$EntityType", }, Entity: entityName, @@ -588,7 +588,7 @@ func astDataTypeToJavaActionReturnType(dt ast.DataType) javaactions.CodeActionRe } return &javaactions.ListType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$ListType", }, Entity: entityName, @@ -597,7 +597,7 @@ func astDataTypeToJavaActionReturnType(dt ast.DataType) javaactions.CodeActionRe // Default to Boolean type (most common for Java actions) return &javaactions.BooleanType{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "CodeActions$BooleanType", }, } diff --git a/mdl/executor/cmd_javaactions_mock_test.go b/mdl/executor/cmd_javaactions_mock_test.go index bd331139..53860c08 100644 --- a/mdl/executor/cmd_javaactions_mock_test.go +++ b/mdl/executor/cmd_javaactions_mock_test.go @@ -7,14 +7,14 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/javaactions" - "github.com/mendixlabs/mxcli/sdk/mpr" ) func 
TestShowJavaActions_Mock(t *testing.T) { mod := mkModule("MyModule") - ja := &mpr.JavaAction{ + ja := &types.JavaAction{ BaseElement: model.BaseElement{ID: nextID("ja")}, ContainerID: mod.ID, Name: "DoSomething", @@ -25,7 +25,7 @@ func TestShowJavaActions_Mock(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - ListJavaActionsFunc: func() ([]*mpr.JavaAction, error) { return []*mpr.JavaAction{ja}, nil }, + ListJavaActionsFunc: func() ([]*types.JavaAction, error) { return []*types.JavaAction{ja}, nil }, } ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) diff --git a/mdl/executor/cmd_javascript_actions_mock_test.go b/mdl/executor/cmd_javascript_actions_mock_test.go index dd47c049..3ec07738 100644 --- a/mdl/executor/cmd_javascript_actions_mock_test.go +++ b/mdl/executor/cmd_javascript_actions_mock_test.go @@ -7,13 +7,13 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" ) func TestShowJavaScriptActions_Mock(t *testing.T) { mod := mkModule("WebMod") - jsa := &mpr.JavaScriptAction{ + jsa := &types.JavaScriptAction{ BaseElement: model.BaseElement{ID: nextID("jsa")}, ContainerID: mod.ID, Name: "ShowAlert", @@ -25,7 +25,7 @@ func TestShowJavaScriptActions_Mock(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - ListJavaScriptActionsFunc: func() ([]*mpr.JavaScriptAction, error) { return []*mpr.JavaScriptAction{jsa}, nil }, + ListJavaScriptActionsFunc: func() ([]*types.JavaScriptAction, error) { return []*types.JavaScriptAction{jsa}, nil }, } ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) @@ -41,8 +41,8 @@ func TestDescribeJavaScriptAction_Mock(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - ReadJavaScriptActionByNameFunc: func(qn string) (*mpr.JavaScriptAction, error) { - 
return &mpr.JavaScriptAction{ + ReadJavaScriptActionByNameFunc: func(qn string) (*types.JavaScriptAction, error) { + return &types.JavaScriptAction{ BaseElement: model.BaseElement{ID: nextID("jsa")}, ContainerID: mod.ID, Name: "ShowAlert", diff --git a/mdl/executor/cmd_json_mock_test.go b/mdl/executor/cmd_json_mock_test.go index 9986fe53..5ee03a8d 100644 --- a/mdl/executor/cmd_json_mock_test.go +++ b/mdl/executor/cmd_json_mock_test.go @@ -6,10 +6,10 @@ import ( "testing" "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/agenteditor" "github.com/mendixlabs/mxcli/sdk/microflows" - "github.com/mendixlabs/mxcli/sdk/mpr" "github.com/mendixlabs/mxcli/sdk/pages" "github.com/mendixlabs/mxcli/sdk/security" "github.com/mendixlabs/mxcli/sdk/workflows" @@ -238,7 +238,7 @@ func TestShowPublishedRestServices_Mock_JSON(t *testing.T) { func TestShowJavaActions_Mock_JSON(t *testing.T) { mod := mkModule("MyModule") h := mkHierarchy(mod) - ja := &mpr.JavaAction{ + ja := &types.JavaAction{ BaseElement: model.BaseElement{ID: nextID("ja")}, ContainerID: mod.ID, Name: "MyJavaAction", @@ -247,7 +247,7 @@ func TestShowJavaActions_Mock_JSON(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - ListJavaActionsFunc: func() ([]*mpr.JavaAction, error) { return []*mpr.JavaAction{ja}, nil }, + ListJavaActionsFunc: func() ([]*types.JavaAction, error) { return []*types.JavaAction{ja}, nil }, } ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) @@ -259,7 +259,7 @@ func TestShowJavaActions_Mock_JSON(t *testing.T) { func TestShowJavaScriptActions_Mock_JSON(t *testing.T) { mod := mkModule("MyModule") h := mkHierarchy(mod) - jsa := &mpr.JavaScriptAction{ + jsa := &types.JavaScriptAction{ BaseElement: model.BaseElement{ID: nextID("jsa")}, ContainerID: mod.ID, Name: "MyJSAction", @@ -268,7 +268,7 @@ func 
TestShowJavaScriptActions_Mock_JSON(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - ListJavaScriptActionsFunc: func() ([]*mpr.JavaScriptAction, error) { return []*mpr.JavaScriptAction{jsa}, nil }, + ListJavaScriptActionsFunc: func() ([]*types.JavaScriptAction, error) { return []*types.JavaScriptAction{jsa}, nil }, } ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) @@ -301,7 +301,7 @@ func TestShowDatabaseConnections_Mock_JSON(t *testing.T) { func TestShowImageCollections_Mock_JSON(t *testing.T) { mod := mkModule("MyModule") h := mkHierarchy(mod) - ic := &mpr.ImageCollection{ + ic := &types.ImageCollection{ BaseElement: model.BaseElement{ID: nextID("ic")}, ContainerID: mod.ID, Name: "Icons", @@ -310,7 +310,7 @@ func TestShowImageCollections_Mock_JSON(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - ListImageCollectionsFunc: func() ([]*mpr.ImageCollection, error) { return []*mpr.ImageCollection{ic}, nil }, + ListImageCollectionsFunc: func() ([]*types.ImageCollection, error) { return []*types.ImageCollection{ic}, nil }, } ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), withHierarchy(h)) @@ -322,7 +322,7 @@ func TestShowImageCollections_Mock_JSON(t *testing.T) { func TestShowJsonStructures_Mock_JSON(t *testing.T) { mod := mkModule("MyModule") h := mkHierarchy(mod) - js := &mpr.JsonStructure{ + js := &types.JsonStructure{ BaseElement: model.BaseElement{ID: nextID("js")}, ContainerID: mod.ID, Name: "OrderSchema", @@ -331,7 +331,7 @@ func TestShowJsonStructures_Mock_JSON(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - ListJsonStructuresFunc: func() ([]*mpr.JsonStructure, error) { return []*mpr.JsonStructure{js}, nil }, + ListJsonStructuresFunc: func() ([]*types.JsonStructure, error) { return []*types.JsonStructure{js}, nil }, } ctx, buf := newMockCtx(t, withBackend(mb), withFormat(FormatJSON), 
withHierarchy(h)) diff --git a/mdl/executor/cmd_jsonstructures.go b/mdl/executor/cmd_jsonstructures.go index 57a0d9d3..5c908cda 100644 --- a/mdl/executor/cmd_jsonstructures.go +++ b/mdl/executor/cmd_jsonstructures.go @@ -11,7 +11,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) // showJsonStructures handles SHOW JSON STRUCTURES [IN module]. @@ -100,7 +100,7 @@ func describeJsonStructure(ctx *ExecContext, name ast.QualifiedName) error { } if js.JsonSnippet != "" { - snippet := mpr.PrettyPrintJSON(js.JsonSnippet) + snippet := types.PrettyPrintJSON(js.JsonSnippet) if strings.Contains(snippet, "'") || strings.Contains(snippet, "\n") { fmt.Fprintf(ctx.Output, "\n SNIPPET $$%s$$", snippet) } else { @@ -135,7 +135,7 @@ func describeJsonStructure(ctx *ExecContext, name ast.QualifiedName) error { // collectCustomNameMappings walks the element tree and returns JSON key → ExposedName // mappings where the ExposedName differs from the auto-generated default (capitalizeFirst). -func collectCustomNameMappings(elements []*mpr.JsonElement) map[string]string { +func collectCustomNameMappings(elements []*types.JsonElement) map[string]string { mappings := make(map[string]string) for _, elem := range elements { collectCustomNames(elem, mappings) @@ -143,7 +143,7 @@ func collectCustomNameMappings(elements []*mpr.JsonElement) map[string]string { return mappings } -func collectCustomNames(elem *mpr.JsonElement, mappings map[string]string) { +func collectCustomNames(elem *types.JsonElement, mappings map[string]string) { // Extract the JSON key from the last segment of the Path. 
// Path format: "(Object)|fieldName" or "(Object)|parent|(Object)|child" if parts := strings.Split(elem.Path, "|"); len(parts) > 1 { @@ -207,7 +207,7 @@ func execCreateJsonStructure(ctx *ExecContext, s *ast.CreateJsonStructureStmt) e } // Build element tree from JSON snippet, applying custom name mappings - elements, err := mpr.BuildJsonElementsFromSnippet(s.JsonSnippet, s.CustomNameMap) + elements, err := types.BuildJsonElementsFromSnippet(s.JsonSnippet, s.CustomNameMap) if err != nil { return mdlerrors.NewBackend("build element tree", err) } @@ -217,11 +217,11 @@ func execCreateJsonStructure(ctx *ExecContext, s *ast.CreateJsonStructureStmt) e containerID = existing.ContainerID } - js := &mpr.JsonStructure{ + js := &types.JsonStructure{ ContainerID: containerID, Name: s.Name.Name, Documentation: s.Documentation, - JsonSnippet: mpr.PrettyPrintJSON(s.JsonSnippet), + JsonSnippet: types.PrettyPrintJSON(s.JsonSnippet), Elements: elements, } @@ -260,7 +260,7 @@ func execDropJsonStructure(ctx *ExecContext, s *ast.DropJsonStructureStmt) error } // findJsonStructure finds a JSON structure by module and name. 
-func findJsonStructure(ctx *ExecContext, moduleName, structName string) *mpr.JsonStructure { +func findJsonStructure(ctx *ExecContext, moduleName, structName string) *types.JsonStructure { structures, err := ctx.Backend.ListJsonStructures() if err != nil { return nil diff --git a/mdl/executor/cmd_jsonstructures_mock_test.go b/mdl/executor/cmd_jsonstructures_mock_test.go index 44409735..57896021 100644 --- a/mdl/executor/cmd_jsonstructures_mock_test.go +++ b/mdl/executor/cmd_jsonstructures_mock_test.go @@ -6,13 +6,13 @@ import ( "testing" "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" ) func TestShowJsonStructures_Mock(t *testing.T) { mod := mkModule("API") - js := &mpr.JsonStructure{ + js := &types.JsonStructure{ BaseElement: model.BaseElement{ID: nextID("js")}, ContainerID: mod.ID, Name: "OrderSchema", @@ -23,7 +23,7 @@ func TestShowJsonStructures_Mock(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - ListJsonStructuresFunc: func() ([]*mpr.JsonStructure, error) { return []*mpr.JsonStructure{js}, nil }, + ListJsonStructuresFunc: func() ([]*types.JsonStructure, error) { return []*types.JsonStructure{js}, nil }, } ctx, buf := newMockCtx(t, withBackend(mb), withHierarchy(h)) diff --git a/mdl/executor/cmd_microflows_builder_actions.go b/mdl/executor/cmd_microflows_builder_actions.go index 633bf0eb..d60a9142 100644 --- a/mdl/executor/cmd_microflows_builder_actions.go +++ b/mdl/executor/cmd_microflows_builder_actions.go @@ -8,10 +8,10 @@ import ( "strings" "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/domainmodel" "github.com/mendixlabs/mxcli/sdk/microflows" - "github.com/mendixlabs/mxcli/sdk/mpr" ) // addCreateVariableAction creates a DECLARE statement as a CreateVariableAction. 
@@ -29,7 +29,7 @@ func (fb *flowBuilder) addCreateVariableAction(s *ast.DeclareStmt) model.ID { fb.declaredVars[s.Variable] = typeName action := µflows.CreateVariableAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, VariableName: s.Variable, DataType: convertASTToMicroflowDataType(declType, nil), InitialValue: fb.exprToString(s.InitialValue), @@ -38,7 +38,7 @@ func (fb *flowBuilder) addCreateVariableAction(s *ast.DeclareStmt) model.ID { activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -62,7 +62,7 @@ func (fb *flowBuilder) addChangeVariableAction(s *ast.MfSetStmt) model.ID { } action := µflows.ChangeVariableAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, VariableName: s.Target, Value: fb.exprToString(s.Value), } @@ -70,7 +70,7 @@ func (fb *flowBuilder) addChangeVariableAction(s *ast.MfSetStmt) model.ID { activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -87,7 +87,7 @@ func (fb *flowBuilder) addChangeVariableAction(s *ast.MfSetStmt) model.ID { // addCreateObjectAction creates a CREATE OBJECT statement. 
func (fb *flowBuilder) addCreateObjectAction(s *ast.CreateObjectStmt) model.ID { action := µflows.CreateObjectAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, OutputVariable: s.Variable, Commit: microflows.CommitTypeNo, } @@ -106,7 +106,7 @@ func (fb *flowBuilder) addCreateObjectAction(s *ast.CreateObjectStmt) model.ID { // Build InitialMembers for each SET assignment for _, change := range s.Changes { memberChange := µflows.MemberChange{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Type: microflows.MemberChangeTypeSet, Value: fb.memberExpressionToString(change.Value, entityQN, change.Attribute), } @@ -118,7 +118,7 @@ func (fb *flowBuilder) addCreateObjectAction(s *ast.CreateObjectStmt) model.ID { activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -144,7 +144,7 @@ func (fb *flowBuilder) addCreateObjectAction(s *ast.CreateObjectStmt) model.ID { // addCommitAction creates a COMMIT statement. 
func (fb *flowBuilder) addCommitAction(s *ast.MfCommitStmt) model.ID { action := µflows.CommitObjectsAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), CommitVariable: s.Variable, WithEvents: s.WithEvents, @@ -155,7 +155,7 @@ func (fb *flowBuilder) addCommitAction(s *ast.MfCommitStmt) model.ID { activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -180,7 +180,7 @@ func (fb *flowBuilder) addCommitAction(s *ast.MfCommitStmt) model.ID { // addDeleteAction creates a DELETE statement. func (fb *flowBuilder) addDeleteAction(s *ast.DeleteObjectStmt) model.ID { action := µflows.DeleteObjectAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, DeleteVariable: s.Variable, } @@ -188,7 +188,7 @@ func (fb *flowBuilder) addDeleteAction(s *ast.DeleteObjectStmt) model.ID { activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -214,7 +214,7 @@ func (fb *flowBuilder) addDeleteAction(s *ast.DeleteObjectStmt) model.ID { // addRollbackAction creates a ROLLBACK statement. 
func (fb *flowBuilder) addRollbackAction(s *ast.RollbackStmt) model.ID { action := µflows.RollbackObjectAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, RollbackVariable: s.Variable, RefreshInClient: s.RefreshInClient, } @@ -222,7 +222,7 @@ func (fb *flowBuilder) addRollbackAction(s *ast.RollbackStmt) model.ID { activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -240,7 +240,7 @@ func (fb *flowBuilder) addRollbackAction(s *ast.RollbackStmt) model.ID { // addChangeObjectAction creates a CHANGE statement. func (fb *flowBuilder) addChangeObjectAction(s *ast.ChangeObjectStmt) model.ID { action := µflows.ChangeObjectAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ChangeVariable: s.Variable, Commit: microflows.CommitTypeNo, RefreshInClient: false, @@ -255,7 +255,7 @@ func (fb *flowBuilder) addChangeObjectAction(s *ast.ChangeObjectStmt) model.ID { // Build MemberChange items for each SET assignment for _, change := range s.Changes { memberChange := µflows.MemberChange{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Type: microflows.MemberChangeTypeSet, Value: fb.memberExpressionToString(change.Value, entityQN, change.Attribute), } @@ -266,7 +266,7 @@ func (fb *flowBuilder) addChangeObjectAction(s *ast.ChangeObjectStmt) model.ID { activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: 
model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -304,7 +304,7 @@ func (fb *flowBuilder) addRetrieveAction(s *ast.RetrieveStmt) model.ID { // Reverse traversal on Reference: child → parent (one-to-many) // Use DatabaseRetrieveSource with XPath to get a list of parent entities dbSource := µflows.DatabaseRetrieveSource{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, EntityQualifiedName: assocInfo.parentEntityQN, XPathConstraint: "[" + assocQN + " = $" + s.StartVariable + "]", } @@ -315,7 +315,7 @@ func (fb *flowBuilder) addRetrieveAction(s *ast.RetrieveStmt) model.ID { } else { // Forward traversal or ReferenceSet: use AssociationRetrieveSource source = µflows.AssociationRetrieveSource{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, StartVariable: s.StartVariable, AssociationQualifiedName: assocQN, } @@ -337,7 +337,7 @@ func (fb *flowBuilder) addRetrieveAction(s *ast.RetrieveStmt) model.ID { // Database retrieve: RETRIEVE $List FROM Module.Entity WHERE ... entityQN := s.Source.Module + "." 
+ s.Source.Name dbSource := µflows.DatabaseRetrieveSource{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, EntityQualifiedName: entityQN, } @@ -349,7 +349,7 @@ func (fb *flowBuilder) addRetrieveAction(s *ast.RetrieveStmt) model.ID { rangeType = microflows.RangeTypeFirst } dbSource.Range = µflows.Range{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, RangeType: rangeType, Limit: s.Limit, Offset: s.Offset, @@ -389,7 +389,7 @@ func (fb *flowBuilder) addRetrieveAction(s *ast.RetrieveStmt) model.ID { } dbSource.Sorting = append(dbSource.Sorting, µflows.SortItem{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, AttributeQualifiedName: attrPath, Direction: direction, }) @@ -412,7 +412,7 @@ func (fb *flowBuilder) addRetrieveAction(s *ast.RetrieveStmt) model.ID { } action := µflows.RetrieveAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, OutputVariable: s.Variable, Source: source, } @@ -421,7 +421,7 @@ func (fb *flowBuilder) addRetrieveAction(s *ast.RetrieveStmt) model.ID { activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -451,23 +451,23 @@ func (fb *flowBuilder) addListOperationAction(s *ast.ListOperationStmt) model.ID switch s.Operation { case ast.ListOpHead: operation = µflows.HeadOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: 
model.ID(types.GenerateID())}, ListVariable: s.InputVariable, } case ast.ListOpTail: operation = µflows.TailOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ListVariable: s.InputVariable, } case ast.ListOpFind: operation = µflows.FindOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ListVariable: s.InputVariable, Expression: fb.exprToString(s.Condition), } case ast.ListOpFilter: operation = µflows.FilterOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ListVariable: s.InputVariable, Expression: fb.exprToString(s.Condition), } @@ -494,49 +494,49 @@ func (fb *flowBuilder) addListOperationAction(s *ast.ListOperationStmt) model.ID attrQN = entityType + "." + spec.Attribute } sortItems = append(sortItems, µflows.SortItem{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, AttributeQualifiedName: attrQN, Direction: direction, }) } operation = µflows.SortOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ListVariable: s.InputVariable, Sorting: sortItems, } case ast.ListOpUnion: operation = µflows.UnionOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ListVariable1: s.InputVariable, ListVariable2: s.SecondVariable, } case ast.ListOpIntersect: operation = µflows.IntersectOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ListVariable1: s.InputVariable, ListVariable2: s.SecondVariable, } case ast.ListOpSubtract: operation = 
µflows.SubtractOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ListVariable1: s.InputVariable, ListVariable2: s.SecondVariable, } case ast.ListOpContains: operation = µflows.ContainsOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ListVariable: s.InputVariable, ObjectVariable: s.SecondVariable, // The item to check } case ast.ListOpEquals: operation = µflows.EqualsOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ListVariable1: s.InputVariable, ListVariable2: s.SecondVariable, } case ast.ListOpRange: rangeOp := µflows.ListRangeOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ListVariable: s.InputVariable, } if s.OffsetExpr != nil { @@ -551,7 +551,7 @@ func (fb *flowBuilder) addListOperationAction(s *ast.ListOperationStmt) model.ID } action := µflows.ListOperationAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Operation: operation, OutputVariable: s.OutputVariable, } @@ -577,7 +577,7 @@ func (fb *flowBuilder) addListOperationAction(s *ast.ListOperationStmt) model.ID activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -610,7 +610,7 @@ func (fb *flowBuilder) addAggregateListAction(s *ast.AggregateListStmt) model.ID } action := µflows.AggregateListAction{ - BaseElement: 
model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, InputVariable: s.InputVariable, OutputVariable: s.OutputVariable, Function: function, @@ -631,7 +631,7 @@ func (fb *flowBuilder) addAggregateListAction(s *ast.AggregateListStmt) model.ID activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -653,7 +653,7 @@ func (fb *flowBuilder) addCreateListAction(s *ast.CreateListStmt) model.ID { } action := µflows.CreateListAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, OutputVariable: s.Variable, EntityQualifiedName: entityQN, } @@ -666,7 +666,7 @@ func (fb *flowBuilder) addCreateListAction(s *ast.CreateListStmt) model.ID { activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -683,7 +683,7 @@ func (fb *flowBuilder) addCreateListAction(s *ast.CreateListStmt) model.ID { // addAddToListAction creates an ADD TO list statement. 
func (fb *flowBuilder) addAddToListAction(s *ast.AddToListStmt) model.ID { action := µflows.ChangeListAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Type: microflows.ChangeListTypeAdd, ChangeVariable: s.List, Value: "$" + s.Item, @@ -692,7 +692,7 @@ func (fb *flowBuilder) addAddToListAction(s *ast.AddToListStmt) model.ID { activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -709,7 +709,7 @@ func (fb *flowBuilder) addAddToListAction(s *ast.AddToListStmt) model.ID { // addRemoveFromListAction creates a REMOVE FROM list statement. func (fb *flowBuilder) addRemoveFromListAction(s *ast.RemoveFromListStmt) model.ID { action := µflows.ChangeListAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Type: microflows.ChangeListTypeRemove, ChangeVariable: s.List, Value: "$" + s.Item, @@ -718,7 +718,7 @@ func (fb *flowBuilder) addRemoveFromListAction(s *ast.RemoveFromListStmt) model. 
activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, diff --git a/mdl/executor/cmd_microflows_builder_annotations.go b/mdl/executor/cmd_microflows_builder_annotations.go index 668b9511..476863ee 100644 --- a/mdl/executor/cmd_microflows_builder_annotations.go +++ b/mdl/executor/cmd_microflows_builder_annotations.go @@ -7,7 +7,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/microflows" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) // getStatementAnnotations extracts the annotations field from any microflow statement. @@ -151,7 +151,7 @@ func (fb *flowBuilder) addEndEventWithReturn(s *ast.ReturnStmt) model.ID { endEvent := µflows.EndEvent{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: EventSize, Height: EventSize}, }, @@ -169,7 +169,7 @@ func (fb *flowBuilder) addEndEventWithReturn(s *ast.ReturnStmt) model.ID { func (fb *flowBuilder) addErrorEvent() model.ID { errorEvent := µflows.ErrorEvent{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: EventSize, Height: EventSize}, }, @@ -197,7 +197,7 @@ func (fb *flowBuilder) attachAnnotation(text string, activityID model.ID) { annotation := µflows.Annotation{ BaseMicroflowObject: 
microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: actX, Y: actY - 100}, Size: model.Size{Width: 200, Height: 50}, }, @@ -206,7 +206,7 @@ func (fb *flowBuilder) attachAnnotation(text string, activityID model.ID) { fb.objects = append(fb.objects, annotation) fb.annotationFlows = append(fb.annotationFlows, µflows.AnnotationFlow{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, OriginID: annotation.ID, DestinationID: activityID, }) @@ -216,7 +216,7 @@ func (fb *flowBuilder) attachAnnotation(text string, activityID model.ID) { func (fb *flowBuilder) attachFreeAnnotation(text string) { annotation := µflows.Annotation{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY - 100}, Size: model.Size{Width: 200, Height: 50}, }, diff --git a/mdl/executor/cmd_microflows_builder_calls.go b/mdl/executor/cmd_microflows_builder_calls.go index 94a8db0a..a03b21e0 100644 --- a/mdl/executor/cmd_microflows_builder_calls.go +++ b/mdl/executor/cmd_microflows_builder_calls.go @@ -11,7 +11,7 @@ import ( "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/javaactions" "github.com/mendixlabs/mxcli/sdk/microflows" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) // addLogMessageAction creates a LOG statement as a LogMessageAction. 
@@ -62,11 +62,11 @@ func (fb *flowBuilder) addLogMessageAction(s *ast.LogStmt) model.ID { } action := µflows.LogMessageAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, LogLevel: logLevel, LogNodeName: "'" + s.Node + "'", // Store as expression (e.g., 'TEST') MessageTemplate: &model.Text{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Translations: map[string]string{ "en_US": templateText, }, @@ -77,7 +77,7 @@ func (fb *flowBuilder) addLogMessageAction(s *ast.LogStmt) model.ID { activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -101,7 +101,7 @@ func (fb *flowBuilder) addCallMicroflowAction(s *ast.CallMicroflowStmt) model.ID // Parameter is the full qualified name: Module.Microflow.ParameterName paramQN := mfQN + "." 
+ arg.Name mapping := µflows.MicroflowCallParameterMapping{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Parameter: paramQN, Argument: fb.exprToString(arg.Value), } @@ -110,13 +110,13 @@ func (fb *flowBuilder) addCallMicroflowAction(s *ast.CallMicroflowStmt) model.ID // Create nested MicroflowCall structure mfCall := µflows.MicroflowCall{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Microflow: mfQN, ParameterMappings: mappings, } action := µflows.MicroflowCallAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), MicroflowCall: mfCall, ResultVariableName: s.OutputVariable, @@ -127,7 +127,7 @@ func (fb *flowBuilder) addCallMicroflowAction(s *ast.CallMicroflowStmt) model.ID activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -190,20 +190,20 @@ func (fb *flowBuilder) addCallJavaActionAction(s *ast.CallJavaActionStmt) model. 
} } value = µflows.EntityTypeCodeActionParameterValue{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Entity: entityName, } } else { // Regular parameter: expression-based value valueExpr := fb.exprToString(arg.Value) value = µflows.BasicCodeActionParameterValue{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Argument: valueExpr, } } mapping := µflows.JavaActionParameterMapping{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Parameter: paramQN, Value: value, } @@ -211,7 +211,7 @@ func (fb *flowBuilder) addCallJavaActionAction(s *ast.CallJavaActionStmt) model. } action := µflows.JavaActionCallAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), JavaAction: actionQN, ParameterMappings: mappings, @@ -223,7 +223,7 @@ func (fb *flowBuilder) addCallJavaActionAction(s *ast.CallJavaActionStmt) model. 
activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -253,7 +253,7 @@ func (fb *flowBuilder) addCallExternalActionAction(s *ast.CallExternalActionStmt var mappings []*microflows.ExternalActionParameterMapping for _, arg := range s.Arguments { mapping := µflows.ExternalActionParameterMapping{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ParameterName: arg.Name, Argument: fb.exprToString(arg.Value), } @@ -261,7 +261,7 @@ func (fb *flowBuilder) addCallExternalActionAction(s *ast.CallExternalActionStmt } action := µflows.CallExternalAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), ConsumedODataService: serviceQN, Name: s.ActionName, @@ -274,7 +274,7 @@ func (fb *flowBuilder) addCallExternalActionAction(s *ast.CallExternalActionStmt activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -308,7 +308,7 @@ func (fb *flowBuilder) addShowPageAction(s *ast.ShowPageStmt) model.ID { // Parameter qualified name format: Module.Page.ParameterName paramQN := pageQN + "." 
+ arg.ParamName mapping := µflows.PageParameterMapping{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Parameter: paramQN, Argument: fb.exprToString(arg.Value), } @@ -328,7 +328,7 @@ func (fb *flowBuilder) addShowPageAction(s *ast.ShowPageStmt) model.ID { // Create page settings pageSettings := µflows.PageSettings{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Location: location, ModalForm: s.ModalForm, } @@ -337,7 +337,7 @@ func (fb *flowBuilder) addShowPageAction(s *ast.ShowPageStmt) model.ID { // Use PageName (BY_NAME_REFERENCE) instead of PageID (BY_ID_REFERENCE) // The modern Mendix format uses FormSettings.Form as a qualified name string action := µflows.ShowPageAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, PageName: pageQN, // BY_NAME_REFERENCE - qualified name string PageSettings: pageSettings, PageParameterMappings: mappings, @@ -352,7 +352,7 @@ func (fb *flowBuilder) addShowPageAction(s *ast.ShowPageStmt) model.ID { if s.Title != "" { action.OverridePageTitle = &model.Text{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Texts$Text", }, Translations: map[string]string{"en_US": s.Title}, @@ -362,7 +362,7 @@ func (fb *flowBuilder) addShowPageAction(s *ast.ShowPageStmt) model.ID { activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -379,13 +379,13 @@ func (fb *flowBuilder) addShowPageAction(s 
*ast.ShowPageStmt) model.ID { // addShowHomePageAction creates a SHOW HOME PAGE statement. func (fb *flowBuilder) addShowHomePageAction(s *ast.ShowHomePageStmt) model.ID { action := µflows.ShowHomePageAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, } activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -420,7 +420,7 @@ func (fb *flowBuilder) addShowMessageAction(s *ast.ShowMessageStmt) model.ID { } template := &model.Text{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Translations: map[string]string{"en_US": templateText}, } @@ -430,7 +430,7 @@ func (fb *flowBuilder) addShowMessageAction(s *ast.ShowMessageStmt) model.ID { } action := µflows.ShowMessageAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Template: template, Type: msgType, TemplateParameters: templateParams, @@ -439,7 +439,7 @@ func (fb *flowBuilder) addShowMessageAction(s *ast.ShowMessageStmt) model.ID { activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -461,14 +461,14 @@ func (fb *flowBuilder) addClosePageAction(s *ast.ClosePageStmt) model.ID { } action := µflows.ClosePageAction{ - 
BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, NumberOfPages: numPages, } activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -502,7 +502,7 @@ func (fb *flowBuilder) addValidationFeedbackAction(s *ast.ValidationFeedbackStmt // Create template with translations map (default language "en_US") template := &model.Text{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Translations: map[string]string{"en_US": templateText}, } @@ -547,7 +547,7 @@ func (fb *flowBuilder) addValidationFeedbackAction(s *ast.ValidationFeedbackStmt } action := µflows.ValidationFeedbackAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ObjectVariable: varName, AttributeName: attributeName, AssociationName: associationName, @@ -558,7 +558,7 @@ func (fb *flowBuilder) addValidationFeedbackAction(s *ast.ValidationFeedbackStmt activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -576,7 +576,7 @@ func (fb *flowBuilder) addValidationFeedbackAction(s *ast.ValidationFeedbackStmt func (fb *flowBuilder) addRestCallAction(s *ast.RestCallStmt) model.ID { // Build HTTP configuration httpConfig := 
µflows.HttpConfiguration{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, } // Set HTTP method @@ -610,7 +610,7 @@ func (fb *flowBuilder) addRestCallAction(s *ast.RestCallStmt) model.ID { // Set custom headers for _, header := range s.Headers { h := µflows.HttpHeader{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Name: header.Name, Value: fb.exprToString(header.Value), } @@ -642,7 +642,7 @@ func (fb *flowBuilder) addRestCallAction(s *ast.RestCallStmt) model.ID { templateParams = append(templateParams, fb.exprToString(param.Value)) } requestHandling = µflows.CustomRequestHandling{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Template: template, TemplateParams: templateParams, } @@ -650,21 +650,21 @@ func (fb *flowBuilder) addRestCallAction(s *ast.RestCallStmt) model.ID { // Export mapping mappingQN := s.Body.MappingName.Module + "." 
+ s.Body.MappingName.Name requestHandling = µflows.MappingRequestHandling{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, MappingID: model.ID(mappingQN), // Use qualified name as ID for BY_NAME references ParameterVariable: s.Body.SourceVariable, } default: // No body requestHandling = µflows.CustomRequestHandling{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Template: "", } } } else { // Default: empty custom request handling requestHandling = µflows.CustomRequestHandling{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Template: "", } } @@ -674,11 +674,11 @@ func (fb *flowBuilder) addRestCallAction(s *ast.RestCallStmt) model.ID { switch s.Result.Type { case ast.RestResultString: resultHandling = µflows.ResultHandlingString{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, } case ast.RestResultResponse: resultHandling = µflows.ResultHandlingString{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, } // Note: For HttpResponse, we would need a different result type, but using String for now case ast.RestResultMapping: @@ -702,7 +702,7 @@ func (fb *flowBuilder) addRestCallAction(s *ast.RestCallStmt) model.ID { } } resultHandling = µflows.ResultHandlingMapping{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, MappingID: model.ID(mappingQN), ResultEntityID: model.ID(entityQN), ResultVariable: s.OutputVariable, @@ -710,11 +710,11 @@ func (fb *flowBuilder) addRestCallAction(s *ast.RestCallStmt) model.ID { } case ast.RestResultNone: resultHandling = 
µflows.ResultHandlingNone{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, } default: resultHandling = µflows.ResultHandlingString{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, } } @@ -727,7 +727,7 @@ func (fb *flowBuilder) addRestCallAction(s *ast.RestCallStmt) model.ID { } action := µflows.RestCallAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, HttpConfiguration: httpConfig, RequestHandling: requestHandling, ResultHandling: resultHandling, @@ -741,7 +741,7 @@ func (fb *flowBuilder) addRestCallAction(s *ast.RestCallStmt) model.ID { activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -783,7 +783,7 @@ func (fb *flowBuilder) addSendRestRequestAction(s *ast.SendRestRequestStmt) mode var outputVar *microflows.RestOutputVar if s.OutputVariable != "" { outputVar = µflows.RestOutputVar{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, VariableName: s.OutputVariable, } } @@ -794,7 +794,7 @@ func (fb *flowBuilder) addSendRestRequestAction(s *ast.SendRestRequestStmt) mode var bodyVar *microflows.RestBodyVar if s.BodyVariable != "" && shouldSetBodyVariable(opDef) { bodyVar = µflows.RestBodyVar{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, VariableName: s.BodyVariable, } } @@ -806,7 +806,7 @@ func (fb *flowBuilder) 
addSendRestRequestAction(s *ast.SendRestRequestStmt) mode // RestOperationCallAction does not support custom error handling (CE6035). // ON ERROR clauses in the MDL are silently ignored for this action type. action := µflows.RestOperationCallAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Operation: operationQN, OutputVariable: outputVar, BodyVariable: bodyVar, @@ -817,7 +817,7 @@ func (fb *flowBuilder) addSendRestRequestAction(s *ast.SendRestRequestStmt) mode activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -919,7 +919,7 @@ func (fb *flowBuilder) addExecuteDatabaseQueryAction(s *ast.ExecuteDatabaseQuery } action := µflows.ExecuteDatabaseQueryAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), OutputVariableName: s.OutputVariable, Query: s.QueryName, @@ -929,7 +929,7 @@ func (fb *flowBuilder) addExecuteDatabaseQueryAction(s *ast.ExecuteDatabaseQuery // Build parameter mappings from arguments for _, arg := range s.Arguments { pm := µflows.DatabaseQueryParameterMapping{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ParameterName: arg.Name, Value: fb.exprToString(arg.Value), } @@ -939,7 +939,7 @@ func (fb *flowBuilder) addExecuteDatabaseQueryAction(s *ast.ExecuteDatabaseQuery // Build connection parameter mappings (runtime connection override) for _, arg := range s.ConnectionArguments { cm := 
µflows.DatabaseConnectionParameterMapping{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ParameterName: arg.Name, Value: fb.exprToString(arg.Value), } @@ -950,7 +950,7 @@ func (fb *flowBuilder) addExecuteDatabaseQueryAction(s *ast.ExecuteDatabaseQuery activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -977,13 +977,13 @@ func (fb *flowBuilder) addImportFromMappingAction(s *ast.ImportFromMappingStmt) activityX := fb.posX action := µflows.ImportXmlAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), XmlDocumentVariable: s.SourceVariable, } resultHandling := µflows.ResultHandlingMapping{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, MappingID: model.ID(s.Mapping.String()), ResultVariable: s.OutputVariable, SingleObject: true, @@ -1013,7 +1013,7 @@ func (fb *flowBuilder) addImportFromMappingAction(s *ast.ImportFromMappingStmt) activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -1039,7 +1039,7 @@ func (fb *flowBuilder) addTransformJsonAction(s *ast.TransformJsonStmt) model.ID activityX := fb.posX action := 
µflows.TransformJsonAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), InputVariableName: s.InputVariable, OutputVariableName: s.OutputVariable, @@ -1049,7 +1049,7 @@ func (fb *flowBuilder) addTransformJsonAction(s *ast.TransformJsonStmt) model.ID activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -1074,11 +1074,11 @@ func (fb *flowBuilder) addExportToMappingAction(s *ast.ExportToMappingStmt) mode activityX := fb.posX action := µflows.ExportXmlAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), OutputVariable: s.OutputVariable, RequestHandling: µflows.MappingRequestHandling{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, MappingID: model.ID(s.Mapping.String()), ParameterVariable: s.SourceVariable, }, @@ -1087,7 +1087,7 @@ func (fb *flowBuilder) addExportToMappingAction(s *ast.ExportToMappingStmt) mode activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, diff --git a/mdl/executor/cmd_microflows_builder_control.go 
b/mdl/executor/cmd_microflows_builder_control.go index 6191bbd3..98edcf49 100644 --- a/mdl/executor/cmd_microflows_builder_control.go +++ b/mdl/executor/cmd_microflows_builder_control.go @@ -9,7 +9,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/microflows" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) // addIfStatement creates an IF/THEN/ELSE statement using ExclusiveSplit and ExclusiveMerge. @@ -45,13 +45,13 @@ func (fb *flowBuilder) addIfStatement(s *ast.IfStmt) model.ID { // Create ExclusiveSplit with expression condition splitCondition := µflows.ExpressionSplitCondition{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Expression: fb.exprToString(s.Condition), } split := µflows.ExclusiveSplit{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: splitX, Y: centerY}, Size: model.Size{Width: SplitWidth, Height: SplitHeight}, }, @@ -80,7 +80,7 @@ func (fb *flowBuilder) addIfStatement(s *ast.IfStmt) model.ID { if needMerge { merge := µflows.ExclusiveMerge{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: mergeX, Y: centerY}, Size: model.Size{Width: MergeSize, Height: MergeSize}, }, @@ -312,17 +312,17 @@ func (fb *flowBuilder) addLoopStatement(s *ast.LoopStmt) model.ID { // Position is the CENTER point (RelativeMiddlePoint in Mendix) loop := µflows.LoopedActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: 
model.Point{X: fb.posX + loopWidth/2, Y: fb.posY}, Size: model.Size{Width: loopWidth, Height: loopHeight}, }, LoopSource: µflows.IterableList{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ListVariableName: s.ListVariable, VariableName: s.LoopVariable, }, ObjectCollection: µflows.MicroflowObjectCollection{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Objects: loopBuilder.objects, Flows: nil, // Internal flows go at top-level, not inside the loop's ObjectCollection }, @@ -384,16 +384,16 @@ func (fb *flowBuilder) addWhileStatement(s *ast.WhileStmt) model.ID { loop := µflows.LoopedActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX + loopWidth/2, Y: fb.posY}, Size: model.Size{Width: loopWidth, Height: loopHeight}, }, LoopSource: µflows.WhileLoopCondition{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, WhileExpression: whileExpr, }, ObjectCollection: µflows.MicroflowObjectCollection{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Objects: loopBuilder.objects, Flows: nil, }, diff --git a/mdl/executor/cmd_microflows_builder_flows.go b/mdl/executor/cmd_microflows_builder_flows.go index f0d554a1..78d96239 100644 --- a/mdl/executor/cmd_microflows_builder_flows.go +++ b/mdl/executor/cmd_microflows_builder_flows.go @@ -7,7 +7,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/microflows" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) // 
convertErrorHandlingType converts AST error handling type to SDK error handling type. @@ -33,7 +33,7 @@ func convertErrorHandlingType(eh *ast.ErrorHandlingClause) microflows.ErrorHandl // connecting from the bottom of the source activity to the left of the error handler. func newErrorHandlerFlow(originID, destinationID model.ID) *microflows.SequenceFlow { return µflows.SequenceFlow{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, OriginID: originID, DestinationID: destinationID, OriginConnectionIndex: AnchorBottom, @@ -113,7 +113,7 @@ func (fb *flowBuilder) handleErrorHandlerMerge(lastErrID model.ID, activityID mo // fall back to empty (works for void microflows). endEvent := µflows.EndEvent{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: errorY}, Size: model.Size{Width: EventSize, Height: EventSize}, }, @@ -126,7 +126,7 @@ func (fb *flowBuilder) handleErrorHandlerMerge(lastErrID model.ID, activityID mo // newHorizontalFlow creates a SequenceFlow with anchors for horizontal left-to-right connection func newHorizontalFlow(originID, destinationID model.ID) *microflows.SequenceFlow { return µflows.SequenceFlow{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, OriginID: originID, DestinationID: destinationID, OriginConnectionIndex: AnchorRight, // Connect from right side of origin @@ -138,7 +138,7 @@ func newHorizontalFlow(originID, destinationID model.ID) *microflows.SequenceFlo func newHorizontalFlowWithCase(originID, destinationID model.ID, caseValue string) *microflows.SequenceFlow { flow := newHorizontalFlow(originID, destinationID) flow.CaseValue = microflows.EnumerationCase{ - BaseElement: model.BaseElement{ID: 
model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Value: caseValue, // "true" or "false" as string } return flow @@ -148,13 +148,13 @@ func newHorizontalFlowWithCase(originID, destinationID model.ID, caseValue strin // Used when TRUE path goes below the main line func newDownwardFlowWithCase(originID, destinationID model.ID, caseValue string) *microflows.SequenceFlow { return µflows.SequenceFlow{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, OriginID: originID, DestinationID: destinationID, OriginConnectionIndex: AnchorBottom, // Connect from bottom of origin (going down) DestinationConnectionIndex: AnchorLeft, // Connect to left side of destination CaseValue: microflows.EnumerationCase{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Value: caseValue, // "true" or "false" as string }, } @@ -164,7 +164,7 @@ func newDownwardFlowWithCase(originID, destinationID model.ID, caseValue string) // Used when returning from a lower branch to merge func newUpwardFlow(originID, destinationID model.ID) *microflows.SequenceFlow { return µflows.SequenceFlow{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, OriginID: originID, DestinationID: destinationID, OriginConnectionIndex: AnchorRight, // Connect from right side of origin diff --git a/mdl/executor/cmd_microflows_builder_graph.go b/mdl/executor/cmd_microflows_builder_graph.go index 70ce119a..2b0737ba 100644 --- a/mdl/executor/cmd_microflows_builder_graph.go +++ b/mdl/executor/cmd_microflows_builder_graph.go @@ -7,7 +7,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/microflows" - "github.com/mendixlabs/mxcli/sdk/mpr" + 
"github.com/mendixlabs/mxcli/mdl/types" ) // buildFlowGraph converts AST statements to a Microflow flow graph. @@ -42,7 +42,7 @@ func (fb *flowBuilder) buildFlowGraph(stmts []ast.MicroflowStatement, returns *a // Create StartEvent - Position is the CENTER point (RelativeMiddlePoint in Mendix) startEvent := µflows.StartEvent{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: EventSize, Height: EventSize}, }, @@ -98,7 +98,7 @@ func (fb *flowBuilder) buildFlowGraph(stmts []ast.MicroflowStatement, returns *a fb.posY = fb.baseY // Ensure end event is on the happy path center line endEvent := µflows.EndEvent{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: EventSize, Height: EventSize}, }, @@ -116,7 +116,7 @@ func (fb *flowBuilder) buildFlowGraph(stmts []ast.MicroflowStatement, returns *a } return µflows.MicroflowObjectCollection{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Objects: fb.objects, Flows: fb.flows, AnnotationFlows: fb.annotationFlows, diff --git a/mdl/executor/cmd_microflows_builder_workflow.go b/mdl/executor/cmd_microflows_builder_workflow.go index a14d6ef9..aacf40e0 100644 --- a/mdl/executor/cmd_microflows_builder_workflow.go +++ b/mdl/executor/cmd_microflows_builder_workflow.go @@ -6,7 +6,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/microflows" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) // wrapAction wraps a MicroflowAction in an 
ActionActivity with standard positioning. @@ -15,7 +15,7 @@ func (fb *flowBuilder) wrapAction(action microflows.MicroflowAction, errorHandli activity := µflows.ActionActivity{ BaseActivity: microflows.BaseActivity{ BaseMicroflowObject: microflows.BaseMicroflowObject{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Position: model.Point{X: fb.posX, Y: fb.posY}, Size: model.Size{Width: ActivityWidth, Height: ActivityHeight}, }, @@ -46,7 +46,7 @@ func (fb *flowBuilder) addCallWorkflowAction(s *ast.CallWorkflowStmt) model.ID { } action := µflows.WorkflowCallAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), Workflow: wfQN, WorkflowContextVariable: ctxVar, @@ -58,7 +58,7 @@ func (fb *flowBuilder) addCallWorkflowAction(s *ast.CallWorkflowStmt) model.ID { func (fb *flowBuilder) addGetWorkflowDataAction(s *ast.GetWorkflowDataStmt) model.ID { action := µflows.GetWorkflowDataAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), OutputVariableName: s.OutputVariable, Workflow: s.Workflow.Module + "." 
+ s.Workflow.Name, @@ -69,7 +69,7 @@ func (fb *flowBuilder) addGetWorkflowDataAction(s *ast.GetWorkflowDataStmt) mode func (fb *flowBuilder) addGetWorkflowsAction(s *ast.GetWorkflowsStmt) model.ID { action := µflows.GetWorkflowsAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), OutputVariableName: s.OutputVariable, WorkflowContextVariableName: s.WorkflowContextVariableName, @@ -79,7 +79,7 @@ func (fb *flowBuilder) addGetWorkflowsAction(s *ast.GetWorkflowsStmt) model.ID { func (fb *flowBuilder) addGetWorkflowActivityRecordsAction(s *ast.GetWorkflowActivityRecordsStmt) model.ID { action := µflows.GetWorkflowActivityRecordsAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), OutputVariableName: s.OutputVariable, WorkflowVariable: s.WorkflowVariable, @@ -96,39 +96,39 @@ func (fb *flowBuilder) addWorkflowOperationAction(s *ast.WorkflowOperationStmt) reason = fb.exprToString(s.Reason) } op = µflows.AbortOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, Reason: reason, WorkflowVariable: s.WorkflowVariable, } case "CONTINUE": op = µflows.ContinueOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, WorkflowVariable: s.WorkflowVariable, } case "PAUSE": op = µflows.PauseOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, WorkflowVariable: s.WorkflowVariable, } case "RESTART": op = µflows.RestartOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: 
model.BaseElement{ID: model.ID(types.GenerateID())}, WorkflowVariable: s.WorkflowVariable, } case "RETRY": op = µflows.RetryOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, WorkflowVariable: s.WorkflowVariable, } case "UNPAUSE": op = µflows.UnpauseOperation{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, WorkflowVariable: s.WorkflowVariable, } } action := µflows.WorkflowOperationAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), Operation: op, } @@ -137,7 +137,7 @@ func (fb *flowBuilder) addWorkflowOperationAction(s *ast.WorkflowOperationStmt) func (fb *flowBuilder) addSetTaskOutcomeAction(s *ast.SetTaskOutcomeStmt) model.ID { action := µflows.SetTaskOutcomeAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), OutcomeValue: s.OutcomeValue, WorkflowTaskVariable: s.WorkflowTaskVariable, @@ -147,7 +147,7 @@ func (fb *flowBuilder) addSetTaskOutcomeAction(s *ast.SetTaskOutcomeStmt) model. 
func (fb *flowBuilder) addOpenUserTaskAction(s *ast.OpenUserTaskStmt) model.ID { action := µflows.OpenUserTaskAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), UserTaskVariable: s.UserTaskVariable, } @@ -156,7 +156,7 @@ func (fb *flowBuilder) addOpenUserTaskAction(s *ast.OpenUserTaskStmt) model.ID { func (fb *flowBuilder) addNotifyWorkflowAction(s *ast.NotifyWorkflowStmt) model.ID { action := µflows.NotifyWorkflowAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), OutputVariableName: s.OutputVariable, WorkflowVariable: s.WorkflowVariable, @@ -166,7 +166,7 @@ func (fb *flowBuilder) addNotifyWorkflowAction(s *ast.NotifyWorkflowStmt) model. func (fb *flowBuilder) addOpenWorkflowAction(s *ast.OpenWorkflowStmt) model.ID { action := µflows.OpenWorkflowAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), WorkflowVariable: s.WorkflowVariable, } @@ -175,7 +175,7 @@ func (fb *flowBuilder) addOpenWorkflowAction(s *ast.OpenWorkflowStmt) model.ID { func (fb *flowBuilder) addLockWorkflowAction(s *ast.LockWorkflowStmt) model.ID { action := µflows.LockWorkflowAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), PauseAllWorkflows: s.PauseAllWorkflows, WorkflowVariable: s.WorkflowVariable, @@ -185,7 +185,7 @@ func (fb *flowBuilder) addLockWorkflowAction(s *ast.LockWorkflowStmt) model.ID { func (fb *flowBuilder) addUnlockWorkflowAction(s *ast.UnlockWorkflowStmt) model.ID { action := 
µflows.UnlockWorkflowAction{ - BaseElement: model.BaseElement{ID: model.ID(mpr.GenerateID())}, + BaseElement: model.BaseElement{ID: model.ID(types.GenerateID())}, ErrorHandlingType: convertErrorHandlingType(s.ErrorHandling), ResumeAllPausedWorkflows: s.ResumeAllPausedWorkflows, WorkflowVariable: s.WorkflowVariable, diff --git a/mdl/executor/cmd_microflows_create.go b/mdl/executor/cmd_microflows_create.go index 350e701e..dedaa545 100644 --- a/mdl/executor/cmd_microflows_create.go +++ b/mdl/executor/cmd_microflows_create.go @@ -9,9 +9,9 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/microflows" - "github.com/mendixlabs/mxcli/sdk/mpr" ) // isBuiltinModuleEntity returns true for modules whose entities are defined @@ -73,7 +73,7 @@ func execCreateMicroflow(ctx *ExecContext, s *ast.CreateMicroflowStmt) error { } // For CREATE OR REPLACE/MODIFY, reuse the existing ID to preserve references - microflowID := model.ID(mpr.GenerateID()) + microflowID := model.ID(types.GenerateID()) if existingID != "" { microflowID = existingID // Keep the original folder unless a new folder is explicitly specified @@ -143,7 +143,7 @@ func execCreateMicroflow(ctx *ExecContext, s *ast.CreateMicroflowStmt) error { } param := µflows.MicroflowParameter{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), }, ContainerID: mf.ID, Name: p.Name, diff --git a/mdl/executor/cmd_misc_mock_test.go b/mdl/executor/cmd_misc_mock_test.go index 7ade8376..14dc5c68 100644 --- a/mdl/executor/cmd_misc_mock_test.go +++ b/mdl/executor/cmd_misc_mock_test.go @@ -6,14 +6,14 @@ import ( "testing" "github.com/mendixlabs/mxcli/mdl/backend/mock" - "github.com/mendixlabs/mxcli/sdk/mpr/version" + "github.com/mendixlabs/mxcli/mdl/types" ) func TestShowVersion_Mock(t *testing.T) { mb := &mock.MockBackend{ 
IsConnectedFunc: func() bool { return true }, - ProjectVersionFunc: func() *version.ProjectVersion { - return &version.ProjectVersion{ + ProjectVersionFunc: func() *types.ProjectVersion { + return &types.ProjectVersion{ ProductVersion: "10.18.0", BuildVersion: "10.18.0.12345", FormatVersion: 2, diff --git a/mdl/executor/cmd_modules_mock_test.go b/mdl/executor/cmd_modules_mock_test.go index 39119b1d..e6371b69 100644 --- a/mdl/executor/cmd_modules_mock_test.go +++ b/mdl/executor/cmd_modules_mock_test.go @@ -6,9 +6,9 @@ import ( "testing" "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/domainmodel" - "github.com/mendixlabs/mxcli/sdk/mpr" ) func TestShowModules_Mock(t *testing.T) { @@ -18,7 +18,7 @@ func TestShowModules_Mock(t *testing.T) { // showModules uses ListUnits to count documents per module. // Provide a unit belonging to mod1 so the count is non-zero. unitID := nextID("unit") - units := []*mpr.UnitInfo{{ID: unitID, ContainerID: mod1.ID}} + units := []*types.UnitInfo{{ID: unitID, ContainerID: mod1.ID}} // Need a hierarchy for getHierarchy — provide modules + units + folders h := mkHierarchy(mod1, mod2) @@ -31,7 +31,7 @@ func TestShowModules_Mock(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, ListModulesFunc: func() ([]*model.Module, error) { return []*model.Module{mod1, mod2}, nil }, - ListUnitsFunc: func() ([]*mpr.UnitInfo, error) { return units, nil }, + ListUnitsFunc: func() ([]*types.UnitInfo, error) { return units, nil }, ListDomainModelsFunc: func() ([]*domainmodel.DomainModel, error) { return []*domainmodel.DomainModel{dm}, nil }, // All other list functions return nil (zero counts) via MockBackend defaults. 
} diff --git a/mdl/executor/cmd_navigation.go b/mdl/executor/cmd_navigation.go index 912db18f..6ed878dc 100644 --- a/mdl/executor/cmd_navigation.go +++ b/mdl/executor/cmd_navigation.go @@ -9,7 +9,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) // execAlterNavigation handles CREATE [OR REPLACE] NAVIGATION command. @@ -38,12 +38,12 @@ func execAlterNavigation(ctx *ExecContext, s *ast.AlterNavigationStmt) error { } // Convert AST types to writer spec - spec := mpr.NavigationProfileSpec{ + spec := types.NavigationProfileSpec{ HasMenu: s.HasMenuBlock, } for _, hp := range s.HomePages { - hpSpec := mpr.NavHomePageSpec{ + hpSpec := types.NavHomePageSpec{ IsPage: hp.IsPage, Target: hp.Target.String(), } @@ -73,8 +73,8 @@ func execAlterNavigation(ctx *ExecContext, s *ast.AlterNavigationStmt) error { } // convertMenuItemDef converts an AST NavMenuItemDef to a writer NavMenuItemSpec. -func convertMenuItemDef(def ast.NavMenuItemDef) mpr.NavMenuItemSpec { - spec := mpr.NavMenuItemSpec{ +func convertMenuItemDef(def ast.NavMenuItemDef) types.NavMenuItemSpec { + spec := types.NavMenuItemSpec{ Caption: def.Caption, } if def.Page != nil { @@ -90,7 +90,7 @@ func convertMenuItemDef(def ast.NavMenuItemDef) mpr.NavMenuItemSpec { } // profileNames returns a comma-separated list of profile names for error messages. -func profileNames(nav *mpr.NavigationDocument) string { +func profileNames(nav *types.NavigationDocument) string { names := make([]string, len(nav.Profiles)) for i, p := range nav.Profiles { names[i] = p.Name @@ -251,7 +251,7 @@ func describeNavigation(ctx *ExecContext, name ast.QualifiedName) error { } // outputNavigationProfile outputs a single profile in round-trippable CREATE OR REPLACE NAVIGATION format. 
-func outputNavigationProfile(ctx *ExecContext, p *mpr.NavigationProfile) { +func outputNavigationProfile(ctx *ExecContext, p *types.NavigationProfile) { fmt.Fprintf(ctx.Output, "-- NAVIGATION PROFILE: %s\n", p.Name) fmt.Fprintf(ctx.Output, "-- Kind: %s\n", p.Kind) if p.IsNative { @@ -312,7 +312,7 @@ func outputNavigationProfile(ctx *ExecContext, p *mpr.NavigationProfile) { } // countMenuItems counts the total number of menu items recursively. -func countMenuItems(items []*mpr.NavMenuItem) int { +func countMenuItems(items []*types.NavMenuItem) int { count := len(items) for _, item := range items { count += countMenuItems(item.Items) @@ -321,7 +321,7 @@ func countMenuItems(items []*mpr.NavMenuItem) int { } // printMenuTree prints a menu tree with indentation to an io.Writer. -func printMenuTree(w io.Writer, items []*mpr.NavMenuItem, depth int) { +func printMenuTree(w io.Writer, items []*types.NavMenuItem, depth int) { indent := strings.Repeat(" ", depth+1) for _, item := range items { target := menuItemTarget(item) @@ -333,7 +333,7 @@ func printMenuTree(w io.Writer, items []*mpr.NavMenuItem, depth int) { } // menuItemTarget returns a display string for a menu item's action target. -func menuItemTarget(item *mpr.NavMenuItem) string { +func menuItemTarget(item *types.NavMenuItem) string { if item.Page != "" { return " -> " + item.Page } @@ -344,7 +344,7 @@ func menuItemTarget(item *mpr.NavMenuItem) string { } // printMenuMDL prints menu items in MDL-style format. 
-func printMenuMDL(w io.Writer, items []*mpr.NavMenuItem, depth int) { +func printMenuMDL(w io.Writer, items []*types.NavMenuItem, depth int) { indent := strings.Repeat(" ", depth) for _, item := range items { if len(item.Items) > 0 { diff --git a/mdl/executor/cmd_navigation_mock_test.go b/mdl/executor/cmd_navigation_mock_test.go index 322e4adc..22feeec0 100644 --- a/mdl/executor/cmd_navigation_mock_test.go +++ b/mdl/executor/cmd_navigation_mock_test.go @@ -7,18 +7,18 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" "github.com/mendixlabs/mxcli/mdl/backend/mock" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) func TestShowNavigation_Mock(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - GetNavigationFunc: func() (*mpr.NavigationDocument, error) { - return &mpr.NavigationDocument{ - Profiles: []*mpr.NavigationProfile{{ + GetNavigationFunc: func() (*types.NavigationDocument, error) { + return &types.NavigationDocument{ + Profiles: []*types.NavigationProfile{{ Name: "Responsive", Kind: "Responsive", - MenuItems: []*mpr.NavMenuItem{ + MenuItems: []*types.NavMenuItem{ {Caption: "Home"}, {Caption: "Admin"}, {Caption: "Settings"}, @@ -38,12 +38,12 @@ func TestShowNavigation_Mock(t *testing.T) { func TestShowNavigationMenu_Mock(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - GetNavigationFunc: func() (*mpr.NavigationDocument, error) { - return &mpr.NavigationDocument{ - Profiles: []*mpr.NavigationProfile{{ + GetNavigationFunc: func() (*types.NavigationDocument, error) { + return &types.NavigationDocument{ + Profiles: []*types.NavigationProfile{{ Name: "Responsive", Kind: "Responsive", - MenuItems: []*mpr.NavMenuItem{ + MenuItems: []*types.NavMenuItem{ {Caption: "Dashboard", Page: "MyModule.Dashboard"}, }, }}, @@ -58,12 +58,12 @@ func TestShowNavigationMenu_Mock(t *testing.T) { func TestShowNavigationHomes_Mock(t *testing.T) { mb := &mock.MockBackend{ 
IsConnectedFunc: func() bool { return true }, - GetNavigationFunc: func() (*mpr.NavigationDocument, error) { - return &mpr.NavigationDocument{ - Profiles: []*mpr.NavigationProfile{{ + GetNavigationFunc: func() (*types.NavigationDocument, error) { + return &types.NavigationDocument{ + Profiles: []*types.NavigationProfile{{ Name: "Responsive", Kind: "Responsive", - HomePage: &mpr.NavHomePage{Page: "MyModule.Home"}, + HomePage: &types.NavHomePage{Page: "MyModule.Home"}, }}, }, nil }, @@ -76,12 +76,12 @@ func TestShowNavigationHomes_Mock(t *testing.T) { func TestDescribeNavigation_Mock(t *testing.T) { mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - GetNavigationFunc: func() (*mpr.NavigationDocument, error) { - return &mpr.NavigationDocument{ - Profiles: []*mpr.NavigationProfile{{ + GetNavigationFunc: func() (*types.NavigationDocument, error) { + return &types.NavigationDocument{ + Profiles: []*types.NavigationProfile{{ Name: "Responsive", Kind: "Responsive", - HomePage: &mpr.NavHomePage{Page: "MyModule.Home"}, + HomePage: &types.NavHomePage{Page: "MyModule.Home"}, }}, }, nil }, diff --git a/mdl/executor/cmd_odata.go b/mdl/executor/cmd_odata.go index 69c22c78..384695cf 100644 --- a/mdl/executor/cmd_odata.go +++ b/mdl/executor/cmd_odata.go @@ -13,10 +13,10 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/domainmodel" "github.com/mendixlabs/mxcli/sdk/microflows" - "github.com/mendixlabs/mxcli/sdk/mpr" ) // outputJavadoc writes a javadoc-style comment block. 
@@ -789,7 +789,7 @@ func execCreateExternalEntity(ctx *ExecContext, s *ast.CreateExternalEntityStmt) Name: a.Name, Type: convertDataType(a.Type), } - attr.ID = model.ID(mpr.GenerateID()) + attr.ID = model.ID(types.GenerateID()) attrs = append(attrs, attr) } @@ -837,7 +837,7 @@ func execCreateExternalEntity(ctx *ExecContext, s *ast.CreateExternalEntityStmt) Deletable: s.Deletable, Updatable: s.Updatable, } - newEntity.ID = model.ID(mpr.GenerateID()) + newEntity.ID = model.ID(types.GenerateID()) if err := ctx.Backend.CreateEntity(dm.ID, newEntity); err != nil { return mdlerrors.NewBackend("create external entity", err) @@ -1029,7 +1029,7 @@ func createODataClient(ctx *ExecContext, stmt *ast.CreateODataClientStmt) error fmt.Fprintf(ctx.Output, "Created OData client: %s.%s\n", stmt.Name.Module, stmt.Name.Name) if newSvc.Metadata != "" { // Parse to show summary - if doc, err := mpr.ParseEdmx(newSvc.Metadata); err == nil { + if doc, err := types.ParseEdmx(newSvc.Metadata); err == nil { entityCount := 0 actionCount := 0 for _, s := range doc.Schemas { diff --git a/mdl/executor/cmd_pages_builder.go b/mdl/executor/cmd_pages_builder.go index 3de745ba..ed74a533 100644 --- a/mdl/executor/cmd_pages_builder.go +++ b/mdl/executor/cmd_pages_builder.go @@ -10,6 +10,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/domainmodel" "github.com/mendixlabs/mxcli/sdk/microflows" @@ -44,7 +45,7 @@ type pageBuilder struct { layoutsCache []*pages.Layout pagesCache []*pages.Page microflowsCache []*microflows.Microflow - foldersCache []*mpr.FolderInfo + foldersCache []*types.FolderInfo // Entity context for resolving short attribute names inside DataViews entityContext string // Qualified entity name (e.g., "Module.Entity") @@ -253,7 +254,7 @@ func (pb *pageBuilder) getMainPlaceholderRef(layoutName string) string { } // getFolders 
returns cached folders or loads them. -func (pb *pageBuilder) getFolders() ([]*mpr.FolderInfo, error) { +func (pb *pageBuilder) getFolders() ([]*types.FolderInfo, error) { if pb.foldersCache == nil { var err error pb.foldersCache, err = pb.reader.ListFolders() @@ -286,7 +287,7 @@ func (pb *pageBuilder) resolveFolder(folderPath string) (model.ID, error) { } // Find folder with this name under current container - var foundFolder *mpr.FolderInfo + var foundFolder *types.FolderInfo for _, f := range folders { if f.ContainerID == currentContainerID && f.Name == part { foundFolder = f @@ -305,7 +306,7 @@ func (pb *pageBuilder) resolveFolder(folderPath string) (model.ID, error) { currentContainerID = newFolderID // Add to cache - pb.foldersCache = append(pb.foldersCache, &mpr.FolderInfo{ + pb.foldersCache = append(pb.foldersCache, &types.FolderInfo{ ID: newFolderID, ContainerID: currentContainerID, Name: part, diff --git a/mdl/executor/cmd_pages_builder_v3.go b/mdl/executor/cmd_pages_builder_v3.go index a5838ac9..e4c0a8e7 100644 --- a/mdl/executor/cmd_pages_builder_v3.go +++ b/mdl/executor/cmd_pages_builder_v3.go @@ -11,7 +11,7 @@ import ( "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/domainmodel" "github.com/mendixlabs/mxcli/sdk/microflows" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/pages" ) @@ -33,7 +33,7 @@ func (pb *pageBuilder) buildPageV3(s *ast.CreatePageStmtV3) (*pages.Page, error) page := &pages.Page{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$Page", }, ContainerID: containerID, @@ -48,7 +48,7 @@ func (pb *pageBuilder) buildPageV3(s *ast.CreatePageStmtV3) (*pages.Page, error) if s.Title != "" { page.Title = &model.Text{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Texts$Text", }, Translations: map[string]string{"en_US": 
s.Title}, @@ -67,7 +67,7 @@ func (pb *pageBuilder) buildPageV3(s *ast.CreatePageStmtV3) (*pages.Page, error) // Create LayoutCall with arguments for placeholders page.LayoutCall = &pages.LayoutCall{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$LayoutCall", }, LayoutID: layoutID, @@ -80,7 +80,7 @@ func (pb *pageBuilder) buildPageV3(s *ast.CreatePageStmtV3) (*pages.Page, error) for _, param := range s.Parameters { pageParam := &pages.PageParameter{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$PageParameter", }, ContainerID: page.ID, @@ -112,7 +112,7 @@ func (pb *pageBuilder) buildPageV3(s *ast.CreatePageStmtV3) (*pages.Page, error) for _, v := range s.Variables { localVar := &pages.LocalVariable{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$LocalVariable", }, ContainerID: page.ID, @@ -129,7 +129,7 @@ func (pb *pageBuilder) buildPageV3(s *ast.CreatePageStmtV3) (*pages.Page, error) arg := &pages.LayoutCallArgument{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$FormCallArgument", }, ParameterID: model.ID(mainPlaceholderRef), @@ -140,7 +140,7 @@ func (pb *pageBuilder) buildPageV3(s *ast.CreatePageStmtV3) (*pages.Page, error) containerWidget := &pages.Container{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$DivContainer", }, Name: "conditionalVisibilityWidget1", @@ -182,7 +182,7 @@ func (pb *pageBuilder) buildSnippetV3(s *ast.CreateSnippetStmtV3) (*pages.Snippe snippet := &pages.Snippet{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$Snippet", }, ContainerID: containerID, @@ -194,7 +194,7 @@ func (pb 
*pageBuilder) buildSnippetV3(s *ast.CreateSnippetStmtV3) (*pages.Snippe for _, param := range s.Parameters { snippetParam := &pages.SnippetParameter{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$SnippetParameter", }, ContainerID: snippet.ID, @@ -221,7 +221,7 @@ func (pb *pageBuilder) buildSnippetV3(s *ast.CreateSnippetStmtV3) (*pages.Snippe for _, v := range s.Variables { localVar := &pages.LocalVariable{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$LocalVariable", }, ContainerID: snippet.ID, @@ -382,7 +382,7 @@ func applyConditionalSettings(widget pages.Widget, w *ast.WidgetV3) { if visibleIf := w.GetStringProp("VisibleIf"); visibleIf != "" { bw.ConditionalVisibility = &pages.ConditionalVisibilitySettings{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$ConditionalVisibilitySettings", }, Expression: visibleIf, @@ -392,7 +392,7 @@ func applyConditionalSettings(widget pages.Widget, w *ast.WidgetV3) { if editableIf := w.GetStringProp("EditableIf"); editableIf != "" { bw.ConditionalEditability = &pages.ConditionalEditabilitySettings{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$ConditionalEditabilitySettings", }, Expression: editableIf, @@ -496,7 +496,7 @@ func (pb *pageBuilder) buildDataSourceV3(ds *ast.DataSourceV3) (pages.DataSource // Use DataViewSource with IsSnippetParameter flag return &pages.DataViewSource{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$DataViewSource", }, EntityID: entityID, @@ -517,7 +517,7 @@ func (pb *pageBuilder) buildDataSourceV3(ds *ast.DataSourceV3) (pages.DataSource dbSource := &pages.DatabaseSource{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: 
model.ID(types.GenerateID()), TypeName: "Forms$DatabaseSource", // Note: actual BSON $Type depends on widget context (grid/listview/dataview) }, EntityID: entityID, @@ -537,7 +537,7 @@ func (pb *pageBuilder) buildDataSourceV3(ds *ast.DataSourceV3) (pages.DataSource } sortItem := &pages.GridSort{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$GridSort", }, AttributePath: pb.resolveAttributePathForEntity(ob.Attribute, ds.Reference), @@ -560,7 +560,7 @@ func (pb *pageBuilder) buildDataSourceV3(ds *ast.DataSourceV3) (pages.DataSource return &pages.MicroflowSource{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$MicroflowSource", }, MicroflowID: mfID, @@ -579,7 +579,7 @@ func (pb *pageBuilder) buildDataSourceV3(ds *ast.DataSourceV3) (pages.DataSource return &pages.NanoflowSource{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$NanoflowSource", }, NanoflowID: nfID, @@ -610,7 +610,7 @@ func (pb *pageBuilder) buildDataSourceV3(ds *ast.DataSourceV3) (pages.DataSource // widget can resolve short attribute names against it. 
return &pages.AssociationSource{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$AssociationSource", }, EntityPath: path + "/" + destEntity, @@ -630,7 +630,7 @@ func (pb *pageBuilder) buildDataSourceV3(ds *ast.DataSourceV3) (pages.DataSource return &pages.ListenToWidgetSource{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$ListenTargetSource", }, WidgetID: widgetID, @@ -842,7 +842,7 @@ func (pb *pageBuilder) buildClientActionV3(action *ast.ActionV3) (pages.ClientAc case "save": return &pages.SaveChangesClientAction{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$SaveChangesClientAction", }, ClosePage: action.ClosePage, @@ -851,7 +851,7 @@ func (pb *pageBuilder) buildClientActionV3(action *ast.ActionV3) (pages.ClientAc case "cancel": return &pages.CancelChangesClientAction{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$CancelChangesClientAction", }, ClosePage: action.ClosePage, @@ -860,7 +860,7 @@ func (pb *pageBuilder) buildClientActionV3(action *ast.ActionV3) (pages.ClientAc case "close": return &pages.ClosePageClientAction{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$ClosePageClientAction", }, }, nil @@ -868,7 +868,7 @@ func (pb *pageBuilder) buildClientActionV3(action *ast.ActionV3) (pages.ClientAc case "delete": return &pages.DeleteClientAction{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$DeleteClientAction", }, }, nil @@ -884,7 +884,7 @@ func (pb *pageBuilder) buildClientActionV3(action *ast.ActionV3) (pages.ClientAc createAct := &pages.CreateObjectClientAction{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + 
ID: model.ID(types.GenerateID()), TypeName: "Forms$CreateObjectClientAction", }, EntityID: entityID, @@ -911,7 +911,7 @@ func (pb *pageBuilder) buildClientActionV3(action *ast.ActionV3) (pages.ClientAc pageAction := &pages.PageClientAction{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$PageClientAction", }, PageName: action.Target, @@ -921,7 +921,7 @@ func (pb *pageBuilder) buildClientActionV3(action *ast.ActionV3) (pages.ClientAc for _, arg := range action.Args { mapping := &pages.PageClientParameterMapping{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$PageParameterMapping", }, ParameterName: arg.Name, @@ -950,7 +950,7 @@ func (pb *pageBuilder) buildClientActionV3(action *ast.ActionV3) (pages.ClientAc mfAction := &pages.MicroflowClientAction{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$MicroflowAction", }, MicroflowID: mfID, @@ -961,7 +961,7 @@ func (pb *pageBuilder) buildClientActionV3(action *ast.ActionV3) (pages.ClientAc for _, arg := range action.Args { mapping := &pages.MicroflowParameterMapping{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$MicroflowParameterMapping", }, ParameterName: arg.Name, @@ -990,7 +990,7 @@ func (pb *pageBuilder) buildClientActionV3(action *ast.ActionV3) (pages.ClientAc nfAction := &pages.NanoflowClientAction{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$NanoflowAction", }, NanoflowID: nfID, @@ -1001,7 +1001,7 @@ func (pb *pageBuilder) buildClientActionV3(action *ast.ActionV3) (pages.ClientAc for _, arg := range action.Args { mapping := &pages.NanoflowParameterMapping{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), 
TypeName: "Forms$NanoflowParameterMapping", }, ParameterName: arg.Name, @@ -1025,7 +1025,7 @@ func (pb *pageBuilder) buildClientActionV3(action *ast.ActionV3) (pages.ClientAc case "openLink": return &pages.LinkClientAction{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$LinkClientAction", }, LinkType: pages.LinkTypeWeb, @@ -1035,7 +1035,7 @@ func (pb *pageBuilder) buildClientActionV3(action *ast.ActionV3) (pages.ClientAc case "signOut": return &pages.SignOutClientAction{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$SignOutClientAction", }, }, nil @@ -1043,7 +1043,7 @@ func (pb *pageBuilder) buildClientActionV3(action *ast.ActionV3) (pages.ClientAc case "completeTask": return &pages.SetTaskOutcomeClientAction{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$SetTaskOutcomeClientAction", }, ClosePage: true, diff --git a/mdl/executor/cmd_pages_builder_v3_layout.go b/mdl/executor/cmd_pages_builder_v3_layout.go index cd26f1b3..a7dca672 100644 --- a/mdl/executor/cmd_pages_builder_v3_layout.go +++ b/mdl/executor/cmd_pages_builder_v3_layout.go @@ -6,7 +6,7 @@ import ( "strings" "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/pages" "github.com/mendixlabs/mxcli/mdl/ast" @@ -16,7 +16,7 @@ func (pb *pageBuilder) buildLayoutGridV3(w *ast.WidgetV3) (*pages.LayoutGrid, er lg := &pages.LayoutGrid{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$LayoutGrid", }, Name: w.Name, @@ -40,7 +40,7 @@ func (pb *pageBuilder) buildLayoutGridV3(w *ast.WidgetV3) (*pages.LayoutGrid, er func (pb *pageBuilder) buildLayoutGridRowV3(w *ast.WidgetV3) (*pages.LayoutGridRow, error) { row 
:= &pages.LayoutGridRow{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$LayoutGridRow", }, } @@ -62,7 +62,7 @@ func (pb *pageBuilder) buildLayoutGridRowV3(w *ast.WidgetV3) (*pages.LayoutGridR func (pb *pageBuilder) buildLayoutGridColumnV3(w *ast.WidgetV3) (*pages.LayoutGridColumn, error) { col := &pages.LayoutGridColumn{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$LayoutGridColumn", }, Weight: 1, @@ -121,7 +121,7 @@ func (pb *pageBuilder) buildContainerWithRowV3(w *ast.WidgetV3) (*pages.Containe container := &pages.Container{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$DivContainer", }, Name: w.Name, @@ -131,7 +131,7 @@ func (pb *pageBuilder) buildContainerWithRowV3(w *ast.WidgetV3) (*pages.Containe lg := &pages.LayoutGrid{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$LayoutGrid", }, Name: w.Name + "_grid", @@ -153,7 +153,7 @@ func (pb *pageBuilder) buildContainerWithColumnV3(w *ast.WidgetV3) (*pages.Conta container := &pages.Container{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$DivContainer", }, Name: w.Name, @@ -163,7 +163,7 @@ func (pb *pageBuilder) buildContainerWithColumnV3(w *ast.WidgetV3) (*pages.Conta lg := &pages.LayoutGrid{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$LayoutGrid", }, Name: w.Name + "_grid", @@ -172,7 +172,7 @@ func (pb *pageBuilder) buildContainerWithColumnV3(w *ast.WidgetV3) (*pages.Conta row := &pages.LayoutGridRow{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), 
+ ID: model.ID(types.GenerateID()), TypeName: "Forms$LayoutGridRow", }, } @@ -192,7 +192,7 @@ func (pb *pageBuilder) buildContainerV3(w *ast.WidgetV3) (*pages.Container, erro container := &pages.Container{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$DivContainer", }, Name: w.Name, @@ -220,7 +220,7 @@ func (pb *pageBuilder) buildTabContainerV3(w *ast.WidgetV3) (*pages.TabContainer tc := &pages.TabContainer{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$TabControl", }, Name: w.Name, @@ -248,7 +248,7 @@ func (pb *pageBuilder) buildTabContainerV3(w *ast.WidgetV3) (*pages.TabContainer func (pb *pageBuilder) buildTabPageV3(w *ast.WidgetV3) (*pages.TabPage, error) { tp := &pages.TabPage{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$TabPage", }, Name: w.Name, @@ -258,7 +258,7 @@ func (pb *pageBuilder) buildTabPageV3(w *ast.WidgetV3) (*pages.TabPage, error) { if caption := w.GetCaption(); caption != "" { tp.Caption = &model.Text{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Texts$Text", }, Translations: map[string]string{"en_US": caption}, @@ -285,7 +285,7 @@ func (pb *pageBuilder) buildGroupBoxV3(w *ast.WidgetV3) (*pages.GroupBox, error) gb := &pages.GroupBox{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$GroupBox", }, Name: w.Name, @@ -299,7 +299,7 @@ func (pb *pageBuilder) buildGroupBoxV3(w *ast.WidgetV3) (*pages.GroupBox, error) gb.Caption = &pages.ClientTemplate{ Template: &model.Text{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Texts$Text", }, 
Translations: map[string]string{"en_US": caption}, @@ -347,7 +347,7 @@ func (pb *pageBuilder) buildFooterV3(w *ast.WidgetV3) (*pages.Container, error) footer := &pages.Container{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$DivContainer", }, Name: w.Name, @@ -375,7 +375,7 @@ func (pb *pageBuilder) buildHeaderV3(w *ast.WidgetV3) (*pages.Container, error) header := &pages.Container{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$DivContainer", }, Name: w.Name, @@ -403,7 +403,7 @@ func (pb *pageBuilder) buildControlBarV3(w *ast.WidgetV3) (*pages.Container, err controlBar := &pages.Container{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$DivContainer", }, Name: w.Name, diff --git a/mdl/executor/cmd_pages_builder_v3_widgets.go b/mdl/executor/cmd_pages_builder_v3_widgets.go index 27d98581..52fa365c 100644 --- a/mdl/executor/cmd_pages_builder_v3_widgets.go +++ b/mdl/executor/cmd_pages_builder_v3_widgets.go @@ -11,8 +11,8 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" "github.com/mendixlabs/mxcli/sdk/pages" "github.com/mendixlabs/mxcli/sdk/widgets" ) @@ -21,7 +21,7 @@ func (pb *pageBuilder) buildDataViewV3(w *ast.WidgetV3) (*pages.DataView, error) dv := &pages.DataView{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$DataView", }, Name: w.Name, @@ -91,10 +91,10 @@ func (pb *pageBuilder) buildDataViewV3(w *ast.WidgetV3) (*pages.DataView, error) func (pb *pageBuilder) buildDataGridV3(w *ast.WidgetV3) 
(*pages.CustomWidget, error) { // Build DataGrid2 as a CustomWidget (pluggable widget) like V2 does. // The built-in DataGrid (Forms$DataGrid) has serialization issues. - widgetID := model.ID(mpr.GenerateID()) + widgetID := model.ID(types.GenerateID()) // Load embedded template (required for pluggable widgets to work) - embeddedType, embeddedObject, embeddedIDs, embeddedObjectTypeID, err := widgets.GetTemplateFullBSON(pages.WidgetIDDataGrid2, mpr.GenerateID, pb.reader.Path()) + embeddedType, embeddedObject, embeddedIDs, embeddedObjectTypeID, err := widgets.GetTemplateFullBSON(pages.WidgetIDDataGrid2, types.GenerateID, pb.reader.Path()) if err != nil { return nil, mdlerrors.NewBackend("load DataGrid2 template", err) } @@ -198,7 +198,7 @@ func (pb *pageBuilder) buildDataGridV3(w *ast.WidgetV3) (*pages.CustomWidget, er func (pb *pageBuilder) buildDataGridColumnV3(w *ast.WidgetV3) (*pages.DataGridColumn, error) { col := &pages.DataGridColumn{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$DataGridColumn", }, Name: w.Name, @@ -214,7 +214,7 @@ func (pb *pageBuilder) buildDataGridColumnV3(w *ast.WidgetV3) (*pages.DataGridCo if caption := w.GetCaption(); caption != "" { col.Caption = &model.Text{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Texts$Text", }, Translations: map[string]string{"en_US": caption}, @@ -228,7 +228,7 @@ func (pb *pageBuilder) buildListViewV3(w *ast.WidgetV3) (*pages.ListView, error) lv := &pages.ListView{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$ListView", }, Name: w.Name, @@ -276,7 +276,7 @@ func (pb *pageBuilder) buildTextBoxV3(w *ast.WidgetV3) (*pages.TextBox, error) { tb := &pages.TextBox{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: 
model.ID(types.GenerateID()), TypeName: "Forms$TextBox", }, Name: w.Name, @@ -304,7 +304,7 @@ func (pb *pageBuilder) buildTextAreaV3(w *ast.WidgetV3) (*pages.TextArea, error) ta := &pages.TextArea{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$TextArea", }, Name: w.Name, @@ -332,7 +332,7 @@ func (pb *pageBuilder) buildDatePickerV3(w *ast.WidgetV3) (*pages.DatePicker, er dp := &pages.DatePicker{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$DatePicker", }, Name: w.Name, @@ -360,7 +360,7 @@ func (pb *pageBuilder) buildDropdownV3(w *ast.WidgetV3) (*pages.DropDown, error) dd := &pages.DropDown{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$DropDown", }, Name: w.Name, @@ -388,7 +388,7 @@ func (pb *pageBuilder) buildCheckBoxV3(w *ast.WidgetV3) (*pages.CheckBox, error) cb := &pages.CheckBox{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$CheckBox", }, Name: w.Name, @@ -417,7 +417,7 @@ func (pb *pageBuilder) buildRadioButtonsV3(w *ast.WidgetV3) (*pages.RadioButtons rb := &pages.RadioButtons{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$RadioButtonGroup", }, Name: w.Name, @@ -441,7 +441,7 @@ func (pb *pageBuilder) buildTextWidgetV3(w *ast.WidgetV3) (*pages.Text, error) { st := &pages.Text{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$Text", }, Name: w.Name, @@ -453,7 +453,7 @@ func (pb *pageBuilder) buildTextWidgetV3(w *ast.WidgetV3) (*pages.Text, error) { if content 
:= w.GetContent(); content != "" { st.Caption = &model.Text{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Texts$Text", }, Translations: map[string]string{"en_US": content}, @@ -476,7 +476,7 @@ func (pb *pageBuilder) buildDynamicTextV3(w *ast.WidgetV3) (*pages.DynamicText, dt := &pages.DynamicText{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$DynamicText", }, Name: w.Name, @@ -524,12 +524,12 @@ func (pb *pageBuilder) buildDynamicTextV3(w *ast.WidgetV3) (*pages.DynamicText, dt.Content = &pages.ClientTemplate{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$ClientTemplate", }, Template: &model.Text{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Texts$Text", }, Translations: map[string]string{"en_US": content}, @@ -540,7 +540,7 @@ func (pb *pageBuilder) buildDynamicTextV3(w *ast.WidgetV3) (*pages.DynamicText, for _, attrRef := range autoGeneratedParams { param := &pages.ClientTemplateParameter{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$ClientTemplateParameter", }, } @@ -553,7 +553,7 @@ func (pb *pageBuilder) buildDynamicTextV3(w *ast.WidgetV3) (*pages.DynamicText, for _, p := range explicitParams { param := &pages.ClientTemplateParameter{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$ClientTemplateParameter", }, } @@ -585,7 +585,7 @@ func (pb *pageBuilder) buildTitleV3(w *ast.WidgetV3) (*pages.Title, error) { title := &pages.Title{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$Title", }, Name: w.Name, @@ -597,7 
+597,7 @@ func (pb *pageBuilder) buildTitleV3(w *ast.WidgetV3) (*pages.Title, error) { if content != "" { title.Caption = &model.Text{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Texts$Text", }, Translations: map[string]string{"en_US": content}, @@ -615,7 +615,7 @@ func (pb *pageBuilder) buildButtonV3(w *ast.WidgetV3) (*pages.ActionButton, erro btn := &pages.ActionButton{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$ActionButton", }, Name: w.Name, @@ -627,12 +627,12 @@ func (pb *pageBuilder) buildButtonV3(w *ast.WidgetV3) (*pages.ActionButton, erro if caption := w.GetCaption(); caption != "" { btn.CaptionTemplate = &pages.ClientTemplate{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$ClientTemplate", }, Template: &model.Text{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Texts$Text", }, Translations: map[string]string{"en_US": caption}, @@ -644,7 +644,7 @@ func (pb *pageBuilder) buildButtonV3(w *ast.WidgetV3) (*pages.ActionButton, erro for _, p := range params { param := &pages.ClientTemplateParameter{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$ClientTemplateParameter", }, } @@ -692,7 +692,7 @@ func (pb *pageBuilder) buildNavigationListV3(w *ast.WidgetV3) (*pages.Navigation navList := &pages.NavigationList{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$NavigationList", }, Name: w.Name, @@ -725,7 +725,7 @@ func (pb *pageBuilder) buildNavigationListItemV3(w *ast.WidgetV3) (*pages.Naviga item := &pages.NavigationListItem{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), 
+ ID: model.ID(types.GenerateID()), TypeName: "Forms$NavigationListItem", }, Name: w.Name, @@ -739,7 +739,7 @@ func (pb *pageBuilder) buildNavigationListItemV3(w *ast.WidgetV3) (*pages.Naviga if caption := w.GetCaption(); caption != "" { item.Caption = &model.Text{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Texts$Text", }, Translations: map[string]string{"en_US": caption}, @@ -772,7 +772,7 @@ func (pb *pageBuilder) buildSnippetCallV3(w *ast.WidgetV3) (*pages.SnippetCallWi sc := &pages.SnippetCallWidget{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$SnippetCallWidget", }, Name: w.Name, @@ -801,7 +801,7 @@ func (pb *pageBuilder) buildTemplateV3(w *ast.WidgetV3) (*pages.Container, error container := &pages.Container{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$DivContainer", }, Name: w.Name, @@ -825,7 +825,7 @@ func (pb *pageBuilder) buildFilterV3(w *ast.WidgetV3) (*pages.Container, error) container := &pages.Container{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$DivContainer", }, Name: w.Name, @@ -848,7 +848,7 @@ func (pb *pageBuilder) buildStaticImageV3(w *ast.WidgetV3) (*pages.StaticImage, img := &pages.StaticImage{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$StaticImageViewer", }, Name: w.Name, @@ -874,7 +874,7 @@ func (pb *pageBuilder) buildDynamicImageV3(w *ast.WidgetV3) (*pages.DynamicImage img := &pages.DynamicImage{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Forms$ImageViewer", }, 
Name: w.Name, diff --git a/mdl/executor/cmd_rename.go b/mdl/executor/cmd_rename.go index 05a96443..441a900d 100644 --- a/mdl/executor/cmd_rename.go +++ b/mdl/executor/cmd_rename.go @@ -9,7 +9,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" ) // execRename handles RENAME statements for all document types. @@ -347,7 +347,7 @@ func execRenameAssociation(ctx *ExecContext, s *ast.RenameStmt) error { } // printRenameReport outputs a dry-run report of what would change. -func printRenameReport(ctx *ExecContext, oldName, newName string, hits []mpr.RenameHit) { +func printRenameReport(ctx *ExecContext, oldName, newName string, hits []types.RenameHit) { fmt.Fprintf(ctx.Output, "Would rename: %s → %s\n", oldName, newName) fmt.Fprintf(ctx.Output, "References found: %d in %d document(s)\n", totalRefCount(hits), len(hits)) @@ -364,7 +364,7 @@ func printRenameReport(ctx *ExecContext, oldName, newName string, hits []mpr.Ren } } -func totalRefCount(hits []mpr.RenameHit) int { +func totalRefCount(hits []types.RenameHit) int { total := 0 for _, h := range hits { total += h.Count @@ -372,9 +372,9 @@ func totalRefCount(hits []mpr.RenameHit) int { return total } -func mergeHits(a, b []mpr.RenameHit) []mpr.RenameHit { +func mergeHits(a, b []types.RenameHit) []types.RenameHit { seen := make(map[string]int) // unitID → index in result - result := make([]mpr.RenameHit, len(a)) + result := make([]types.RenameHit, len(a)) copy(result, a) for i := range result { seen[result[i].UnitID] = i diff --git a/mdl/executor/cmd_security_write.go b/mdl/executor/cmd_security_write.go index ffe7cc0e..e97bf593 100644 --- a/mdl/executor/cmd_security_write.go +++ b/mdl/executor/cmd_security_write.go @@ -11,7 +11,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/backend" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" "github.com/mendixlabs/mxcli/model" - 
"github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/security" ) @@ -332,7 +332,7 @@ func execGrantEntityAccess(ctx *ExecContext, s *ast.GrantEntityAccessStmt) error // Build MemberAccess entries for all entity attributes and associations. // Mendix requires explicit MemberAccess entries for every member — an empty // MemberAccesses array triggers CE0066 "Entity access is out of date". - var memberAccesses []mpr.EntityMemberAccess + var memberAccesses []types.EntityMemberAccess // Build sets for specific member overrides (when READ (Name, Email) syntax is used) writeMemberSet := make(map[string]bool) @@ -357,7 +357,7 @@ func execGrantEntityAccess(ctx *ExecContext, s *ast.GrantEntityAccessStmt) error if isCalculated && (rights == "ReadWrite" || rights == "WriteOnly") { rights = "ReadOnly" } - memberAccesses = append(memberAccesses, mpr.EntityMemberAccess{ + memberAccesses = append(memberAccesses, types.EntityMemberAccess{ AttributeRef: module.Name + "." + s.Entity.Name + "." + attr.Name, AccessRights: rights, }) @@ -374,7 +374,7 @@ func execGrantEntityAccess(ctx *ExecContext, s *ast.GrantEntityAccessStmt) error } else if readMemberSet[assoc.Name] { rights = "ReadOnly" } - memberAccesses = append(memberAccesses, mpr.EntityMemberAccess{ + memberAccesses = append(memberAccesses, types.EntityMemberAccess{ AssociationRef: module.Name + "." + assoc.Name, AccessRights: rights, }) @@ -388,7 +388,7 @@ func execGrantEntityAccess(ctx *ExecContext, s *ast.GrantEntityAccessStmt) error } else if readMemberSet[ca.Name] { rights = "ReadOnly" } - memberAccesses = append(memberAccesses, mpr.EntityMemberAccess{ + memberAccesses = append(memberAccesses, types.EntityMemberAccess{ AssociationRef: module.Name + "." 
+ ca.Name, AccessRights: rights, }) @@ -399,13 +399,13 @@ func execGrantEntityAccess(ctx *ExecContext, s *ast.GrantEntityAccessStmt) error // When an entity has HasOwner/HasChangedBy, Mendix implicitly adds // System.owner/System.changedBy associations that require MemberAccess. if entity.HasOwner { - memberAccesses = append(memberAccesses, mpr.EntityMemberAccess{ + memberAccesses = append(memberAccesses, types.EntityMemberAccess{ AssociationRef: "System.owner", AccessRights: defaultMemberAccess, }) } if entity.HasChangedBy { - memberAccesses = append(memberAccesses, mpr.EntityMemberAccess{ + memberAccesses = append(memberAccesses, types.EntityMemberAccess{ AssociationRef: "System.changedBy", AccessRights: defaultMemberAccess, }) @@ -470,7 +470,7 @@ func execRevokeEntityAccess(ctx *ExecContext, s *ast.RevokeEntityAccessStmt) err if len(s.Rights) > 0 { // Partial revoke — downgrade specific rights - revocation := mpr.EntityAccessRevocation{} + revocation := types.EntityAccessRevocation{} for _, right := range s.Rights { switch right.Type { case ast.EntityAccessCreate: diff --git a/mdl/executor/cmd_workflows_write.go b/mdl/executor/cmd_workflows_write.go index db298405..ce2d116d 100644 --- a/mdl/executor/cmd_workflows_write.go +++ b/mdl/executor/cmd_workflows_write.go @@ -11,7 +11,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/workflows" ) @@ -161,7 +161,7 @@ func execDropWorkflow(ctx *ExecContext, s *ast.DropWorkflowStmt) error { // generateWorkflowUUID generates a UUID for workflow elements. func generateWorkflowUUID() string { - return mpr.GenerateID() + return types.GenerateID() } // buildWorkflowActivities converts AST activity nodes to SDK workflow activities. 
@@ -334,7 +334,7 @@ func buildCallWorkflowActivity(n *ast.WorkflowCallWorkflowNode) *workflows.CallW Parameter: wfQN + "." + pm.Parameter, Expression: pm.Expression, } - mapping.BaseElement.ID = model.ID(mpr.GenerateID()) + mapping.BaseElement.ID = model.ID(types.GenerateID()) act.ParameterMappings = append(act.ParameterMappings, mapping) } @@ -575,7 +575,7 @@ func uniqueName(name string, nameCount map[string]int) string { func buildAnnotationActivity(n *ast.WorkflowAnnotationActivityNode) *workflows.WorkflowAnnotationActivity { a := &workflows.WorkflowAnnotationActivity{} - a.ID = model.ID(mpr.GenerateID()) + a.ID = model.ID(types.GenerateID()) a.Description = n.Text return a } @@ -702,7 +702,7 @@ func autoBindCallMicroflow(ctx *ExecContext, task *workflows.CallMicroflowTask) Parameter: paramQualifiedName, Expression: "$WorkflowContext", } - mapping.BaseElement.ID = model.ID(mpr.GenerateID()) + mapping.BaseElement.ID = model.ID(types.GenerateID()) task.ParameterMappings = append(task.ParameterMappings, mapping) } @@ -711,8 +711,8 @@ func autoBindCallMicroflow(ctx *ExecContext, task *workflows.CallMicroflowTask) outcome := &workflows.VoidConditionOutcome{ Flow: &workflows.Flow{}, } - outcome.BaseElement.ID = model.ID(mpr.GenerateID()) - outcome.Flow.BaseElement.ID = model.ID(mpr.GenerateID()) + outcome.BaseElement.ID = model.ID(types.GenerateID()) + outcome.Flow.BaseElement.ID = model.ID(types.GenerateID()) task.Outcomes = append(task.Outcomes, outcome) } break @@ -757,7 +757,7 @@ func autoBindCallWorkflow(ctx *ExecContext, act *workflows.CallWorkflowActivity) Parameter: paramName, Expression: "$WorkflowContext", } - mapping.BaseElement.ID = model.ID(mpr.GenerateID()) + mapping.BaseElement.ID = model.ID(types.GenerateID()) act.ParameterMappings = append(act.ParameterMappings, mapping) } break diff --git a/mdl/executor/cmd_write_handlers_mock_test.go b/mdl/executor/cmd_write_handlers_mock_test.go index 6d1130a9..5d442758 100644 --- 
a/mdl/executor/cmd_write_handlers_mock_test.go +++ b/mdl/executor/cmd_write_handlers_mock_test.go @@ -7,10 +7,10 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" "github.com/mendixlabs/mxcli/mdl/backend/mock" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/domainmodel" "github.com/mendixlabs/mxcli/sdk/microflows" - "github.com/mendixlabs/mxcli/sdk/mpr" "github.com/mendixlabs/mxcli/sdk/pages" ) @@ -257,7 +257,7 @@ func TestExecDropAssociation_Mock(t *testing.T) { func TestExecDropJavaAction_Mock(t *testing.T) { mod := mkModule("MyModule") jaID := nextID("ja") - ja := &mpr.JavaAction{ + ja := &types.JavaAction{ BaseElement: model.BaseElement{ID: jaID}, ContainerID: mod.ID, Name: "MyAction", @@ -268,8 +268,8 @@ func TestExecDropJavaAction_Mock(t *testing.T) { called := false mb := &mock.MockBackend{ IsConnectedFunc: func() bool { return true }, - ListJavaActionsFunc: func() ([]*mpr.JavaAction, error) { - return []*mpr.JavaAction{ja}, nil + ListJavaActionsFunc: func() ([]*types.JavaAction, error) { + return []*types.JavaAction{ja}, nil }, DeleteJavaActionFunc: func(id model.ID) error { called = true @@ -291,7 +291,7 @@ func TestExecDropJavaAction_Mock(t *testing.T) { func TestExecDropFolder_Mock(t *testing.T) { mod := mkModule("MyModule") folderID := nextID("folder") - folder := &mpr.FolderInfo{ + folder := &types.FolderInfo{ ID: folderID, ContainerID: mod.ID, Name: "Resources", @@ -306,8 +306,8 @@ func TestExecDropFolder_Mock(t *testing.T) { ListModulesFunc: func() ([]*model.Module, error) { return []*model.Module{mod}, nil }, - ListFoldersFunc: func() ([]*mpr.FolderInfo, error) { - return []*mpr.FolderInfo{folder}, nil + ListFoldersFunc: func() ([]*types.FolderInfo, error) { + return []*types.FolderInfo{folder}, nil }, DeleteFolderFunc: func(id model.ID) error { called = true diff --git a/mdl/executor/executor.go b/mdl/executor/executor.go index b858fd2c..e68fa89a 100644 --- a/mdl/executor/executor.go 
+++ b/mdl/executor/executor.go @@ -14,6 +14,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/catalog" "github.com/mendixlabs/mxcli/mdl/diaglog" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/domainmodel" "github.com/mendixlabs/mxcli/sdk/mpr" @@ -23,8 +24,8 @@ import ( // executorCache holds cached data for performance across multiple operations. type executorCache struct { modules []*model.Module - units []*mpr.UnitInfo - folders []*mpr.FolderInfo + units []*types.UnitInfo + folders []*types.FolderInfo domainModels []*domainmodel.DomainModel hierarchy *ContainerHierarchy // pages, layouts, microflows are cached separately as they may change during execution diff --git a/mdl/executor/helpers.go b/mdl/executor/helpers.go index d8e34102..87bf68fe 100644 --- a/mdl/executor/helpers.go +++ b/mdl/executor/helpers.go @@ -10,9 +10,9 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/domainmodel" - "github.com/mendixlabs/mxcli/sdk/mpr" ) // ---------------------------------------------------------------------------- @@ -118,7 +118,7 @@ func resolveFolder(ctx *ExecContext, moduleID model.ID, folderPath string) (mode } // Find folder with this name under current container - var foundFolder *mpr.FolderInfo + var foundFolder *types.FolderInfo for _, f := range folders { if f.ContainerID == currentContainerID && f.Name == part { foundFolder = f @@ -138,7 +138,7 @@ func resolveFolder(ctx *ExecContext, moduleID model.ID, folderPath string) (mode currentContainerID = newFolderID // Add to the list so subsequent lookups find it - folders = append(folders, &mpr.FolderInfo{ + folders = append(folders, &types.FolderInfo{ ID: newFolderID, ContainerID: parentID, Name: part, @@ -153,7 +153,7 @@ func resolveFolder(ctx 
*ExecContext, moduleID model.ID, folderPath string) (mode func createFolder(ctx *ExecContext, name string, containerID model.ID) (model.ID, error) { folder := &model.Folder{ BaseElement: model.BaseElement{ - ID: model.ID(mpr.GenerateID()), + ID: model.ID(types.GenerateID()), TypeName: "Projects$Folder", }, ContainerID: containerID, diff --git a/mdl/executor/widget_engine.go b/mdl/executor/widget_engine.go index ad11effa..548fa01b 100644 --- a/mdl/executor/widget_engine.go +++ b/mdl/executor/widget_engine.go @@ -8,6 +8,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/mpr" "github.com/mendixlabs/mxcli/sdk/pages" @@ -94,7 +95,7 @@ func (e *PluggableWidgetEngine) Build(def *WidgetDefinition, w *ast.WidgetV3) (* // 1. Load template embeddedType, embeddedObject, embeddedIDs, embeddedObjectTypeID, err := - widgets.GetTemplateFullBSON(def.WidgetID, mpr.GenerateID, e.pageBuilder.reader.Path()) + widgets.GetTemplateFullBSON(def.WidgetID, types.GenerateID, e.pageBuilder.reader.Path()) if err != nil { return nil, mdlerrors.NewBackend("load "+def.MDLName+" template", err) } @@ -328,7 +329,7 @@ func (e *PluggableWidgetEngine) Build(def *WidgetDefinition, w *ast.WidgetV3) (* updatedObject = ensureRequiredObjectLists(updatedObject, propertyTypeIDs) // 5. 
Build CustomWidget - widgetID := model.ID(mpr.GenerateID()) + widgetID := model.ID(types.GenerateID()) cw := &pages.CustomWidget{ BaseWidget: pages.BaseWidget{ BaseElement: model.BaseElement{ diff --git a/mdl/executor/widget_operations.go b/mdl/executor/widget_operations.go index 8d869633..6d183314 100644 --- a/mdl/executor/widget_operations.go +++ b/mdl/executor/widget_operations.go @@ -5,6 +5,7 @@ package executor import ( "log" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/mpr" "github.com/mendixlabs/mxcli/sdk/pages" "go.mongodb.org/mongo-driver/bson" @@ -217,7 +218,7 @@ func updateTemplateText(tmpl bson.D, text string) bson.D { updated = append(updated, bson.E{Key: "Items", Value: bson.A{ int32(3), bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: mpr.IDToBsonBinary(types.GenerateID())}, {Key: "$Type", Value: "Texts$Translation"}, {Key: "LanguageCode", Value: "en_US"}, {Key: "Text", Value: text}, diff --git a/mdl/executor/widget_templates.go b/mdl/executor/widget_templates.go index d25459e9..d5d5e9c9 100644 --- a/mdl/executor/widget_templates.go +++ b/mdl/executor/widget_templates.go @@ -8,7 +8,7 @@ import ( "regexp" "strings" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" "go.mongodb.org/mongo-driver/bson" ) @@ -144,7 +144,7 @@ func createDefaultClientTemplateBSON(text string) bson.D { // generateBinaryID creates a new random 16-byte UUID in Microsoft GUID binary format. func generateBinaryID() []byte { - return hexIDToBlob(mpr.GenerateID()) + return hexIDToBlob(types.GenerateID()) } // hexIDToBlob converts a hex UUID string to a 16-byte binary blob in Microsoft GUID format. 
diff --git a/mdl/linter/rules/page_navigation_security.go b/mdl/linter/rules/page_navigation_security.go index a8e0f01d..33a73f56 100644 --- a/mdl/linter/rules/page_navigation_security.go +++ b/mdl/linter/rules/page_navigation_security.go @@ -7,6 +7,7 @@ import ( "strings" "github.com/mendixlabs/mxcli/mdl/linter" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/mpr" ) @@ -122,7 +123,7 @@ func (r *PageNavigationSecurityRule) Check(ctx *linter.LintContext) []linter.Vio } // collectMenuPages recursively collects pages from navigation menu items. -func collectMenuPages(items []*mpr.NavMenuItem, profileName string, navPages map[string][]navUsage) { +func collectMenuPages(items []*types.NavMenuItem, profileName string, navPages map[string][]navUsage) { for _, item := range items { if item.Page != "" { navPages[item.Page] = append(navPages[item.Page], diff --git a/mdl/types/asyncapi.go b/mdl/types/asyncapi.go new file mode 100644 index 00000000..692eaae6 --- /dev/null +++ b/mdl/types/asyncapi.go @@ -0,0 +1,205 @@ +// SPDX-License-Identifier: Apache-2.0 + +package types + +import ( + "fmt" + "strings" + + "gopkg.in/yaml.v3" +) + +// AsyncAPIDocument represents a parsed AsyncAPI 2.x document. +type AsyncAPIDocument struct { + Version string // AsyncAPI version (e.g. "2.2.0") + Title string // Service title + DocVersion string // Document version + Description string + Channels []*AsyncAPIChannel // Resolved channels + Messages []*AsyncAPIMessage // Resolved messages (from components) +} + +// AsyncAPIChannel represents a channel in the AsyncAPI document. +type AsyncAPIChannel struct { + Name string // Channel ID/name + OperationType string // "subscribe" or "publish" + OperationID string // e.g. "receiveOrderChangedEventEvents" + MessageRef string // Resolved message name +} + +// AsyncAPIMessage represents a message type. 
+type AsyncAPIMessage struct { + Name string + Title string + Description string + ContentType string + Properties []*AsyncAPIProperty // Resolved from payload schema +} + +// AsyncAPIProperty represents a property in a message payload schema. +type AsyncAPIProperty struct { + Name string + Type string // "string", "integer", "number", "boolean", "array", "object" + Format string // "int64", "int32", "date-time", "uri-reference", etc. +} + +// ParseAsyncAPI parses an AsyncAPI YAML string into an AsyncAPIDocument. +func ParseAsyncAPI(yamlStr string) (*AsyncAPIDocument, error) { + if yamlStr == "" { + return nil, fmt.Errorf("empty AsyncAPI document") + } + + var raw yamlAsyncAPI + if err := yaml.Unmarshal([]byte(yamlStr), &raw); err != nil { + return nil, fmt.Errorf("failed to parse AsyncAPI YAML: %w", err) + } + + doc := &AsyncAPIDocument{ + Version: raw.AsyncAPI, + Title: raw.Info.Title, + DocVersion: raw.Info.Version, + Description: raw.Info.Description, + } + + // Resolve messages from components + for name, msg := range raw.Components.Messages { + resolved := &AsyncAPIMessage{ + Name: name, + Title: msg.Title, + Description: msg.Description, + ContentType: msg.ContentType, + } + + // Resolve payload schema (follow $ref if present) + schemaName := "" + if msg.Payload.Ref != "" { + schemaName = asyncRefName(msg.Payload.Ref) + } + + if schemaName != "" { + if schema, ok := raw.Components.Schemas[schemaName]; ok { + resolved.Properties = resolveAsyncSchemaProperties(schema) + } + } else if msg.Payload.Properties != nil { + // Inline schema + resolved.Properties = resolveAsyncSchemaProperties(msg.Payload) + } + + doc.Messages = append(doc.Messages, resolved) + } + + // Resolve channels + for channelName, channel := range raw.Channels { + if channel.Subscribe != nil { + msgName := "" + if channel.Subscribe.Message.Ref != "" { + msgName = asyncRefName(channel.Subscribe.Message.Ref) + } + doc.Channels = append(doc.Channels, &AsyncAPIChannel{ + Name: channelName, + 
OperationType: "subscribe", + OperationID: channel.Subscribe.OperationID, + MessageRef: msgName, + }) + } + if channel.Publish != nil { + msgName := "" + if channel.Publish.Message.Ref != "" { + msgName = asyncRefName(channel.Publish.Message.Ref) + } + doc.Channels = append(doc.Channels, &AsyncAPIChannel{ + Name: channelName, + OperationType: "publish", + OperationID: channel.Publish.OperationID, + MessageRef: msgName, + }) + } + } + + return doc, nil +} + +// FindMessage looks up a message by name. +func (d *AsyncAPIDocument) FindMessage(name string) *AsyncAPIMessage { + for _, m := range d.Messages { + if strings.EqualFold(m.Name, name) { + return m + } + } + return nil +} + +// asyncRefName extracts the last segment from a $ref like "#/components/messages/OrderChangedEvent". +func asyncRefName(ref string) string { + if idx := strings.LastIndex(ref, "/"); idx >= 0 { + return ref[idx+1:] + } + return ref +} + +func resolveAsyncSchemaProperties(schema yamlAsyncSchema) []*AsyncAPIProperty { + var props []*AsyncAPIProperty + for name, prop := range schema.Properties { + props = append(props, &AsyncAPIProperty{ + Name: name, + Type: prop.Type, + Format: prop.Format, + }) + } + return props +} + +// ============================================================================ +// YAML deserialization types (internal) +// ============================================================================ + +type yamlAsyncAPI struct { + AsyncAPI string `yaml:"asyncapi"` + Info yamlAsyncInfo `yaml:"info"` + Channels map[string]yamlAsyncChannel `yaml:"channels"` + Components yamlAsyncComponents `yaml:"components"` +} + +type yamlAsyncInfo struct { + Title string `yaml:"title"` + Version string `yaml:"version"` + Description string `yaml:"description"` +} + +type yamlAsyncChannel struct { + Subscribe *yamlAsyncOperation `yaml:"subscribe"` + Publish *yamlAsyncOperation `yaml:"publish"` +} + +type yamlAsyncOperation struct { + OperationID string `yaml:"operationId"` + Message 
yamlAsyncRef `yaml:"message"` +} + +type yamlAsyncRef struct { + Ref string `yaml:"$ref"` +} + +type yamlAsyncComponents struct { + Messages map[string]yamlAsyncMessage `yaml:"messages"` + Schemas map[string]yamlAsyncSchema `yaml:"schemas"` +} + +type yamlAsyncMessage struct { + Name string `yaml:"name"` + Title string `yaml:"title"` + Description string `yaml:"description"` + ContentType string `yaml:"contentType"` + Payload yamlAsyncSchema `yaml:"payload"` +} + +type yamlAsyncSchema struct { + Ref string `yaml:"$ref"` + Type string `yaml:"type"` + Properties map[string]yamlAsyncSchemaProperty `yaml:"properties"` +} + +type yamlAsyncSchemaProperty struct { + Type string `yaml:"type"` + Format string `yaml:"format"` +} diff --git a/mdl/types/doc.go b/mdl/types/doc.go new file mode 100644 index 00000000..fe45c6ad --- /dev/null +++ b/mdl/types/doc.go @@ -0,0 +1,7 @@ +// SPDX-License-Identifier: Apache-2.0 + +// Package types defines shared value types used in backend interfaces. +// These types are decoupled from sdk/mpr to enable WASM compilation of +// the mdl/ subtree. Conversion functions between these types and their +// sdk/mpr counterparts live in mdl/backend/mpr/. +package types diff --git a/mdl/types/edmx.go b/mdl/types/edmx.go new file mode 100644 index 00000000..17367b86 --- /dev/null +++ b/mdl/types/edmx.go @@ -0,0 +1,541 @@ +// SPDX-License-Identifier: Apache-2.0 + +package types + +import ( + "encoding/xml" + "fmt" + "strings" +) + +// EdmxDocument represents a parsed OData $metadata document (EDMX/CSDL). +// Supports both OData v3 (CSDL 2.0/3.0) and OData v4 (CSDL 4.0). +type EdmxDocument struct { + Version string // "1.0" (OData3) or "4.0" (OData4) + Schemas []*EdmSchema // Schema definitions + EntitySets []*EdmEntitySet // Entity sets from EntityContainer + Actions []*EdmAction // OData4 actions / OData3 function imports +} + +// EdmSchema represents an EDM schema namespace. 
+type EdmSchema struct { + Namespace string + EntityTypes []*EdmEntityType + EnumTypes []*EdmEnumType +} + +// EdmEntityType represents an entity type definition. +type EdmEntityType struct { + Name string + BaseType string // Qualified name of base type (e.g. "Microsoft...PlanItem"), empty if none + IsAbstract bool // True if + IsOpen bool // True if + KeyProperties []string + Properties []*EdmProperty + NavigationProperties []*EdmNavigationProperty + Summary string + Description string +} + +// EdmProperty represents a property on an entity type. +type EdmProperty struct { + Name string + Type string // e.g. "Edm.String", "Edm.Int64" + Nullable *bool // nil = not specified (default true) + MaxLength string // e.g. "200", "max" + Scale string // e.g. "variable" + + // Capability annotations (OData Core V1). When true, the property is not + // settable by the client: + // Computed = server-computed, not settable on create or update. + // Immutable = settable on create, but not on update. + Computed bool + Immutable bool +} + +// EdmNavigationProperty represents a navigation property (association). +type EdmNavigationProperty struct { + Name string + Type string // OData4: "DefaultNamespace.Customer" or "Collection(DefaultNamespace.Part)" + Partner string // OData4 partner property name + TargetType string // Resolved target entity type name (without namespace/Collection) + IsMany bool // true if Collection() + ContainsTarget bool // true if + // OData3 fields (from Association) + Relationship string + FromRole string + ToRole string +} + +// EdmEntitySet represents an entity set in the entity container. +type EdmEntitySet struct { + Name string + EntityType string // Qualified name of entity type + + // Capabilities derived from Org.OData.Capabilities.V1 annotations. + // nil = not specified (treat as default true). 
+ Insertable *bool // InsertRestrictions/Insertable + Updatable *bool // UpdateRestrictions/Updatable + Deletable *bool // DeleteRestrictions/Deletable + + // Navigation property names listed under + // Org.OData.Capabilities.V1.{Insert,Update}Restrictions/Non*NavigationProperties. + NonInsertableNavigationProperties []string + NonUpdatableNavigationProperties []string + + // Property names listed under + // Org.OData.Capabilities.V1.{Insert,Update}Restrictions/Non*Properties. + // Structural properties named here cannot be set on insert / update. + NonInsertableProperties []string + NonUpdatableProperties []string +} + +// EdmAction represents an OData4 action or OData3 function import. +type EdmAction struct { + Name string + IsBound bool + Parameters []*EdmActionParameter + ReturnType string +} + +// EdmActionParameter represents a parameter of an action. +type EdmActionParameter struct { + Name string + Type string + Nullable *bool +} + +// EdmEnumType represents an enumeration type. +type EdmEnumType struct { + Name string + Members []*EdmEnumMember +} + +// EdmEnumMember represents a member of an enum type. +type EdmEnumMember struct { + Name string + Value string +} + +// FindEntityType looks up an entity type by name (with or without namespace prefix). +func (d *EdmxDocument) FindEntityType(name string) *EdmEntityType { + // Strip namespace prefix if present + shortName := name + if idx := strings.LastIndex(name, "."); idx >= 0 { + shortName = name[idx+1:] + } + for _, s := range d.Schemas { + for _, et := range s.EntityTypes { + if et.Name == shortName { + return et + } + } + } + return nil +} + +// ParseEdmx parses an OData $metadata XML string into an EdmxDocument. 
+func ParseEdmx(metadataXML string) (*EdmxDocument, error) { + if metadataXML == "" { + return nil, fmt.Errorf("empty metadata XML") + } + + var edmx xmlEdmx + if err := xml.Unmarshal([]byte(metadataXML), &edmx); err != nil { + return nil, fmt.Errorf("failed to parse EDMX XML: %w", err) + } + + doc := &EdmxDocument{ + Version: edmx.Version, + } + + for _, ds := range edmx.DataServices { + for _, s := range ds.Schemas { + schema := &EdmSchema{ + Namespace: s.Namespace, + } + + // Parse entity types + for _, et := range s.EntityTypes { + entityType := parseXmlEntityType(&et) + schema.EntityTypes = append(schema.EntityTypes, entityType) + } + + // Parse enum types + for _, en := range s.EnumTypes { + enumType := &EdmEnumType{Name: en.Name} + for _, m := range en.Members { + enumType.Members = append(enumType.Members, &EdmEnumMember{ + Name: m.Name, + Value: m.Value, + }) + } + schema.EnumTypes = append(schema.EnumTypes, enumType) + } + + doc.Schemas = append(doc.Schemas, schema) + + // Parse entity container + for _, ec := range s.EntityContainers { + for _, es := range ec.EntitySets { + entitySet := &EdmEntitySet{ + Name: es.Name, + EntityType: es.EntityType, + } + applyCapabilityAnnotations(entitySet, es.Annotations) + doc.EntitySets = append(doc.EntitySets, entitySet) + } + + // OData3 function imports + for _, fi := range ec.FunctionImports { + action := &EdmAction{ + Name: fi.Name, + ReturnType: fi.ReturnType, + } + for _, p := range fi.Parameters { + action.Parameters = append(action.Parameters, &EdmActionParameter{ + Name: p.Name, + Type: p.Type, + }) + } + doc.Actions = append(doc.Actions, action) + } + } + + // OData4 actions + for _, a := range s.Actions { + action := &EdmAction{ + Name: a.Name, + IsBound: a.IsBound == "true", + } + if a.ReturnType != nil { + action.ReturnType = a.ReturnType.Type + } + for _, p := range a.Parameters { + param := &EdmActionParameter{ + Name: p.Name, + Type: p.Type, + } + if p.Nullable != "" { + v := p.Nullable == "true" + 
param.Nullable = &v + } + action.Parameters = append(action.Parameters, param) + } + doc.Actions = append(doc.Actions, action) + } + + // OData4 functions (treated same as actions for discovery) + for _, f := range s.Functions { + action := &EdmAction{ + Name: f.Name, + IsBound: f.IsBound == "true", + } + if f.ReturnType != nil { + action.ReturnType = f.ReturnType.Type + } + for _, p := range f.Parameters { + param := &EdmActionParameter{ + Name: p.Name, + Type: p.Type, + } + action.Parameters = append(action.Parameters, param) + } + doc.Actions = append(doc.Actions, action) + } + } + } + + return doc, nil +} + +func parseXmlEntityType(et *xmlEntityType) *EdmEntityType { + entityType := &EdmEntityType{ + Name: et.Name, + BaseType: et.BaseType, + IsAbstract: et.Abstract == "true", + IsOpen: et.OpenType == "true", + } + + // Parse key + if et.Key != nil { + for _, pr := range et.Key.PropertyRefs { + entityType.KeyProperties = append(entityType.KeyProperties, pr.Name) + } + } + + // Parse documentation (OData3 style) + if et.Documentation != nil { + entityType.Summary = et.Documentation.Summary + entityType.Description = et.Documentation.LongDescription + } + + // Parse annotations (OData4 style) + for _, ann := range et.Annotations { + switch ann.Term { + case "Org.OData.Core.V1.Description": + entityType.Summary = ann.String + case "Org.OData.Core.V1.LongDescription": + entityType.Description = ann.String + } + } + + // Parse properties + for _, p := range et.Properties { + prop := &EdmProperty{ + Name: p.Name, + Type: p.Type, + MaxLength: p.MaxLength, + Scale: p.Scale, + } + if p.Nullable != "" { + v := p.Nullable != "false" + prop.Nullable = &v + } + for _, ann := range p.Annotations { + switch ann.Term { + case "Org.OData.Core.V1.Computed": + prop.Computed = ann.Bool == "" || ann.Bool == "true" + case "Org.OData.Core.V1.Immutable": + prop.Immutable = ann.Bool == "" || ann.Bool == "true" + } + } + entityType.Properties = append(entityType.Properties, prop) + } + + 
// Parse navigation properties + for _, np := range et.NavigationProperties { + nav := &EdmNavigationProperty{ + Name: np.Name, + Type: np.Type, + Partner: np.Partner, + ContainsTarget: np.ContainsTarget == "true", + Relationship: np.Relationship, + FromRole: np.FromRole, + ToRole: np.ToRole, + } + + // Resolve target type from OData4 Type field + if np.Type != "" { + nav.TargetType, nav.IsMany = ResolveNavType(np.Type) + } + + entityType.NavigationProperties = append(entityType.NavigationProperties, nav) + } + + return entityType +} + +// applyCapabilityAnnotations reads Org.OData.Capabilities.V1.{Insert,Update, +// Delete}Restrictions annotations on an entity set and stores the relevant +// flags on the EdmEntitySet. +func applyCapabilityAnnotations(es *EdmEntitySet, annotations []xmlCapabilitiesAnnotation) { + for _, ann := range annotations { + if ann.Record == nil { + continue + } + switch ann.Term { + case "Org.OData.Capabilities.V1.InsertRestrictions": + for _, pv := range ann.Record.PropertyValues { + switch pv.Property { + case "Insertable": + if pv.Bool != "" { + v := pv.Bool == "true" + es.Insertable = &v + } + case "NonInsertableNavigationProperties": + if pv.Collection != nil { + es.NonInsertableNavigationProperties = pv.Collection.NavigationPropertyPaths + } + case "NonInsertableProperties": + if pv.Collection != nil { + es.NonInsertableProperties = pv.Collection.PropertyPaths + } + } + } + case "Org.OData.Capabilities.V1.UpdateRestrictions": + for _, pv := range ann.Record.PropertyValues { + switch pv.Property { + case "Updatable": + if pv.Bool != "" { + v := pv.Bool == "true" + es.Updatable = &v + } + case "NonUpdatableNavigationProperties": + if pv.Collection != nil { + es.NonUpdatableNavigationProperties = pv.Collection.NavigationPropertyPaths + } + case "NonUpdatableProperties": + if pv.Collection != nil { + es.NonUpdatableProperties = pv.Collection.PropertyPaths + } + } + } + case "Org.OData.Capabilities.V1.DeleteRestrictions": + for _, pv := 
range ann.Record.PropertyValues { + if pv.Property == "Deletable" && pv.Bool != "" { + v := pv.Bool == "true" + es.Deletable = &v + } + } + } + } +} + +// ResolveNavType parses "Collection(Namespace.Type)" or "Namespace.Type" into the short type name. +func ResolveNavType(t string) (typeName string, isMany bool) { + if strings.HasPrefix(t, "Collection(") && strings.HasSuffix(t, ")") { + isMany = true + t = t[len("Collection(") : len(t)-1] + } + if idx := strings.LastIndex(t, "."); idx >= 0 { + typeName = t[idx+1:] + } else { + typeName = t + } + return +} + +// ============================================================================ +// XML deserialization types (internal) +// ============================================================================ + +type xmlEdmx struct { + XMLName xml.Name `xml:"Edmx"` + Version string `xml:"Version,attr"` + DataServices []xmlDataServices `xml:"DataServices"` +} + +type xmlDataServices struct { + Schemas []xmlSchema `xml:"Schema"` +} + +type xmlSchema struct { + Namespace string `xml:"Namespace,attr"` + EntityTypes []xmlEntityType `xml:"EntityType"` + EnumTypes []xmlEnumType `xml:"EnumType"` + EntityContainers []xmlEntityContainer `xml:"EntityContainer"` + Actions []xmlAction `xml:"Action"` + Functions []xmlAction `xml:"Function"` +} + +type xmlEntityType struct { + Name string `xml:"Name,attr"` + BaseType string `xml:"BaseType,attr"` + Abstract string `xml:"Abstract,attr"` + OpenType string `xml:"OpenType,attr"` + Key *xmlKey `xml:"Key"` + Properties []xmlProperty `xml:"Property"` + NavigationProperties []xmlNavigationProperty `xml:"NavigationProperty"` + Documentation *xmlDocumentation `xml:"Documentation"` + Annotations []xmlAnnotation `xml:"Annotation"` +} + +type xmlKey struct { + PropertyRefs []xmlPropertyRef `xml:"PropertyRef"` +} + +type xmlPropertyRef struct { + Name string `xml:"Name,attr"` +} + +type xmlProperty struct { + Name string `xml:"Name,attr"` + Type string `xml:"Type,attr"` + Nullable string 
`xml:"Nullable,attr"` + MaxLength string `xml:"MaxLength,attr"` + Scale string `xml:"Scale,attr"` + Annotations []xmlAnnotation `xml:"Annotation"` +} + +type xmlNavigationProperty struct { + Name string `xml:"Name,attr"` + Type string `xml:"Type,attr"` // OData4 + Partner string `xml:"Partner,attr"` // OData4 + ContainsTarget string `xml:"ContainsTarget,attr"` // OData4: contained nav target (e.g. Person.Trips) + Relationship string `xml:"Relationship,attr"` // OData3 + FromRole string `xml:"FromRole,attr"` // OData3 + ToRole string `xml:"ToRole,attr"` // OData3 +} + +type xmlDocumentation struct { + Summary string `xml:"Summary"` + LongDescription string `xml:"LongDescription"` +} + +type xmlAnnotation struct { + Term string `xml:"Term,attr"` + String string `xml:"String,attr"` + Bool string `xml:"Bool,attr"` +} + +type xmlEntityContainer struct { + Name string `xml:"Name,attr"` + EntitySets []xmlEntitySet `xml:"EntitySet"` + FunctionImports []xmlFunctionImport `xml:"FunctionImport"` +} + +type xmlEntitySet struct { + Name string `xml:"Name,attr"` + EntityType string `xml:"EntityType,attr"` + Annotations []xmlCapabilitiesAnnotation `xml:"Annotation"` +} + +// xmlCapabilitiesAnnotation captures the bits of OData V1 Capabilities +// annotations we care about. The wrapping contains +// and (sometimes) +// +// Trips. 
+type xmlCapabilitiesAnnotation struct { + Term string `xml:"Term,attr"` + Record *xmlCapabilitiesRecord `xml:"Record"` +} + +type xmlCapabilitiesRecord struct { + PropertyValues []xmlCapabilitiesPropertyValue `xml:"PropertyValue"` +} + +type xmlCapabilitiesPropertyValue struct { + Property string `xml:"Property,attr"` + Bool string `xml:"Bool,attr"` + Collection *xmlCapabilitiesCollection `xml:"Collection"` +} + +type xmlCapabilitiesCollection struct { + NavigationPropertyPaths []string `xml:"NavigationPropertyPath"` + PropertyPaths []string `xml:"PropertyPath"` +} + +type xmlFunctionImport struct { + Name string `xml:"Name,attr"` + ReturnType string `xml:"ReturnType,attr"` + Parameters []xmlActionParam `xml:"Parameter"` +} + +type xmlAction struct { + Name string `xml:"Name,attr"` + IsBound string `xml:"IsBound,attr"` + ReturnType *xmlReturnType `xml:"ReturnType"` + Parameters []xmlActionParam `xml:"Parameter"` +} + +type xmlReturnType struct { + Type string `xml:"Type,attr"` + Nullable string `xml:"Nullable,attr"` +} + +type xmlActionParam struct { + Name string `xml:"Name,attr"` + Type string `xml:"Type,attr"` + Nullable string `xml:"Nullable,attr"` +} + +type xmlEnumType struct { + Name string `xml:"Name,attr"` + Members []xmlEnumMember `xml:"Member"` +} + +type xmlEnumMember struct { + Name string `xml:"Name,attr"` + Value string `xml:"Value,attr"` +} diff --git a/mdl/types/id.go b/mdl/types/id.go new file mode 100644 index 00000000..528f145a --- /dev/null +++ b/mdl/types/id.go @@ -0,0 +1,103 @@ +// SPDX-License-Identifier: Apache-2.0 + +package types + +import ( + "crypto/rand" + "crypto/sha256" + "encoding/hex" + "fmt" + "strings" +) + +// GenerateID generates a new unique UUID v4 for model elements. 
func GenerateID() string {
	b := make([]byte, 16)
	// crypto/rand.Read does not fail on supported platforms; the error is
	// deliberately discarded.
	_, _ = rand.Read(b)
	b[6] = (b[6] & 0x0f) | 0x40 // force version nibble to 4
	b[8] = (b[8] & 0x3f) | 0x80 // force variant bits to 10
	return hexUUID(b)
}

// hexUUID renders 16 bytes in canonical 8-4-4-4-12 UUID form.
func hexUUID(b []byte) string {
	s := hex.EncodeToString(b)
	return s[:8] + "-" + s[8:12] + "-" + s[12:16] + "-" + s[16:20] + "-" + s[20:]
}

// GenerateDeterministicID generates a stable UUID-shaped string from a seed.
// Used for System module entities that aren't in the MPR but need consistent IDs.
func GenerateDeterministicID(seed string) string {
	sum := sha256.Sum256([]byte(seed))
	return fmt.Sprintf("%08x-%04x-%04x-%04x-%012x",
		sum[0:4], sum[4:6], sum[6:8], sum[8:10], sum[10:16])
}

// BlobToUUID converts a 16-byte binary ID blob to a UUID string.
// Non-16-byte input is rendered as plain hex with no dashes.
func BlobToUUID(data []byte) string {
	if len(data) != 16 {
		return hex.EncodeToString(data)
	}
	// Microsoft GUID layout: the first three groups are stored little-endian,
	// the remaining bytes big-endian.
	ordered := []byte{
		data[3], data[2], data[1], data[0],
		data[5], data[4],
		data[7], data[6],
		data[8], data[9], data[10], data[11], data[12], data[13], data[14], data[15],
	}
	return hexUUID(ordered)
}

// UUIDToBlob converts a UUID string to a 16-byte blob in Microsoft GUID
// format. Returns nil for empty or malformed input.
func UUIDToBlob(uuid string) []byte {
	if uuid == "" {
		return nil
	}
	decoded, err := hex.DecodeString(strings.ReplaceAll(uuid, "-", ""))
	if err != nil || len(decoded) != 16 {
		return nil
	}
	blob := make([]byte, 16)
	blob[0], blob[1], blob[2], blob[3] = decoded[3], decoded[2], decoded[1], decoded[0]
	blob[4], blob[5] = decoded[5], decoded[4]
	blob[6], blob[7] = decoded[7], decoded[6]
	copy(blob[8:], decoded[8:])
	return blob
}

// ValidateID checks if an ID is a valid UUID format (8-4-4-4-12 hex groups).
+func ValidateID(id string) bool { + if len(id) != 36 { + return false + } + for i, c := range id { + if i == 8 || i == 13 || i == 18 || i == 23 { + if c != '-' { + return false + } + } else { + if !((c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')) { + return false + } + } + } + return true +} + +// Hash computes a hash for content (used for content deduplication). +func Hash(content []byte) string { + var sum uint64 + for i, b := range content { + sum += uint64(b) * uint64(i+1) + } + return fmt.Sprintf("%016x", sum) +} diff --git a/mdl/types/infrastructure.go b/mdl/types/infrastructure.go new file mode 100644 index 00000000..75b5bf6b --- /dev/null +++ b/mdl/types/infrastructure.go @@ -0,0 +1,93 @@ +// SPDX-License-Identifier: Apache-2.0 + +package types + +import "github.com/mendixlabs/mxcli/model" + +// MPRVersion identifies the MPR file format version. +type MPRVersion int + +const ( + MPRVersionV1 MPRVersion = 1 // single-file format + MPRVersionV2 MPRVersion = 2 // mprcontents folder (Mendix 10.18+) +) + +// ProjectVersion holds the parsed Mendix project version. +type ProjectVersion struct { + ProductVersion string + BuildVersion string + FormatVersion int + SchemaHash string + MajorVersion int + MinorVersion int + PatchVersion int +} + +// FolderInfo is a lightweight folder descriptor. +type FolderInfo struct { + ID model.ID + ContainerID model.ID + Name string +} + +// UnitInfo is a lightweight unit descriptor. +type UnitInfo struct { + ID model.ID + ContainerID model.ID + ContainmentName string + Type string +} + +// RenameHit records a single rename reference replacement. +type RenameHit struct { + UnitID string + UnitType string + Name string + Count int +} + +// RawUnit holds a unit's raw BSON contents. +type RawUnit struct { + ID model.ID + ContainerID model.ID + Type string + Contents []byte +} + +// RawUnitInfo holds a unit's raw contents with metadata. 
+type RawUnitInfo struct { + ID string + QualifiedName string + Type string + ModuleName string + Contents []byte +} + +// RawCustomWidgetType holds a custom widget's raw type/object data. +// RawType and RawObject are bson.D in sdk/mpr; here they are any to +// avoid a BSON driver dependency. +type RawCustomWidgetType struct { + WidgetID string + RawType any + RawObject any + UnitID string + UnitName string + WidgetName string +} + +// EntityMemberAccess describes access rights for a single entity member. +type EntityMemberAccess struct { + AttributeRef string + AssociationRef string + AccessRights string +} + +// EntityAccessRevocation describes which entity access to revoke. +type EntityAccessRevocation struct { + RevokeCreate bool + RevokeDelete bool + RevokeReadMembers []string + RevokeWriteMembers []string + RevokeReadAll bool + RevokeWriteAll bool +} diff --git a/mdl/types/java.go b/mdl/types/java.go new file mode 100644 index 00000000..f9da1ab8 --- /dev/null +++ b/mdl/types/java.go @@ -0,0 +1,54 @@ +// SPDX-License-Identifier: Apache-2.0 + +package types + +import ( + "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/javaactions" +) + +// JavaAction is a lightweight Java action descriptor. +type JavaAction struct { + model.BaseElement + ContainerID model.ID `json:"containerId"` + Name string `json:"name"` + Documentation string `json:"documentation,omitempty"` +} + +// GetName returns the Java action's name. +func (ja *JavaAction) GetName() string { return ja.Name } + +// GetContainerID returns the container ID. +func (ja *JavaAction) GetContainerID() model.ID { return ja.ContainerID } + +// JavaScriptAction is a JavaScript action descriptor. 
+type JavaScriptAction struct { + model.BaseElement + ContainerID model.ID `json:"containerId"` + Name string `json:"name"` + Documentation string `json:"documentation,omitempty"` + Platform string `json:"platform,omitempty"` + Excluded bool `json:"excluded"` + ExportLevel string `json:"exportLevel,omitempty"` + ActionDefaultReturnName string `json:"actionDefaultReturnName,omitempty"` + ReturnType javaactions.CodeActionReturnType `json:"returnType,omitempty"` + Parameters []*javaactions.JavaActionParameter `json:"parameters,omitempty"` + TypeParameters []*javaactions.TypeParameterDef `json:"typeParameters,omitempty"` + MicroflowActionInfo *javaactions.MicroflowActionInfo `json:"microflowActionInfo,omitempty"` +} + +// GetName returns the JavaScript action's name. +func (jsa *JavaScriptAction) GetName() string { return jsa.Name } + +// GetContainerID returns the container ID. +func (jsa *JavaScriptAction) GetContainerID() model.ID { return jsa.ContainerID } + +// FindTypeParameterName looks up a type parameter name by its ID. +func (jsa *JavaScriptAction) FindTypeParameterName(id model.ID) string { + for _, tp := range jsa.TypeParameters { + if tp.ID == id { + return tp.Name + } + } + return "" +} diff --git a/mdl/types/json_utils.go b/mdl/types/json_utils.go new file mode 100644 index 00000000..7a9d8da8 --- /dev/null +++ b/mdl/types/json_utils.go @@ -0,0 +1,374 @@ +// SPDX-License-Identifier: Apache-2.0 + +package types + +import ( + "bytes" + "encoding/json" + "fmt" + "math" + "regexp" + "strings" + "unicode" +) + +// iso8601Pattern matches common ISO 8601 datetime strings that Mendix Studio Pro +// recognizes as DateTime primitive types in JSON structures. +var iso8601Pattern = regexp.MustCompile( + `^\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2}(:\d{2})?(\.\d+)?(Z|[+-]\d{2}:?\d{2})?$`, +) + +// PrettyPrintJSON re-formats a JSON string with standard indentation. +// Returns the original string if it is not valid JSON. 
func PrettyPrintJSON(s string) string {
	var buf bytes.Buffer
	// json.Indent fails on invalid JSON; fall back to the input unchanged.
	if err := json.Indent(&buf, []byte(s), "", " "); err != nil {
		return s
	}
	return buf.String()
}

// normalizeDateTimeValue pads fractional seconds to 7 digits to match
// Studio Pro's .NET DateTime format (e.g., "2015-05-22T14:56:29.000Z" →
// "2015-05-22T14:56:29.0000000Z"). Values with no fractional part get
// ".0000000" inserted before the timezone designator; values with no
// timezone designator are returned unchanged.
func normalizeDateTimeValue(s string) string {
	// Find the decimal point after seconds
	dotIdx := strings.Index(s, ".")
	if dotIdx == -1 {
		// No fractional part — insert .0000000 before the timezone suffix.
		// BUG FIX: search only after the date/time separator ('T' or ' ');
		// a whole-string IndexAny would match the '-' date separators first
		// (e.g. index 4 of "2015-05-22T14:56:29Z") and corrupt the value.
		sep := strings.IndexAny(s, "T ")
		if sep >= 0 {
			if idx := strings.IndexAny(s[sep:], "Z+-"); idx >= 0 {
				idx += sep
				return s[:idx] + ".0000000" + s[idx:]
			}
		}
		return s
	}
	// Find where fractional digits end (at Z, +, - or end of string)
	fracEnd := len(s)
	for i := dotIdx + 1; i < len(s); i++ {
		if s[i] < '0' || s[i] > '9' {
			fracEnd = i
			break
		}
	}
	// Pad to exactly 7 fractional digits, truncating any excess.
	frac := s[dotIdx+1 : fracEnd]
	if len(frac) < 7 {
		frac = frac + strings.Repeat("0", 7-len(frac))
	} else {
		frac = frac[:7]
	}
	return s[:dotIdx+1] + frac + s[fracEnd:]
}

// BuildJsonElementsFromSnippet parses a JSON snippet and builds the element tree
// that Mendix Studio Pro would generate. Returns the root element.
// The optional customNameMap maps JSON keys to custom ExposedNames (as set in
// Studio Pro's "Custom name" column). Unmapped keys use auto-generated names.
+func BuildJsonElementsFromSnippet(snippet string, customNameMap map[string]string) ([]*JsonElement, error) { + // Validate JSON + if !json.Valid([]byte(snippet)) { + return nil, fmt.Errorf("invalid JSON snippet") + } + + // Detect root type (object or array) + dec := json.NewDecoder(strings.NewReader(snippet)) + tok, err := dec.Token() + if err != nil { + return nil, fmt.Errorf("failed to parse JSON snippet: %w", err) + } + + b := &snippetBuilder{customNameMap: customNameMap} + tracker := &nameTracker{seen: make(map[string]int)} + + switch tok { + case json.Delim('{'): + root := b.buildElementFromRawObject("Root", "(Object)", snippet, tracker) + root.MinOccurs = 0 + root.MaxOccurs = 0 + root.Nillable = true + return []*JsonElement{root}, nil + + case json.Delim('['): + root := b.buildElementFromRawRootArray("Root", "(Array)", snippet, tracker) + root.MinOccurs = 0 + root.MaxOccurs = 0 + root.Nillable = true + return []*JsonElement{root}, nil + + default: + return nil, fmt.Errorf("JSON snippet must be an object or array at root level") + } +} + +// snippetBuilder holds state for building the element tree from a JSON snippet. +type snippetBuilder struct { + customNameMap map[string]string // JSON key → custom ExposedName +} + +// reservedExposedNames are element names that Mendix rejects as ExposedName values. +// Studio Pro handles these by prefixing with underscore and keeping original case. +var reservedExposedNames = map[string]bool{ + "Id": true, "Type": true, +} + +// resolveExposedName returns the custom name if mapped, otherwise capitalizes the JSON key. +// Reserved names (Id, Type, Name) are prefixed with underscore to match Studio Pro behavior. 
+func (b *snippetBuilder) resolveExposedName(jsonKey string) string { + if b.customNameMap != nil { + if custom, ok := b.customNameMap[jsonKey]; ok { + return custom + } + } + name := capitalizeFirst(jsonKey) + if reservedExposedNames[name] { + return "_" + jsonKey + } + return name +} + +// nameTracker tracks used ExposedNames at each level to handle duplicates. +type nameTracker struct { + seen map[string]int +} + +func (t *nameTracker) uniqueName(base string) string { + t.seen[base]++ + count := t.seen[base] + if count == 1 { + return base + } + return fmt.Sprintf("%s_%d", base, count) +} + +func (t *nameTracker) child() *nameTracker { + return &nameTracker{seen: make(map[string]int)} +} + +// capitalizeFirst capitalizes the first letter of a string for ExposedName. +func capitalizeFirst(s string) string { + if s == "" { + return s + } + runes := []rune(s) + runes[0] = unicode.ToUpper(runes[0]) + return string(runes) +} + +// buildElementFromRawObject builds an Object element by decoding a raw JSON object string, +// preserving the original key order (Go's map[string]any loses order). 
+func (b *snippetBuilder) buildElementFromRawObject(exposedName, path, rawJSON string, tracker *nameTracker) *JsonElement { + elem := &JsonElement{ + ExposedName: exposedName, + Path: path, + ElementType: "Object", + PrimitiveType: "Unknown", + MinOccurs: 0, + MaxOccurs: 0, + Nillable: true, + MaxLength: -1, + FractionDigits: -1, + TotalDigits: -1, + } + + childTracker := tracker.child() + + // Decode with key order preserved + dec := json.NewDecoder(strings.NewReader(rawJSON)) + if _, err := dec.Token(); err != nil { // opening { + return elem + } + for dec.More() { + tok, err := dec.Token() + if err != nil { + break + } + key, ok := tok.(string) + if !ok { + continue + } + // Capture the raw value to pass down for nested objects/arrays + var rawVal json.RawMessage + if err := dec.Decode(&rawVal); err != nil { + break + } + + childName := childTracker.uniqueName(b.resolveExposedName(key)) + childPath := path + "|" + key + child := b.buildElementFromRawValue(childName, childPath, key, rawVal, childTracker) + elem.Children = append(elem.Children, child) + } + + return elem +} + +// buildElementFromRawValue inspects a json.RawMessage to determine its type and build the element. 
+func (b *snippetBuilder) buildElementFromRawValue(exposedName, path, jsonKey string, raw json.RawMessage, tracker *nameTracker) *JsonElement { + trimmed := strings.TrimSpace(string(raw)) + + // Object — recurse with raw JSON to preserve key order + if len(trimmed) > 0 && trimmed[0] == '{' { + return b.buildElementFromRawObject(exposedName, path, trimmed, tracker) + } + + // Array + if len(trimmed) > 0 && trimmed[0] == '[' { + return b.buildElementFromRawArray(exposedName, path, jsonKey, trimmed, tracker) + } + + // Primitive — unmarshal to determine type + var val any + json.Unmarshal(raw, &val) + + switch v := val.(type) { + case string: + primitiveType := "String" + if iso8601Pattern.MatchString(v) { + primitiveType = "DateTime" + v = normalizeDateTimeValue(v) + } + return buildValueElement(exposedName, path, primitiveType, fmt.Sprintf("%q", v)) + case float64: + // Check the raw JSON text for a decimal point — Go's %v drops ".0" from 41850.0 + if v == math.Trunc(v) && !strings.Contains(trimmed, ".") { + return buildValueElement(exposedName, path, "Integer", fmt.Sprintf("%v", int64(v))) + } + return buildValueElement(exposedName, path, "Decimal", fmt.Sprintf("%v", v)) + case bool: + return buildValueElement(exposedName, path, "Boolean", fmt.Sprintf("%v", v)) + case nil: + // JSON null → Unknown primitive type (matches Studio Pro) + return buildValueElement(exposedName, path, "Unknown", "") + default: + return buildValueElement(exposedName, path, "String", "") + } +} + +// buildElementFromRawRootArray builds a root-level Array element. +// Studio Pro names the child object "JsonObject" (not "RootItem") for root arrays. 
+func (b *snippetBuilder) buildElementFromRawRootArray(exposedName, path, rawJSON string, tracker *nameTracker) *JsonElement { + arrayElem := &JsonElement{ + ExposedName: exposedName, + Path: path, + ElementType: "Array", + PrimitiveType: "Unknown", + MinOccurs: 0, + MaxOccurs: 0, + Nillable: true, + MaxLength: -1, + FractionDigits: -1, + TotalDigits: -1, + } + + dec := json.NewDecoder(strings.NewReader(rawJSON)) + dec.Token() // opening [ + if dec.More() { + var firstItem json.RawMessage + dec.Decode(&firstItem) + + itemPath := path + "|(Object)" + trimmed := strings.TrimSpace(string(firstItem)) + + if len(trimmed) > 0 && trimmed[0] == '{' { + itemElem := b.buildElementFromRawObject("JsonObject", itemPath, trimmed, tracker) + itemElem.MinOccurs = 0 + itemElem.MaxOccurs = 0 + itemElem.Nillable = true + arrayElem.Children = append(arrayElem.Children, itemElem) + } else { + child := b.buildElementFromRawValue("JsonObject", itemPath, "", firstItem, tracker) + child.MinOccurs = 0 + child.MaxOccurs = 0 + arrayElem.Children = append(arrayElem.Children, child) + } + } + + return arrayElem +} + +// buildElementFromRawArray builds an Array element, using the first item's raw JSON for ordering. +// For primitive arrays (strings, numbers), Studio Pro creates a Wrapper element with a Value child. 
+func (b *snippetBuilder) buildElementFromRawArray(exposedName, path, jsonKey, rawJSON string, tracker *nameTracker) *JsonElement { + arrayElem := &JsonElement{ + ExposedName: exposedName, + Path: path, + ElementType: "Array", + PrimitiveType: "Unknown", + MinOccurs: 0, + MaxOccurs: 0, + Nillable: true, + MaxLength: -1, + FractionDigits: -1, + TotalDigits: -1, + } + + // Decode array and get first element as raw JSON + dec := json.NewDecoder(strings.NewReader(rawJSON)) + dec.Token() // opening [ + if dec.More() { + var firstItem json.RawMessage + dec.Decode(&firstItem) + + trimmed := strings.TrimSpace(string(firstItem)) + + if len(trimmed) > 0 && trimmed[0] == '{' { + // Object array: child is NameItem object + itemName := exposedName + "Item" + itemPath := path + "|(Object)" + itemElem := b.buildElementFromRawObject(itemName, itemPath, trimmed, tracker) + itemElem.MinOccurs = 0 + itemElem.MaxOccurs = -1 + itemElem.Nillable = true + arrayElem.Children = append(arrayElem.Children, itemElem) + } else { + // Primitive array: Studio Pro wraps in a Wrapper element with singular name + // e.g., tags: ["a","b"] → Tag (Wrapper) → Value (String) + wrapperName := singularize(exposedName) + wrapperPath := path + "|(Object)" + wrapper := &JsonElement{ + ExposedName: wrapperName, + Path: wrapperPath, + ElementType: "Wrapper", + PrimitiveType: "Unknown", + MinOccurs: 0, + MaxOccurs: 0, + Nillable: true, + MaxLength: -1, + FractionDigits: -1, + TotalDigits: -1, + } + valueElem := b.buildElementFromRawValue("Value", wrapperPath+"|", jsonKey, firstItem, tracker) + valueElem.MinOccurs = 0 + valueElem.MaxOccurs = 0 + wrapper.Children = append(wrapper.Children, valueElem) + arrayElem.Children = append(arrayElem.Children, wrapper) + } + } + + return arrayElem +} + +// singularize returns a simple singular form by stripping trailing "s". +// Handles common cases: Tags→Tag, Items→Item, Addresses→Addresse. 
+func singularize(s string) string { + if len(s) > 1 && strings.HasSuffix(s, "s") { + return s[:len(s)-1] + } + return s +} + +func buildValueElement(exposedName, path, primitiveType, originalValue string) *JsonElement { + maxLength := -1 + if primitiveType == "String" { + maxLength = 0 + } + return &JsonElement{ + ExposedName: exposedName, + Path: path, + ElementType: "Value", + PrimitiveType: primitiveType, + MinOccurs: 0, + MaxOccurs: 0, + Nillable: true, + MaxLength: maxLength, + FractionDigits: -1, + TotalDigits: -1, + OriginalValue: originalValue, + } +} diff --git a/mdl/types/mapping.go b/mdl/types/mapping.go new file mode 100644 index 00000000..4e3a193e --- /dev/null +++ b/mdl/types/mapping.go @@ -0,0 +1,65 @@ +// SPDX-License-Identifier: Apache-2.0 + +package types + +import "github.com/mendixlabs/mxcli/model" + +// JsonStructure represents a JSON structure document. +type JsonStructure struct { + model.BaseElement + ContainerID model.ID `json:"containerId"` + Name string `json:"name"` + Documentation string `json:"documentation,omitempty"` + JsonSnippet string `json:"jsonSnippet,omitempty"` + Elements []*JsonElement `json:"elements,omitempty"` + Excluded bool `json:"excluded,omitempty"` + ExportLevel string `json:"exportLevel,omitempty"` +} + +// GetName returns the JSON structure's name. +func (js *JsonStructure) GetName() string { return js.Name } + +// GetContainerID returns the container ID. +func (js *JsonStructure) GetContainerID() model.ID { return js.ContainerID } + +// JsonElement represents a single element in a JSON structure (recursive). 
+type JsonElement struct { + ExposedName string `json:"exposedName"` + ExposedItemName string `json:"exposedItemName,omitempty"` + Path string `json:"path"` + ElementType string `json:"elementType"` + PrimitiveType string `json:"primitiveType"` + MinOccurs int `json:"minOccurs"` + MaxOccurs int `json:"maxOccurs"` + Nillable bool `json:"nillable,omitempty"` + IsDefaultType bool `json:"isDefaultType,omitempty"` + MaxLength int `json:"maxLength"` + FractionDigits int `json:"fractionDigits"` + TotalDigits int `json:"totalDigits"` + OriginalValue string `json:"originalValue,omitempty"` + Children []*JsonElement `json:"children,omitempty"` +} + +// ImageCollection represents an image collection document. +type ImageCollection struct { + model.BaseElement + ContainerID model.ID `json:"containerId"` + Name string `json:"name"` + ExportLevel string `json:"exportLevel,omitempty"` + Documentation string `json:"documentation,omitempty"` + Images []Image `json:"images,omitempty"` +} + +// GetName returns the image collection's name. +func (ic *ImageCollection) GetName() string { return ic.Name } + +// GetContainerID returns the container ID. +func (ic *ImageCollection) GetContainerID() model.ID { return ic.ContainerID } + +// Image represents a single image in an image collection. +type Image struct { + ID model.ID `json:"id"` + Name string `json:"name"` + Data []byte `json:"data,omitempty"` + Format string `json:"format,omitempty"` +} diff --git a/mdl/types/navigation.go b/mdl/types/navigation.go new file mode 100644 index 00000000..b3a14dac --- /dev/null +++ b/mdl/types/navigation.go @@ -0,0 +1,85 @@ +// SPDX-License-Identifier: Apache-2.0 + +package types + +import "github.com/mendixlabs/mxcli/model" + +// NavigationDocument represents a parsed navigation document. 
// NavigationDocument is the parsed representation of a project's navigation
// document: a named unit holding zero or more profiles.
type NavigationDocument struct {
	model.BaseElement
	ContainerID model.ID             `json:"containerId"`
	Name        string               `json:"name"`
	Profiles    []*NavigationProfile `json:"profiles,omitempty"`
}

// GetName returns the navigation document's name.
func (nd *NavigationDocument) GetName() string { return nd.Name }

// GetContainerID returns the container ID.
func (nd *NavigationDocument) GetContainerID() model.ID { return nd.ContainerID }

// NavigationProfile represents a single navigation profile: its home pages,
// special pages, menu tree, and offline sync declarations.
type NavigationProfile struct {
	Name               string              `json:"name"`
	Kind               string              `json:"kind"` // profile kind; exact values not visible here — confirm against parser
	IsNative           bool                `json:"isNative"`
	HomePage           *NavHomePage        `json:"homePage,omitempty"`
	RoleBasedHomePages []*NavRoleBasedHome `json:"roleBasedHomePages,omitempty"`
	LoginPage          string              `json:"loginPage,omitempty"`
	NotFoundPage       string              `json:"notFoundPage,omitempty"`
	MenuItems          []*NavMenuItem      `json:"menuItems,omitempty"`
	OfflineEntities    []*NavOfflineEntity `json:"offlineEntities,omitempty"`
}

// NavHomePage holds a profile's default home page; either a page or a
// microflow target may be set.
type NavHomePage struct {
	Page      string `json:"page,omitempty"`
	Microflow string `json:"microflow,omitempty"`
}

// NavRoleBasedHome maps a user role to a home page (page or microflow target).
type NavRoleBasedHome struct {
	UserRole  string `json:"userRole"`
	Page      string `json:"page,omitempty"`
	Microflow string `json:"microflow,omitempty"`
}

// NavMenuItem is a recursive navigation menu entry; Items holds sub-entries.
type NavMenuItem struct {
	Caption    string         `json:"caption"`
	Page       string         `json:"page,omitempty"`
	Microflow  string         `json:"microflow,omitempty"`
	ActionType string         `json:"actionType,omitempty"`
	Items      []*NavMenuItem `json:"items,omitempty"`
}

// NavOfflineEntity declares offline sync rules for an entity.
type NavOfflineEntity struct {
	Entity     string `json:"entity"`
	SyncMode   string `json:"syncMode"`
	Constraint string `json:"constraint,omitempty"`
}

// NavigationProfileSpec specifies changes to a navigation profile.
+type NavigationProfileSpec struct { + HomePages []NavHomePageSpec + LoginPage string + NotFoundPage string + MenuItems []NavMenuItemSpec + HasMenu bool +} + +// NavHomePageSpec specifies a home page assignment. +type NavHomePageSpec struct { + IsPage bool + Target string + ForRole string +} + +// NavMenuItemSpec specifies a menu item (recursive). +type NavMenuItemSpec struct { + Caption string + Page string + Microflow string + Items []NavMenuItemSpec +} diff --git a/sdk/mpr/asyncapi.go b/sdk/mpr/asyncapi.go index c0444b63..2bedd790 100644 --- a/sdk/mpr/asyncapi.go +++ b/sdk/mpr/asyncapi.go @@ -3,203 +3,16 @@ package mpr import ( - "fmt" - "strings" - - "gopkg.in/yaml.v3" + "github.com/mendixlabs/mxcli/mdl/types" ) -// AsyncAPIDocument represents a parsed AsyncAPI 2.x document. -type AsyncAPIDocument struct { - Version string // AsyncAPI version (e.g. "2.2.0") - Title string // Service title - DocVersion string // Document version - Description string - Channels []*AsyncAPIChannel // Resolved channels - Messages []*AsyncAPIMessage // Resolved messages (from components) -} - -// AsyncAPIChannel represents a channel in the AsyncAPI document. -type AsyncAPIChannel struct { - Name string // Channel ID/name - OperationType string // "subscribe" or "publish" - OperationID string // e.g. "receiveOrderChangedEventEvents" - MessageRef string // Resolved message name -} - -// AsyncAPIMessage represents a message type. -type AsyncAPIMessage struct { - Name string - Title string - Description string - ContentType string - Properties []*AsyncAPIProperty // Resolved from payload schema -} +// Type aliases — all AsyncAPI types now live in mdl/types. +type AsyncAPIDocument = types.AsyncAPIDocument +type AsyncAPIChannel = types.AsyncAPIChannel +type AsyncAPIMessage = types.AsyncAPIMessage +type AsyncAPIProperty = types.AsyncAPIProperty -// AsyncAPIProperty represents a property in a message payload schema. 
-type AsyncAPIProperty struct { - Name string - Type string // "string", "integer", "number", "boolean", "array", "object" - Format string // "int64", "int32", "date-time", "uri-reference", etc. -} - -// ParseAsyncAPI parses an AsyncAPI YAML string into an AsyncAPIDocument. +// ParseAsyncAPI delegates to types.ParseAsyncAPI. func ParseAsyncAPI(yamlStr string) (*AsyncAPIDocument, error) { - if yamlStr == "" { - return nil, fmt.Errorf("empty AsyncAPI document") - } - - var raw yamlAsyncAPI - if err := yaml.Unmarshal([]byte(yamlStr), &raw); err != nil { - return nil, fmt.Errorf("failed to parse AsyncAPI YAML: %w", err) - } - - doc := &AsyncAPIDocument{ - Version: raw.AsyncAPI, - Title: raw.Info.Title, - DocVersion: raw.Info.Version, - Description: raw.Info.Description, - } - - // Resolve messages from components - for name, msg := range raw.Components.Messages { - resolved := &AsyncAPIMessage{ - Name: name, - Title: msg.Title, - Description: msg.Description, - ContentType: msg.ContentType, - } - - // Resolve payload schema (follow $ref if present) - schemaName := "" - if msg.Payload.Ref != "" { - schemaName = refName(msg.Payload.Ref) - } - - if schemaName != "" { - if schema, ok := raw.Components.Schemas[schemaName]; ok { - resolved.Properties = resolveSchemaProperties(schema) - } - } else if msg.Payload.Properties != nil { - // Inline schema - resolved.Properties = resolveSchemaProperties(msg.Payload) - } - - doc.Messages = append(doc.Messages, resolved) - } - - // Resolve channels - for channelName, channel := range raw.Channels { - if channel.Subscribe != nil { - msgName := "" - if channel.Subscribe.Message.Ref != "" { - msgName = refName(channel.Subscribe.Message.Ref) - } - doc.Channels = append(doc.Channels, &AsyncAPIChannel{ - Name: channelName, - OperationType: "subscribe", - OperationID: channel.Subscribe.OperationID, - MessageRef: msgName, - }) - } - if channel.Publish != nil { - msgName := "" - if channel.Publish.Message.Ref != "" { - msgName = 
refName(channel.Publish.Message.Ref) - } - doc.Channels = append(doc.Channels, &AsyncAPIChannel{ - Name: channelName, - OperationType: "publish", - OperationID: channel.Publish.OperationID, - MessageRef: msgName, - }) - } - } - - return doc, nil -} - -// FindMessage looks up a message by name. -func (d *AsyncAPIDocument) FindMessage(name string) *AsyncAPIMessage { - for _, m := range d.Messages { - if strings.EqualFold(m.Name, name) { - return m - } - } - return nil -} - -// refName extracts the last segment from a $ref like "#/components/messages/OrderChangedEvent". -func refName(ref string) string { - if idx := strings.LastIndex(ref, "/"); idx >= 0 { - return ref[idx+1:] - } - return ref -} - -func resolveSchemaProperties(schema yamlSchema) []*AsyncAPIProperty { - var props []*AsyncAPIProperty - for name, prop := range schema.Properties { - props = append(props, &AsyncAPIProperty{ - Name: name, - Type: prop.Type, - Format: prop.Format, - }) - } - return props -} - -// ============================================================================ -// YAML deserialization types (internal) -// ============================================================================ - -type yamlAsyncAPI struct { - AsyncAPI string `yaml:"asyncapi"` - Info yamlInfo `yaml:"info"` - Channels map[string]yamlChannel `yaml:"channels"` - Components yamlComponents `yaml:"components"` -} - -type yamlInfo struct { - Title string `yaml:"title"` - Version string `yaml:"version"` - Description string `yaml:"description"` -} - -type yamlChannel struct { - Subscribe *yamlOperation `yaml:"subscribe"` - Publish *yamlOperation `yaml:"publish"` -} - -type yamlOperation struct { - OperationID string `yaml:"operationId"` - Message yamlRef `yaml:"message"` -} - -type yamlRef struct { - Ref string `yaml:"$ref"` -} - -type yamlComponents struct { - Messages map[string]yamlMessage `yaml:"messages"` - Schemas map[string]yamlSchema `yaml:"schemas"` -} - -type yamlMessage struct { - Name string `yaml:"name"` - 
Title string `yaml:"title"` - Description string `yaml:"description"` - ContentType string `yaml:"contentType"` - Payload yamlSchema `yaml:"payload"` -} - -type yamlSchema struct { - Ref string `yaml:"$ref"` - Type string `yaml:"type"` - Properties map[string]yamlSchemaProperty `yaml:"properties"` -} - -type yamlSchemaProperty struct { - Type string `yaml:"type"` - Format string `yaml:"format"` + return types.ParseAsyncAPI(yamlStr) } diff --git a/sdk/mpr/edmx.go b/sdk/mpr/edmx.go index 3538d26a..d407b08b 100644 --- a/sdk/mpr/edmx.go +++ b/sdk/mpr/edmx.go @@ -3,539 +3,27 @@ package mpr import ( - "encoding/xml" - "fmt" - "strings" + "github.com/mendixlabs/mxcli/mdl/types" ) -// EdmxDocument represents a parsed OData $metadata document (EDMX/CSDL). -// Supports both OData v3 (CSDL 2.0/3.0) and OData v4 (CSDL 4.0). -type EdmxDocument struct { - Version string // "1.0" (OData3) or "4.0" (OData4) - Schemas []*EdmSchema // Schema definitions - EntitySets []*EdmEntitySet // Entity sets from EntityContainer - Actions []*EdmAction // OData4 actions / OData3 function imports -} - -// EdmSchema represents an EDM schema namespace. -type EdmSchema struct { - Namespace string - EntityTypes []*EdmEntityType - EnumTypes []*EdmEnumType -} - -// EdmEntityType represents an entity type definition. -type EdmEntityType struct { - Name string - BaseType string // Qualified name of base type (e.g. "Microsoft...PlanItem"), empty if none - IsAbstract bool // True if - IsOpen bool // True if - KeyProperties []string - Properties []*EdmProperty - NavigationProperties []*EdmNavigationProperty - Summary string - Description string -} - -// EdmProperty represents a property on an entity type. -type EdmProperty struct { - Name string - Type string // e.g. "Edm.String", "Edm.Int64" - Nullable *bool // nil = not specified (default true) - MaxLength string // e.g. "200", "max" - Scale string // e.g. "variable" - - // Capability annotations (OData Core V1). 
When true, the property is not - // settable by the client: - // Computed = server-computed, not settable on create or update. - // Immutable = settable on create, but not on update. - Computed bool - Immutable bool -} - -// EdmNavigationProperty represents a navigation property (association). -type EdmNavigationProperty struct { - Name string - Type string // OData4: "DefaultNamespace.Customer" or "Collection(DefaultNamespace.Part)" - Partner string // OData4 partner property name - TargetType string // Resolved target entity type name (without namespace/Collection) - IsMany bool // true if Collection() - ContainsTarget bool // true if - // OData3 fields (from Association) - Relationship string - FromRole string - ToRole string -} - -// EdmEntitySet represents an entity set in the entity container. -type EdmEntitySet struct { - Name string - EntityType string // Qualified name of entity type - - // Capabilities derived from Org.OData.Capabilities.V1 annotations. - // nil = not specified (treat as default true). - Insertable *bool // InsertRestrictions/Insertable - Updatable *bool // UpdateRestrictions/Updatable - Deletable *bool // DeleteRestrictions/Deletable - - // Navigation property names listed under - // Org.OData.Capabilities.V1.{Insert,Update}Restrictions/Non*NavigationProperties. - NonInsertableNavigationProperties []string - NonUpdatableNavigationProperties []string - - // Property names listed under - // Org.OData.Capabilities.V1.{Insert,Update}Restrictions/Non*Properties. - // Structural properties named here cannot be set on insert / update. - NonInsertableProperties []string - NonUpdatableProperties []string -} - -// EdmAction represents an OData4 action or OData3 function import. -type EdmAction struct { - Name string - IsBound bool - Parameters []*EdmActionParameter - ReturnType string -} - -// EdmActionParameter represents a parameter of an action. 
-type EdmActionParameter struct { - Name string - Type string - Nullable *bool -} - -// EdmEnumType represents an enumeration type. -type EdmEnumType struct { - Name string - Members []*EdmEnumMember -} - -// EdmEnumMember represents a member of an enum type. -type EdmEnumMember struct { - Name string - Value string -} - -// ParseEdmx parses an OData $metadata XML string into an EdmxDocument. +// Type aliases — all EDMX types now live in mdl/types. +type EdmxDocument = types.EdmxDocument +type EdmSchema = types.EdmSchema +type EdmEntityType = types.EdmEntityType +type EdmProperty = types.EdmProperty +type EdmNavigationProperty = types.EdmNavigationProperty +type EdmEntitySet = types.EdmEntitySet +type EdmAction = types.EdmAction +type EdmActionParameter = types.EdmActionParameter +type EdmEnumType = types.EdmEnumType +type EdmEnumMember = types.EdmEnumMember + +// ParseEdmx delegates to types.ParseEdmx. func ParseEdmx(metadataXML string) (*EdmxDocument, error) { - if metadataXML == "" { - return nil, fmt.Errorf("empty metadata XML") - } - - var edmx xmlEdmx - if err := xml.Unmarshal([]byte(metadataXML), &edmx); err != nil { - return nil, fmt.Errorf("failed to parse EDMX XML: %w", err) - } - - doc := &EdmxDocument{ - Version: edmx.Version, - } - - for _, ds := range edmx.DataServices { - for _, s := range ds.Schemas { - schema := &EdmSchema{ - Namespace: s.Namespace, - } - - // Parse entity types - for _, et := range s.EntityTypes { - entityType := parseXmlEntityType(&et) - schema.EntityTypes = append(schema.EntityTypes, entityType) - } - - // Parse enum types - for _, en := range s.EnumTypes { - enumType := &EdmEnumType{Name: en.Name} - for _, m := range en.Members { - enumType.Members = append(enumType.Members, &EdmEnumMember{ - Name: m.Name, - Value: m.Value, - }) - } - schema.EnumTypes = append(schema.EnumTypes, enumType) - } - - doc.Schemas = append(doc.Schemas, schema) - - // Parse entity container - for _, ec := range s.EntityContainers { - for _, es := range 
ec.EntitySets { - entitySet := &EdmEntitySet{ - Name: es.Name, - EntityType: es.EntityType, - } - applyCapabilityAnnotations(entitySet, es.Annotations) - doc.EntitySets = append(doc.EntitySets, entitySet) - } - - // OData3 function imports - for _, fi := range ec.FunctionImports { - action := &EdmAction{ - Name: fi.Name, - ReturnType: fi.ReturnType, - } - for _, p := range fi.Parameters { - action.Parameters = append(action.Parameters, &EdmActionParameter{ - Name: p.Name, - Type: p.Type, - }) - } - doc.Actions = append(doc.Actions, action) - } - } - - // OData4 actions - for _, a := range s.Actions { - action := &EdmAction{ - Name: a.Name, - IsBound: a.IsBound == "true", - } - if a.ReturnType != nil { - action.ReturnType = a.ReturnType.Type - } - for _, p := range a.Parameters { - param := &EdmActionParameter{ - Name: p.Name, - Type: p.Type, - } - if p.Nullable != "" { - v := p.Nullable == "true" - param.Nullable = &v - } - action.Parameters = append(action.Parameters, param) - } - doc.Actions = append(doc.Actions, action) - } - - // OData4 functions (treated same as actions for discovery) - for _, f := range s.Functions { - action := &EdmAction{ - Name: f.Name, - IsBound: f.IsBound == "true", - } - if f.ReturnType != nil { - action.ReturnType = f.ReturnType.Type - } - for _, p := range f.Parameters { - param := &EdmActionParameter{ - Name: p.Name, - Type: p.Type, - } - action.Parameters = append(action.Parameters, param) - } - doc.Actions = append(doc.Actions, action) - } - } - } - - return doc, nil -} - -// FindEntityType looks up an entity type by name (with or without namespace prefix). 
-func (d *EdmxDocument) FindEntityType(name string) *EdmEntityType { - // Strip namespace prefix if present - shortName := name - if idx := strings.LastIndex(name, "."); idx >= 0 { - shortName = name[idx+1:] - } - for _, s := range d.Schemas { - for _, et := range s.EntityTypes { - if et.Name == shortName { - return et - } - } - } - return nil -} - -func parseXmlEntityType(et *xmlEntityType) *EdmEntityType { - entityType := &EdmEntityType{ - Name: et.Name, - BaseType: et.BaseType, - IsAbstract: et.Abstract == "true", - IsOpen: et.OpenType == "true", - } - - // Parse key - if et.Key != nil { - for _, pr := range et.Key.PropertyRefs { - entityType.KeyProperties = append(entityType.KeyProperties, pr.Name) - } - } - - // Parse documentation (OData3 style) - if et.Documentation != nil { - entityType.Summary = et.Documentation.Summary - entityType.Description = et.Documentation.LongDescription - } - - // Parse annotations (OData4 style) - for _, ann := range et.Annotations { - switch ann.Term { - case "Org.OData.Core.V1.Description": - entityType.Summary = ann.String - case "Org.OData.Core.V1.LongDescription": - entityType.Description = ann.String - } - } - - // Parse properties - for _, p := range et.Properties { - prop := &EdmProperty{ - Name: p.Name, - Type: p.Type, - MaxLength: p.MaxLength, - Scale: p.Scale, - } - if p.Nullable != "" { - v := p.Nullable != "false" - prop.Nullable = &v - } - for _, ann := range p.Annotations { - switch ann.Term { - case "Org.OData.Core.V1.Computed": - prop.Computed = ann.Bool == "" || ann.Bool == "true" - case "Org.OData.Core.V1.Immutable": - prop.Immutable = ann.Bool == "" || ann.Bool == "true" - } - } - entityType.Properties = append(entityType.Properties, prop) - } - - // Parse navigation properties - for _, np := range et.NavigationProperties { - nav := &EdmNavigationProperty{ - Name: np.Name, - Type: np.Type, - Partner: np.Partner, - ContainsTarget: np.ContainsTarget == "true", - Relationship: np.Relationship, - FromRole: 
np.FromRole, - ToRole: np.ToRole, - } - - // Resolve target type from OData4 Type field - if np.Type != "" { - nav.TargetType, nav.IsMany = resolveNavType(np.Type) - } - - entityType.NavigationProperties = append(entityType.NavigationProperties, nav) - } - - return entityType -} - -// applyCapabilityAnnotations reads Org.OData.Capabilities.V1.{Insert,Update, -// Delete}Restrictions annotations on an entity set and stores the relevant -// flags on the EdmEntitySet. -func applyCapabilityAnnotations(es *EdmEntitySet, annotations []xmlCapabilitiesAnnotation) { - for _, ann := range annotations { - if ann.Record == nil { - continue - } - switch ann.Term { - case "Org.OData.Capabilities.V1.InsertRestrictions": - for _, pv := range ann.Record.PropertyValues { - switch pv.Property { - case "Insertable": - if pv.Bool != "" { - v := pv.Bool == "true" - es.Insertable = &v - } - case "NonInsertableNavigationProperties": - if pv.Collection != nil { - es.NonInsertableNavigationProperties = pv.Collection.NavigationPropertyPaths - } - case "NonInsertableProperties": - if pv.Collection != nil { - es.NonInsertableProperties = pv.Collection.PropertyPaths - } - } - } - case "Org.OData.Capabilities.V1.UpdateRestrictions": - for _, pv := range ann.Record.PropertyValues { - switch pv.Property { - case "Updatable": - if pv.Bool != "" { - v := pv.Bool == "true" - es.Updatable = &v - } - case "NonUpdatableNavigationProperties": - if pv.Collection != nil { - es.NonUpdatableNavigationProperties = pv.Collection.NavigationPropertyPaths - } - case "NonUpdatableProperties": - if pv.Collection != nil { - es.NonUpdatableProperties = pv.Collection.PropertyPaths - } - } - } - case "Org.OData.Capabilities.V1.DeleteRestrictions": - for _, pv := range ann.Record.PropertyValues { - if pv.Property == "Deletable" && pv.Bool != "" { - v := pv.Bool == "true" - es.Deletable = &v - } - } - } - } -} - -// resolveNavType parses "Collection(Namespace.Type)" or "Namespace.Type" into the short type name. 
-func resolveNavType(t string) (typeName string, isMany bool) { - if strings.HasPrefix(t, "Collection(") && strings.HasSuffix(t, ")") { - isMany = true - t = t[len("Collection(") : len(t)-1] - } - if idx := strings.LastIndex(t, "."); idx >= 0 { - typeName = t[idx+1:] - } else { - typeName = t - } - return -} - -// ============================================================================ -// XML deserialization types (internal) -// ============================================================================ - -type xmlEdmx struct { - XMLName xml.Name `xml:"Edmx"` - Version string `xml:"Version,attr"` - DataServices []xmlDataServices `xml:"DataServices"` -} - -type xmlDataServices struct { - Schemas []xmlSchema `xml:"Schema"` -} - -type xmlSchema struct { - Namespace string `xml:"Namespace,attr"` - EntityTypes []xmlEntityType `xml:"EntityType"` - EnumTypes []xmlEnumType `xml:"EnumType"` - EntityContainers []xmlEntityContainer `xml:"EntityContainer"` - Actions []xmlAction `xml:"Action"` - Functions []xmlAction `xml:"Function"` -} - -type xmlEntityType struct { - Name string `xml:"Name,attr"` - BaseType string `xml:"BaseType,attr"` - Abstract string `xml:"Abstract,attr"` - OpenType string `xml:"OpenType,attr"` - Key *xmlKey `xml:"Key"` - Properties []xmlProperty `xml:"Property"` - NavigationProperties []xmlNavigationProperty `xml:"NavigationProperty"` - Documentation *xmlDocumentation `xml:"Documentation"` - Annotations []xmlAnnotation `xml:"Annotation"` -} - -type xmlKey struct { - PropertyRefs []xmlPropertyRef `xml:"PropertyRef"` -} - -type xmlPropertyRef struct { - Name string `xml:"Name,attr"` -} - -type xmlProperty struct { - Name string `xml:"Name,attr"` - Type string `xml:"Type,attr"` - Nullable string `xml:"Nullable,attr"` - MaxLength string `xml:"MaxLength,attr"` - Scale string `xml:"Scale,attr"` - Annotations []xmlAnnotation `xml:"Annotation"` -} - -type xmlNavigationProperty struct { - Name string `xml:"Name,attr"` - Type string `xml:"Type,attr"` // 
OData4 - Partner string `xml:"Partner,attr"` // OData4 - ContainsTarget string `xml:"ContainsTarget,attr"` // OData4: contained nav target (e.g. Person.Trips) - Relationship string `xml:"Relationship,attr"` // OData3 - FromRole string `xml:"FromRole,attr"` // OData3 - ToRole string `xml:"ToRole,attr"` // OData3 -} - -type xmlDocumentation struct { - Summary string `xml:"Summary"` - LongDescription string `xml:"LongDescription"` -} - -type xmlAnnotation struct { - Term string `xml:"Term,attr"` - String string `xml:"String,attr"` - Bool string `xml:"Bool,attr"` -} - -type xmlEntityContainer struct { - Name string `xml:"Name,attr"` - EntitySets []xmlEntitySet `xml:"EntitySet"` - FunctionImports []xmlFunctionImport `xml:"FunctionImport"` -} - -type xmlEntitySet struct { - Name string `xml:"Name,attr"` - EntityType string `xml:"EntityType,attr"` - Annotations []xmlCapabilitiesAnnotation `xml:"Annotation"` -} - -// xmlCapabilitiesAnnotation captures the bits of OData V1 Capabilities -// annotations we care about. The wrapping contains -// and (sometimes) -// -// Trips. 
-type xmlCapabilitiesAnnotation struct { - Term string `xml:"Term,attr"` - Record *xmlCapabilitiesRecord `xml:"Record"` -} - -type xmlCapabilitiesRecord struct { - PropertyValues []xmlCapabilitiesPropertyValue `xml:"PropertyValue"` -} - -type xmlCapabilitiesPropertyValue struct { - Property string `xml:"Property,attr"` - Bool string `xml:"Bool,attr"` - Collection *xmlCapabilitiesCollection `xml:"Collection"` -} - -type xmlCapabilitiesCollection struct { - NavigationPropertyPaths []string `xml:"NavigationPropertyPath"` - PropertyPaths []string `xml:"PropertyPath"` -} - -type xmlFunctionImport struct { - Name string `xml:"Name,attr"` - ReturnType string `xml:"ReturnType,attr"` - Parameters []xmlActionParam `xml:"Parameter"` -} - -type xmlAction struct { - Name string `xml:"Name,attr"` - IsBound string `xml:"IsBound,attr"` - ReturnType *xmlReturnType `xml:"ReturnType"` - Parameters []xmlActionParam `xml:"Parameter"` -} - -type xmlReturnType struct { - Type string `xml:"Type,attr"` - Nullable string `xml:"Nullable,attr"` -} - -type xmlActionParam struct { - Name string `xml:"Name,attr"` - Type string `xml:"Type,attr"` - Nullable string `xml:"Nullable,attr"` -} - -type xmlEnumType struct { - Name string `xml:"Name,attr"` - Members []xmlEnumMember `xml:"Member"` + return types.ParseEdmx(metadataXML) } -type xmlEnumMember struct { - Name string `xml:"Name,attr"` - Value string `xml:"Value,attr"` +// resolveNavType delegates to types.ResolveNavType (kept for test compatibility). 
+func resolveNavType(t string) (string, bool) { + return types.ResolveNavType(t) } diff --git a/sdk/mpr/parser_misc.go b/sdk/mpr/parser_misc.go index 6e4cdb30..662aa404 100644 --- a/sdk/mpr/parser_misc.go +++ b/sdk/mpr/parser_misc.go @@ -9,6 +9,7 @@ import ( "path/filepath" "strings" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/javaactions" "github.com/mendixlabs/mxcli/sdk/pages" @@ -80,7 +81,7 @@ func (r *Reader) parseSnippet(unitID, containerID string, contents []byte) (*pag } // parseJavaAction parses Java action contents from BSON. -func (r *Reader) parseJavaAction(unitID, containerID string, contents []byte) (*JavaAction, error) { +func (r *Reader) parseJavaAction(unitID, containerID string, contents []byte) (*types.JavaAction, error) { contents, err := r.resolveContents(unitID, contents) if err != nil { return nil, err @@ -91,7 +92,7 @@ func (r *Reader) parseJavaAction(unitID, containerID string, contents []byte) (* return nil, fmt.Errorf("failed to unmarshal BSON: %w", err) } - ja := &JavaAction{} + ja := &types.JavaAction{} ja.ID = model.ID(unitID) ja.TypeName = "JavaActions$JavaAction" ja.ContainerID = model.ID(containerID) @@ -137,7 +138,7 @@ func WriteJSON(element any) ([]byte, error) { } // parseJavaScriptAction parses JavaScript action contents from BSON. 
-func (r *Reader) parseJavaScriptAction(unitID, containerID string, contents []byte) (*JavaScriptAction, error) { +func (r *Reader) parseJavaScriptAction(unitID, containerID string, contents []byte) (*types.JavaScriptAction, error) { contents, err := r.resolveContents(unitID, contents) if err != nil { return nil, err @@ -148,7 +149,7 @@ func (r *Reader) parseJavaScriptAction(unitID, containerID string, contents []by return nil, fmt.Errorf("failed to unmarshal BSON: %w", err) } - jsa := &JavaScriptAction{} + jsa := &types.JavaScriptAction{} jsa.ID = model.ID(unitID) jsa.TypeName = "JavaScriptActions$JavaScriptAction" jsa.ContainerID = model.ID(containerID) @@ -249,7 +250,7 @@ func (r *Reader) parseJavaScriptAction(unitID, containerID string, contents []by } // ReadJavaScriptActionByName reads a JavaScript action by qualified name (Module.ActionName). -func (r *Reader) ReadJavaScriptActionByName(qualifiedName string) (*JavaScriptAction, error) { +func (r *Reader) ReadJavaScriptActionByName(qualifiedName string) (*types.JavaScriptAction, error) { units, err := r.listUnitsByType("JavaScriptActions$JavaScriptAction") if err != nil { return nil, err @@ -363,7 +364,7 @@ func (r *Reader) parsePageTemplate(unitID, containerID string, contents []byte) } // parseNavigationDocument parses navigation document contents from BSON. 
-func (r *Reader) parseNavigationDocument(unitID, containerID string, contents []byte) (*NavigationDocument, error) { +func (r *Reader) parseNavigationDocument(unitID, containerID string, contents []byte) (*types.NavigationDocument, error) { contents, err := r.resolveContents(unitID, contents) if err != nil { return nil, err @@ -374,7 +375,7 @@ func (r *Reader) parseNavigationDocument(unitID, containerID string, contents [] return nil, fmt.Errorf("failed to unmarshal BSON: %w", err) } - nav := &NavigationDocument{} + nav := &types.NavigationDocument{} nav.ID = model.ID(unitID) nav.TypeName = "Navigation$NavigationDocument" nav.ContainerID = model.ID(containerID) @@ -399,9 +400,9 @@ func (r *Reader) parseNavigationDocument(unitID, containerID string, contents [] } // parseNavigationProfile parses a single navigation profile from BSON. -func parseNavigationProfile(raw map[string]any) *NavigationProfile { +func parseNavigationProfile(raw map[string]any) *types.NavigationProfile { typeName := extractString(raw["$Type"]) - profile := &NavigationProfile{ + profile := &types.NavigationProfile{ Name: extractString(raw["Name"]), Kind: extractString(raw["Kind"]), } @@ -413,13 +414,13 @@ func parseNavigationProfile(raw map[string]any) *NavigationProfile { page := extractString(hp["HomePagePage"]) nanoflow := extractString(hp["HomePageNanoflow"]) if page != "" || nanoflow != "" { - profile.HomePage = &NavHomePage{Page: page, Microflow: nanoflow} + profile.HomePage = &types.NavHomePage{Page: page, Microflow: nanoflow} } } // Native role-based home pages for _, item := range extractBsonArray(raw["RoleBasedNativeHomePages"]) { if rbMap, ok := item.(map[string]any); ok { - rbh := &NavRoleBasedHome{ + rbh := &types.NavRoleBasedHome{ UserRole: extractString(rbMap["UserRole"]), Page: extractString(rbMap["HomePagePage"]), Microflow: extractString(rbMap["HomePageNanoflow"]), @@ -445,13 +446,13 @@ func parseNavigationProfile(raw map[string]any) *NavigationProfile { page := 
extractString(hp["Page"]) mf := extractString(hp["Microflow"]) if page != "" || mf != "" { - profile.HomePage = &NavHomePage{Page: page, Microflow: mf} + profile.HomePage = &types.NavHomePage{Page: page, Microflow: mf} } } // Role-based home pages (stored as "HomeItems") for _, item := range extractBsonArray(raw["HomeItems"]) { if rbMap, ok := item.(map[string]any); ok { - rbh := &NavRoleBasedHome{ + rbh := &types.NavRoleBasedHome{ UserRole: extractString(rbMap["UserRole"]), Page: extractString(rbMap["Page"]), Microflow: extractString(rbMap["Microflow"]), @@ -488,7 +489,7 @@ func parseNavigationProfile(raw map[string]any) *NavigationProfile { // Offline entity configs (both web and native) for _, item := range extractBsonArray(raw["OfflineEntityConfigs"]) { if oeMap, ok := item.(map[string]any); ok { - oe := &NavOfflineEntity{ + oe := &types.NavOfflineEntity{ Entity: extractString(oeMap["Entity"]), SyncMode: extractString(oeMap["SyncMode"]), Constraint: extractString(oeMap["Constraint"]), @@ -503,8 +504,8 @@ func parseNavigationProfile(raw map[string]any) *NavigationProfile { } // parseNavMenuItem parses a Menus$MenuItem from BSON. -func parseNavMenuItem(raw map[string]any) *NavMenuItem { - mi := &NavMenuItem{} +func parseNavMenuItem(raw map[string]any) *types.NavMenuItem { + mi := &types.NavMenuItem{} // Extract caption text (Caption → Items → first Translation → Text) if caption, ok := raw["Caption"].(map[string]any); ok { @@ -552,8 +553,8 @@ func parseNavMenuItem(raw map[string]any) *NavMenuItem { } // parseNavMenuItemFromBottomBar parses a NativePages$BottomBarItem as a NavMenuItem. 
-func parseNavMenuItemFromBottomBar(raw map[string]any) *NavMenuItem { - mi := &NavMenuItem{} +func parseNavMenuItemFromBottomBar(raw map[string]any) *types.NavMenuItem { + mi := &types.NavMenuItem{} if caption, ok := raw["Caption"].(map[string]any); ok { mi.Caption = extractTextFromBson(caption) } @@ -589,7 +590,7 @@ func extractTextFromBson(raw map[string]any) string { } // parseImageCollection parses image collection contents from BSON. -func (r *Reader) parseImageCollection(unitID, containerID string, contents []byte) (*ImageCollection, error) { +func (r *Reader) parseImageCollection(unitID, containerID string, contents []byte) (*types.ImageCollection, error) { contents, err := r.resolveContents(unitID, contents) if err != nil { return nil, err @@ -600,7 +601,7 @@ func (r *Reader) parseImageCollection(unitID, containerID string, contents []byt return nil, fmt.Errorf("failed to unmarshal BSON: %w", err) } - ic := &ImageCollection{} + ic := &types.ImageCollection{} ic.ID = model.ID(unitID) ic.TypeName = "Images$ImageCollection" ic.ContainerID = model.ID(containerID) @@ -619,7 +620,7 @@ func (r *Reader) parseImageCollection(unitID, containerID string, contents []byt if images, ok := raw["Images"].(bson.A); ok { for _, img := range images { if imgMap, ok := img.(map[string]any); ok { - image := Image{} + image := types.Image{} if id := extractID(imgMap["$ID"]); id != "" { image.ID = model.ID(id) } @@ -643,7 +644,7 @@ func (r *Reader) parseImageCollection(unitID, containerID string, contents []byt } // parseJsonStructure parses JSON structure contents from BSON. 
-func (r *Reader) parseJsonStructure(unitID, containerID string, contents []byte) (*JsonStructure, error) { +func (r *Reader) parseJsonStructure(unitID, containerID string, contents []byte) (*types.JsonStructure, error) { contents, err := r.resolveContents(unitID, contents) if err != nil { return nil, err @@ -654,7 +655,7 @@ func (r *Reader) parseJsonStructure(unitID, containerID string, contents []byte) return nil, fmt.Errorf("failed to unmarshal BSON: %w", err) } - js := &JsonStructure{} + js := &types.JsonStructure{} js.ID = model.ID(unitID) js.TypeName = "JsonStructures$JsonStructure" js.ContainerID = model.ID(containerID) @@ -688,8 +689,8 @@ func (r *Reader) parseJsonStructure(unitID, containerID string, contents []byte) } // parseJsonElement recursively parses a JsonStructures$JsonElement from BSON. -func parseJsonElement(raw map[string]any) *JsonElement { - elem := &JsonElement{ +func parseJsonElement(raw map[string]any) *types.JsonElement { + elem := &types.JsonElement{ MaxLength: -1, FractionDigits: -1, TotalDigits: -1, diff --git a/sdk/mpr/reader.go b/sdk/mpr/reader.go index b9e7f9dd..50db5fae 100644 --- a/sdk/mpr/reader.go +++ b/sdk/mpr/reader.go @@ -11,6 +11,7 @@ import ( "os" "path/filepath" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/mpr/version" _ "modernc.org/sqlite" @@ -241,19 +242,9 @@ func (r *Reader) GetMendixVersion() (string, error) { return version, nil } -// blobToUUID converts a 16-byte blob to a UUID string using Microsoft GUID format. -// The first 3 groups are little-endian (byte-swapped), last 2 groups are big-endian. -// This is the standard format used by Mendix for all UUID representations. +// blobToUUID delegates to types.BlobToUUID. 
func blobToUUID(blob []byte) string { - if len(blob) != 16 { - return hex.EncodeToString(blob) - } - return fmt.Sprintf("%02x%02x%02x%02x-%02x%02x-%02x%02x-%02x%02x-%02x%02x%02x%02x%02x%02x", - blob[3], blob[2], blob[1], blob[0], - blob[5], blob[4], - blob[7], blob[6], - blob[8], blob[9], - blob[10], blob[11], blob[12], blob[13], blob[14], blob[15]) + return types.BlobToUUID(blob) } // blobToUUIDSwapped converts a 16-byte blob to a UUID string using Microsoft GUID format. diff --git a/sdk/mpr/reader_types.go b/sdk/mpr/reader_types.go index 485fc67c..6b9099a9 100644 --- a/sdk/mpr/reader_types.go +++ b/sdk/mpr/reader_types.go @@ -7,39 +7,21 @@ import ( "encoding/json" "fmt" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/javaactions" "github.com/mendixlabs/mxcli/sdk/pages" "go.mongodb.org/mongo-driver/bson" ) -// JavaAction represents a Java action. -type JavaAction struct { - model.BaseElement - ContainerID model.ID `json:"containerId"` - Name string `json:"name"` - Documentation string `json:"documentation,omitempty"` -} - -// GetName returns the Java action's name. -func (ja *JavaAction) GetName() string { - return ja.Name -} - -// GetContainerID returns the container ID. -func (ja *JavaAction) GetContainerID() model.ID { - return ja.ContainerID -} - // ListJavaActions returns all Java actions in the project. -func (r *Reader) ListJavaActions() ([]*JavaAction, error) { +func (r *Reader) ListJavaActions() ([]*types.JavaAction, error) { units, err := r.listUnitsByType("JavaActions$JavaAction") if err != nil { return nil, err } - var result []*JavaAction + var result []*types.JavaAction for _, u := range units { ja, err := r.parseJavaAction(u.ID, u.ContainerID, u.Contents) if err != nil { @@ -51,50 +33,14 @@ func (r *Reader) ListJavaActions() ([]*JavaAction, error) { return result, nil } -// JavaScriptAction represents a JavaScript action. 
-type JavaScriptAction struct { - model.BaseElement - ContainerID model.ID `json:"containerId"` - Name string `json:"name"` - Documentation string `json:"documentation,omitempty"` - Platform string `json:"platform,omitempty"` - Excluded bool `json:"excluded"` - ExportLevel string `json:"exportLevel,omitempty"` - ActionDefaultReturnName string `json:"actionDefaultReturnName,omitempty"` - ReturnType javaactions.CodeActionReturnType `json:"returnType,omitempty"` - Parameters []*javaactions.JavaActionParameter `json:"parameters,omitempty"` - TypeParameters []*javaactions.TypeParameterDef `json:"typeParameters,omitempty"` - MicroflowActionInfo *javaactions.MicroflowActionInfo `json:"microflowActionInfo,omitempty"` -} - -// GetName returns the JavaScript action's name. -func (jsa *JavaScriptAction) GetName() string { - return jsa.Name -} - -// GetContainerID returns the container ID. -func (jsa *JavaScriptAction) GetContainerID() model.ID { - return jsa.ContainerID -} - -// FindTypeParameterName looks up a type parameter name by its ID. -func (jsa *JavaScriptAction) FindTypeParameterName(id model.ID) string { - for _, tp := range jsa.TypeParameters { - if tp.ID == id { - return tp.Name - } - } - return "" -} - // ListJavaScriptActions returns all JavaScript actions in the project. -func (r *Reader) ListJavaScriptActions() ([]*JavaScriptAction, error) { +func (r *Reader) ListJavaScriptActions() ([]*types.JavaScriptAction, error) { units, err := r.listUnitsByType("JavaScriptActions$JavaScriptAction") if err != nil { return nil, err } - var result []*JavaScriptAction + var result []*types.JavaScriptAction for _, u := range units { jsa, err := r.parseJavaScriptAction(u.ID, u.ContainerID, u.Contents) if err != nil { @@ -144,74 +90,14 @@ func (r *Reader) ListPageTemplates() ([]*pages.PageTemplate, error) { return result, nil } -// NavigationDocument represents a navigation document. 
-type NavigationDocument struct { - model.BaseElement - ContainerID model.ID `json:"containerId"` - Name string `json:"name"` - Profiles []*NavigationProfile `json:"profiles,omitempty"` -} - -// GetName returns the navigation document's name. -func (nd *NavigationDocument) GetName() string { - return nd.Name -} - -// GetContainerID returns the container ID. -func (nd *NavigationDocument) GetContainerID() model.ID { - return nd.ContainerID -} - -// NavigationProfile represents a navigation profile (web or native). -type NavigationProfile struct { - Name string `json:"name"` - Kind string `json:"kind"` // Responsive, Phone, Tablet, etc. - IsNative bool `json:"isNative"` - HomePage *NavHomePage `json:"homePage,omitempty"` - RoleBasedHomePages []*NavRoleBasedHome `json:"roleBasedHomePages,omitempty"` - LoginPage string `json:"loginPage,omitempty"` // qualified page name - NotFoundPage string `json:"notFoundPage,omitempty"` // qualified page name - MenuItems []*NavMenuItem `json:"menuItems,omitempty"` - OfflineEntities []*NavOfflineEntity `json:"offlineEntities,omitempty"` -} - -// NavHomePage represents a default home page (page or microflow). -type NavHomePage struct { - Page string `json:"page,omitempty"` // qualified page name - Microflow string `json:"microflow,omitempty"` // qualified microflow name -} - -// NavRoleBasedHome represents a role-specific home page override. -type NavRoleBasedHome struct { - UserRole string `json:"userRole"` // qualified user role name - Page string `json:"page,omitempty"` // qualified page name - Microflow string `json:"microflow,omitempty"` // qualified microflow name -} - -// NavMenuItem represents a menu item (recursive for sub-menus). 
-type NavMenuItem struct { - Caption string `json:"caption"` - Page string `json:"page,omitempty"` // target page qualified name - Microflow string `json:"microflow,omitempty"` // target microflow qualified name - ActionType string `json:"actionType,omitempty"` // PageAction, MicroflowAction, NoAction, OpenLinkAction - Items []*NavMenuItem `json:"items,omitempty"` -} - -// NavOfflineEntity represents an offline entity sync configuration. -type NavOfflineEntity struct { - Entity string `json:"entity"` // qualified entity name - SyncMode string `json:"syncMode"` // All, Constrained, Never, etc. - Constraint string `json:"constraint,omitempty"` // XPath -} - // ListNavigationDocuments returns all navigation documents in the project. -func (r *Reader) ListNavigationDocuments() ([]*NavigationDocument, error) { +func (r *Reader) ListNavigationDocuments() ([]*types.NavigationDocument, error) { units, err := r.listUnitsByType("Navigation$NavigationDocument") if err != nil { return nil, err } - var result []*NavigationDocument + var result []*types.NavigationDocument for _, u := range units { nav, err := r.parseNavigationDocument(u.ID, u.ContainerID, u.Contents) if err != nil { @@ -224,7 +110,7 @@ func (r *Reader) ListNavigationDocuments() ([]*NavigationDocument, error) { } // GetNavigation returns the project's navigation document (singleton). -func (r *Reader) GetNavigation() (*NavigationDocument, error) { +func (r *Reader) GetNavigation() (*types.NavigationDocument, error) { docs, err := r.ListNavigationDocuments() if err != nil { return nil, err @@ -235,42 +121,14 @@ func (r *Reader) GetNavigation() (*NavigationDocument, error) { return docs[0], nil } -// ImageCollection represents an image collection. 
-type ImageCollection struct { - model.BaseElement - ContainerID model.ID `json:"containerId"` - Name string `json:"name"` - ExportLevel string `json:"exportLevel,omitempty"` - Documentation string `json:"documentation,omitempty"` - Images []Image `json:"images,omitempty"` -} - -// Image represents an image in a collection. -type Image struct { - ID model.ID `json:"id"` - Name string `json:"name"` - Data []byte `json:"data,omitempty"` // raw image bytes - Format string `json:"format,omitempty"` // "Png", "Svg", "Gif", "Jpeg", "Bmp" -} - -// GetName returns the image collection's name. -func (ic *ImageCollection) GetName() string { - return ic.Name -} - -// GetContainerID returns the container ID. -func (ic *ImageCollection) GetContainerID() model.ID { - return ic.ContainerID -} - // ListImageCollections returns all image collections in the project. -func (r *Reader) ListImageCollections() ([]*ImageCollection, error) { +func (r *Reader) ListImageCollections() ([]*types.ImageCollection, error) { units, err := r.listUnitsByType("Images$ImageCollection") if err != nil { return nil, err } - var result []*ImageCollection + var result []*types.ImageCollection for _, u := range units { ic, err := r.parseImageCollection(u.ID, u.ContainerID, u.Contents) if err != nil { @@ -282,54 +140,14 @@ func (r *Reader) ListImageCollections() ([]*ImageCollection, error) { return result, nil } -// JsonStructure represents a JSON structure document. -type JsonStructure struct { - model.BaseElement - ContainerID model.ID `json:"containerId"` - Name string `json:"name"` - Documentation string `json:"documentation,omitempty"` - JsonSnippet string `json:"jsonSnippet,omitempty"` - Elements []*JsonElement `json:"elements,omitempty"` - Excluded bool `json:"excluded,omitempty"` - ExportLevel string `json:"exportLevel,omitempty"` -} - -// JsonElement represents an element in a JSON structure's element tree. 
-type JsonElement struct { - ExposedName string `json:"exposedName"` - ExposedItemName string `json:"exposedItemName,omitempty"` - Path string `json:"path"` - ElementType string `json:"elementType"` // "Object", "Array", "Value", "Choice" - PrimitiveType string `json:"primitiveType"` // "String", "Integer", "Boolean", "Decimal", "Unknown" - MinOccurs int `json:"minOccurs"` - MaxOccurs int `json:"maxOccurs"` // -1 = unbounded - Nillable bool `json:"nillable,omitempty"` - IsDefaultType bool `json:"isDefaultType,omitempty"` - MaxLength int `json:"maxLength"` // -1 = unset - FractionDigits int `json:"fractionDigits"` // -1 = unset - TotalDigits int `json:"totalDigits"` // -1 = unset - OriginalValue string `json:"originalValue,omitempty"` - Children []*JsonElement `json:"children,omitempty"` -} - -// GetName returns the JSON structure's name. -func (js *JsonStructure) GetName() string { - return js.Name -} - -// GetContainerID returns the container ID. -func (js *JsonStructure) GetContainerID() model.ID { - return js.ContainerID -} - // ListJsonStructures returns all JSON structures in the project. -func (r *Reader) ListJsonStructures() ([]*JsonStructure, error) { +func (r *Reader) ListJsonStructures() ([]*types.JsonStructure, error) { units, err := r.listUnitsByType("JsonStructures$JsonStructure") if err != nil { return nil, err } - var result []*JsonStructure + var result []*types.JsonStructure for _, u := range units { js, err := r.parseJsonStructure(u.ID, u.ContainerID, u.Contents) if err != nil { @@ -342,7 +160,7 @@ func (r *Reader) ListJsonStructures() ([]*JsonStructure, error) { } // GetJsonStructureByQualifiedName retrieves a JSON structure by its qualified name (Module.Name). 
-func (r *Reader) GetJsonStructureByQualifiedName(moduleName, name string) (*JsonStructure, error) { +func (r *Reader) GetJsonStructureByQualifiedName(moduleName, name string) (*types.JsonStructure, error) { all, err := r.ListJsonStructures() if err != nil { return nil, err @@ -369,38 +187,22 @@ func (r *Reader) GetJsonStructureByQualifiedName(moduleName, name string) (*Json return nil, fmt.Errorf("JSON structure %s.%s not found", moduleName, name) } -// UnitInfo contains basic information about a unit. -type UnitInfo struct { - ID model.ID - ContainerID model.ID - ContainmentName string - Type string -} - -// RawUnit holds raw unit data with BSON contents. -type RawUnit struct { - ID model.ID - ContainerID model.ID - Type string - Contents []byte -} - // ListRawUnitsByType returns all raw units matching the given type prefix, // including their BSON contents. This is useful for scanning BSON directly // without full parsing. -func (r *Reader) ListRawUnitsByType(typePrefix string) ([]*RawUnit, error) { +func (r *Reader) ListRawUnitsByType(typePrefix string) ([]*types.RawUnit, error) { units, err := r.listUnitsByType(typePrefix) if err != nil { return nil, err } - var result []*RawUnit + var result []*types.RawUnit for _, u := range units { contents, err := r.resolveContents(u.ID, u.Contents) if err != nil { continue } - result = append(result, &RawUnit{ + result = append(result, &types.RawUnit{ ID: model.ID(u.ID), ContainerID: model.ID(u.ContainerID), Type: u.Type, @@ -411,15 +213,15 @@ func (r *Reader) ListRawUnitsByType(typePrefix string) ([]*RawUnit, error) { } // ListUnits returns all units with their IDs and types. 
-func (r *Reader) ListUnits() ([]*UnitInfo, error) { +func (r *Reader) ListUnits() ([]*types.UnitInfo, error) { units, err := r.listUnitsByType("") if err != nil { return nil, err } - var result []*UnitInfo + var result []*types.UnitInfo for _, u := range units { - result = append(result, &UnitInfo{ + result = append(result, &types.UnitInfo{ ID: model.ID(u.ID), ContainerID: model.ID(u.ContainerID), ContainmentName: u.ContainmentName, @@ -430,21 +232,14 @@ func (r *Reader) ListUnits() ([]*UnitInfo, error) { return result, nil } -// FolderInfo contains information about a project folder. -type FolderInfo struct { - ID model.ID - ContainerID model.ID - Name string -} - // ListFolders returns all project folders with their names. -func (r *Reader) ListFolders() ([]*FolderInfo, error) { +func (r *Reader) ListFolders() ([]*types.FolderInfo, error) { units, err := r.listUnitsByType("Projects$Folder") if err != nil { return nil, err } - var result []*FolderInfo + var result []*types.FolderInfo for _, u := range units { name := "" if len(u.Contents) > 0 { @@ -455,7 +250,7 @@ func (r *Reader) ListFolders() ([]*FolderInfo, error) { } } } - result = append(result, &FolderInfo{ + result = append(result, &types.FolderInfo{ ID: model.ID(u.ID), ContainerID: model.ID(u.ContainerID), Name: name, diff --git a/sdk/mpr/utils.go b/sdk/mpr/utils.go index ffca1a2a..1acc6739 100644 --- a/sdk/mpr/utils.go +++ b/sdk/mpr/utils.go @@ -3,66 +3,48 @@ package mpr import ( - "crypto/sha256" - "fmt" - + "github.com/mendixlabs/mxcli/mdl/types" "go.mongodb.org/mongo-driver/bson/primitive" ) // GenerateID generates a new unique ID for model elements. func GenerateID() string { - return generateUUID() + return types.GenerateID() } // GenerateDeterministicID generates a stable UUID from a seed string. -// Used for System module entities that aren't in the MPR but need consistent IDs. 
func GenerateDeterministicID(seed string) string { - h := sha256.Sum256([]byte(seed)) - return fmt.Sprintf("%08x-%04x-%04x-%04x-%012x", - h[0:4], h[4:6], h[6:8], h[8:10], h[10:16]) + return types.GenerateDeterministicID(seed) } // BlobToUUID converts a binary ID blob to a UUID string. func BlobToUUID(data []byte) string { - return blobToUUID(data) + return types.BlobToUUID(data) } // IDToBsonBinary converts a UUID string to a BSON binary value. func IDToBsonBinary(id string) primitive.Binary { - return idToBsonBinary(id) + blob := types.UUIDToBlob(id) + if blob == nil || len(blob) != 16 { + blob = types.UUIDToBlob(types.GenerateID()) + } + return primitive.Binary{ + Subtype: 0x00, + Data: blob, + } } // BsonBinaryToID converts a BSON binary value to a UUID string. func BsonBinaryToID(bin primitive.Binary) string { - return BlobToUUID(bin.Data) + return types.BlobToUUID(bin.Data) } // Hash computes a hash for content (used for content deduplication). func Hash(content []byte) string { - // Simple hash for now - could use crypto/sha256 for better hashing - var sum uint64 - for i, b := range content { - sum += uint64(b) * uint64(i+1) - } - return fmt.Sprintf("%016x", sum) + return types.Hash(content) } // ValidateID checks if an ID is valid. 
func ValidateID(id string) bool { - if len(id) != 36 { - return false - } - // Check UUID format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - for i, c := range id { - if i == 8 || i == 13 || i == 18 || i == 23 { - if c != '-' { - return false - } - } else { - if !((c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')) { - return false - } - } - } - return true + return types.ValidateID(id) } diff --git a/sdk/mpr/writer_core.go b/sdk/mpr/writer_core.go index 62e1fada..2f191150 100644 --- a/sdk/mpr/writer_core.go +++ b/sdk/mpr/writer_core.go @@ -3,26 +3,23 @@ package mpr import ( - "crypto/rand" "crypto/sha256" "database/sql" "encoding/base64" - "encoding/hex" "fmt" "os" "path/filepath" - "strings" + "github.com/mendixlabs/mxcli/mdl/types" "go.mongodb.org/mongo-driver/bson/primitive" ) // idToBsonBinary converts a UUID string to BSON Binary format. // Mendix stores IDs as Binary with Subtype 0. func idToBsonBinary(id string) primitive.Binary { - blob := uuidToBlob(id) + blob := types.UUIDToBlob(id) if blob == nil || len(blob) != 16 { - // Generate a new UUID if the provided one is invalid - blob = uuidToBlob(generateUUID()) + blob = types.UUIDToBlob(types.GenerateID()) } return primitive.Binary{ Subtype: 0x00, @@ -198,59 +195,12 @@ func (wt *WriteTransaction) cleanupTempFiles() { } } -// generateUUID generates a new UUID v4 for model elements. -// Returns format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx +// generateUUID delegates to types.GenerateID. func generateUUID() string { - b := make([]byte, 16) - _, _ = rand.Read(b) - b[6] = (b[6] & 0x0f) | 0x40 // Version 4 - b[8] = (b[8] & 0x3f) | 0x80 // Variant is 10 - - return fmt.Sprintf("%02x%02x%02x%02x-%02x%02x-%02x%02x-%02x%02x-%02x%02x%02x%02x%02x%02x", - b[0], b[1], b[2], b[3], - b[4], b[5], - b[6], b[7], - b[8], b[9], - b[10], b[11], b[12], b[13], b[14], b[15]) + return types.GenerateID() } -// uuidToBlob converts a UUID string to a 16-byte blob in Microsoft GUID format. 
-// UUID format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -// Microsoft GUID format byte-swaps the first 3 groups (little-endian): -// - First 4 bytes: reversed -// - Next 2 bytes: reversed -// - Next 2 bytes: reversed -// - Last 8 bytes: unchanged +// uuidToBlob delegates to types.UUIDToBlob. func uuidToBlob(uuid string) []byte { - if uuid == "" { - return nil - } - // Remove dashes - var clean strings.Builder - for _, c := range uuid { - if c != '-' { - clean.WriteString(string(c)) - } - } - // Decode hex to bytes - decoded, err := hex.DecodeString(clean.String()) - if err != nil || len(decoded) != 16 { - return nil - } - // Swap bytes to Microsoft GUID format - blob := make([]byte, 16) - // First 4 bytes: reversed - blob[0] = decoded[3] - blob[1] = decoded[2] - blob[2] = decoded[1] - blob[3] = decoded[0] - // Next 2 bytes: reversed - blob[4] = decoded[5] - blob[5] = decoded[4] - // Next 2 bytes: reversed - blob[6] = decoded[7] - blob[7] = decoded[6] - // Last 8 bytes: unchanged - copy(blob[8:], decoded[8:]) - return blob + return types.UUIDToBlob(uuid) } diff --git a/sdk/mpr/writer_imagecollection.go b/sdk/mpr/writer_imagecollection.go index 88a777af..42389328 100644 --- a/sdk/mpr/writer_imagecollection.go +++ b/sdk/mpr/writer_imagecollection.go @@ -3,13 +3,14 @@ package mpr import ( + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson/primitive" ) // CreateImageCollection creates a new empty image collection unit in the MPR. 
-func (w *Writer) CreateImageCollection(ic *ImageCollection) error { +func (w *Writer) CreateImageCollection(ic *types.ImageCollection) error { if ic.ID == "" { ic.ID = model.ID(generateUUID()) } @@ -31,7 +32,7 @@ func (w *Writer) DeleteImageCollection(id string) error { return w.deleteUnit(id) } -func serializeImageCollection(ic *ImageCollection) ([]byte, error) { +func serializeImageCollection(ic *types.ImageCollection) ([]byte, error) { // Images array always starts with the array marker int32(3) images := bson.A{int32(3)} for i := range ic.Images { diff --git a/sdk/mpr/writer_imagecollection_test.go b/sdk/mpr/writer_imagecollection_test.go index d7e0442e..22d89490 100644 --- a/sdk/mpr/writer_imagecollection_test.go +++ b/sdk/mpr/writer_imagecollection_test.go @@ -5,12 +5,13 @@ package mpr import ( "testing" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "go.mongodb.org/mongo-driver/bson" ) func TestSerializeImageCollection_EmptyImages(t *testing.T) { - ic := &ImageCollection{ + ic := &types.ImageCollection{ BaseElement: model.BaseElement{ID: "ic-test-1"}, ContainerID: model.ID("module-id-1"), Name: "TestIcons", @@ -58,7 +59,7 @@ func TestSerializeImageCollection_EmptyImages(t *testing.T) { } func TestSerializeImageCollection_DefaultExportLevel(t *testing.T) { - ic := &ImageCollection{ + ic := &types.ImageCollection{ BaseElement: model.BaseElement{ID: "ic-test-2"}, ContainerID: model.ID("module-id-1"), Name: "Icons", diff --git a/sdk/mpr/writer_jsonstructure.go b/sdk/mpr/writer_jsonstructure.go index 47a89006..d21e6b47 100644 --- a/sdk/mpr/writer_jsonstructure.go +++ b/sdk/mpr/writer_jsonstructure.go @@ -3,65 +3,21 @@ package mpr import ( - "bytes" - "encoding/json" - "fmt" - "math" - "regexp" - "strings" - "unicode" - + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "go.mongodb.org/mongo-driver/bson" ) -// iso8601Pattern matches common ISO 8601 datetime strings that Mendix Studio Pro -// recognizes as 
DateTime primitive types in JSON structures. -var iso8601Pattern = regexp.MustCompile( - `^\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2}(:\d{2})?(\.\d+)?(Z|[+-]\d{2}:?\d{2})?$`, -) - -// PrettyPrintJSON re-formats a JSON string with standard indentation. -// Returns the original string if it is not valid JSON. -func PrettyPrintJSON(s string) string { - var buf bytes.Buffer - if err := json.Indent(&buf, []byte(s), "", " "); err != nil { - return s - } - return buf.String() -} +// PrettyPrintJSON delegates to types.PrettyPrintJSON. +func PrettyPrintJSON(s string) string { return types.PrettyPrintJSON(s) } -// normalizeDateTimeValue pads fractional seconds to 7 digits to match -// Studio Pro's .NET DateTime format (e.g., "2015-05-22T14:56:29.000Z" → "2015-05-22T14:56:29.0000000Z"). -func normalizeDateTimeValue(s string) string { - // Find the decimal point after seconds - dotIdx := strings.Index(s, ".") - if dotIdx == -1 { - // No fractional part — insert .0000000 before timezone suffix - if idx := strings.IndexAny(s, "Z+-"); idx > 0 { - return s[:idx] + ".0000000" + s[idx:] - } - return s - } - // Find where fractional digits end (at Z, +, - or end of string) - fracEnd := len(s) - for i := dotIdx + 1; i < len(s); i++ { - if s[i] < '0' || s[i] > '9' { - fracEnd = i - break - } - } - frac := s[dotIdx+1 : fracEnd] - if len(frac) < 7 { - frac = frac + strings.Repeat("0", 7-len(frac)) - } else { - frac = frac[:7] - } - return s[:dotIdx+1] + frac + s[fracEnd:] +// BuildJsonElementsFromSnippet delegates to types.BuildJsonElementsFromSnippet. +func BuildJsonElementsFromSnippet(snippet string, customNameMap map[string]string) ([]*types.JsonElement, error) { + return types.BuildJsonElementsFromSnippet(snippet, customNameMap) } // CreateJsonStructure creates a new JSON structure unit in the MPR. 
-func (w *Writer) CreateJsonStructure(js *JsonStructure) error { +func (w *Writer) CreateJsonStructure(js *types.JsonStructure) error { if js.ID == "" { js.ID = model.ID(generateUUID()) } @@ -83,7 +39,7 @@ func (w *Writer) DeleteJsonStructure(id string) error { return w.deleteUnit(id) } -func serializeJsonStructure(js *JsonStructure) ([]byte, error) { +func serializeJsonStructure(js *types.JsonStructure) ([]byte, error) { elements := bson.A{int32(2)} for _, elem := range js.Elements { elements = append(elements, serializeJsonElement(elem)) @@ -106,7 +62,7 @@ func serializeJsonStructure(js *JsonStructure) ([]byte, error) { // serializeJsonElement serializes a single JsonElement to BSON. // Note: JsonStructures$JsonElement uses int32 for numeric properties (MinOccurs, MaxOccurs, etc.), // unlike most other Mendix document types which use int64. Verified against Studio Pro-generated BSON. -func serializeJsonElement(elem *JsonElement) bson.D { +func serializeJsonElement(elem *types.JsonElement) bson.D { children := bson.A{int32(2)} for _, child := range elem.Children { children = append(children, serializeJsonElement(child)) @@ -134,318 +90,4 @@ func serializeJsonElement(elem *JsonElement) bson.D { } } -// BuildJsonElementsFromSnippet parses a JSON snippet and builds the element tree -// that Mendix Studio Pro would generate. Returns the root element. -// The optional customNameMap maps JSON keys to custom ExposedNames (as set in -// Studio Pro's "Custom name" column). Unmapped keys use auto-generated names. 
-func BuildJsonElementsFromSnippet(snippet string, customNameMap map[string]string) ([]*JsonElement, error) { - // Validate JSON - if !json.Valid([]byte(snippet)) { - return nil, fmt.Errorf("invalid JSON snippet") - } - - // Detect root type (object or array) - dec := json.NewDecoder(strings.NewReader(snippet)) - tok, err := dec.Token() - if err != nil { - return nil, fmt.Errorf("failed to parse JSON snippet: %w", err) - } - - b := &snippetBuilder{customNameMap: customNameMap} - tracker := &nameTracker{seen: make(map[string]int)} - - switch tok { - case json.Delim('{'): - root := b.buildElementFromRawObject("Root", "(Object)", snippet, tracker) - root.MinOccurs = 0 - root.MaxOccurs = 0 - root.Nillable = true - return []*JsonElement{root}, nil - - case json.Delim('['): - root := b.buildElementFromRawRootArray("Root", "(Array)", snippet, tracker) - root.MinOccurs = 0 - root.MaxOccurs = 0 - root.Nillable = true - return []*JsonElement{root}, nil - - default: - return nil, fmt.Errorf("JSON snippet must be an object or array at root level") - } -} - -// snippetBuilder holds state for building the element tree from a JSON snippet. -type snippetBuilder struct { - customNameMap map[string]string // JSON key → custom ExposedName -} - -// reservedExposedNames are element names that Mendix rejects as ExposedName values. -// Studio Pro handles these by prefixing with underscore and keeping original case. -var reservedExposedNames = map[string]bool{ - "Id": true, "Type": true, -} - -// resolveExposedName returns the custom name if mapped, otherwise capitalizes the JSON key. -// Reserved names (Id, Type, Name) are prefixed with underscore to match Studio Pro behavior. 
-func (b *snippetBuilder) resolveExposedName(jsonKey string) string { - if b.customNameMap != nil { - if custom, ok := b.customNameMap[jsonKey]; ok { - return custom - } - } - name := capitalizeFirst(jsonKey) - if reservedExposedNames[name] { - return "_" + jsonKey - } - return name -} - -// nameTracker tracks used ExposedNames at each level to handle duplicates. -type nameTracker struct { - seen map[string]int -} - -func (t *nameTracker) uniqueName(base string) string { - t.seen[base]++ - count := t.seen[base] - if count == 1 { - return base - } - return fmt.Sprintf("%s_%d", base, count) -} - -func (t *nameTracker) child() *nameTracker { - return &nameTracker{seen: make(map[string]int)} -} - -// capitalizeFirst capitalizes the first letter of a string for ExposedName. -func capitalizeFirst(s string) string { - if s == "" { - return s - } - runes := []rune(s) - runes[0] = unicode.ToUpper(runes[0]) - return string(runes) -} - -// buildElementFromRawObject builds an Object element by decoding a raw JSON object string, -// preserving the original key order (Go's map[string]any loses order). 
-func (b *snippetBuilder) buildElementFromRawObject(exposedName, path, rawJSON string, tracker *nameTracker) *JsonElement { - elem := &JsonElement{ - ExposedName: exposedName, - Path: path, - ElementType: "Object", - PrimitiveType: "Unknown", - MinOccurs: 0, - MaxOccurs: 0, - Nillable: true, - MaxLength: -1, - FractionDigits: -1, - TotalDigits: -1, - } - - childTracker := tracker.child() - - // Decode with key order preserved - dec := json.NewDecoder(strings.NewReader(rawJSON)) - if _, err := dec.Token(); err != nil { // opening { - return elem - } - for dec.More() { - tok, err := dec.Token() - if err != nil { - break - } - key, ok := tok.(string) - if !ok { - continue - } - // Capture the raw value to pass down for nested objects/arrays - var rawVal json.RawMessage - if err := dec.Decode(&rawVal); err != nil { - break - } - - childName := childTracker.uniqueName(b.resolveExposedName(key)) - childPath := path + "|" + key - child := b.buildElementFromRawValue(childName, childPath, key, rawVal, childTracker) - elem.Children = append(elem.Children, child) - } - - return elem -} - -// buildElementFromRawValue inspects a json.RawMessage to determine its type and build the element. 
-func (b *snippetBuilder) buildElementFromRawValue(exposedName, path, jsonKey string, raw json.RawMessage, tracker *nameTracker) *JsonElement { - trimmed := strings.TrimSpace(string(raw)) - - // Object — recurse with raw JSON to preserve key order - if len(trimmed) > 0 && trimmed[0] == '{' { - return b.buildElementFromRawObject(exposedName, path, trimmed, tracker) - } - - // Array - if len(trimmed) > 0 && trimmed[0] == '[' { - return b.buildElementFromRawArray(exposedName, path, jsonKey, trimmed, tracker) - } - - // Primitive — unmarshal to determine type - var val any - json.Unmarshal(raw, &val) - - switch v := val.(type) { - case string: - primitiveType := "String" - if iso8601Pattern.MatchString(v) { - primitiveType = "DateTime" - v = normalizeDateTimeValue(v) - } - return buildValueElement(exposedName, path, primitiveType, fmt.Sprintf("%q", v)) - case float64: - // Check the raw JSON text for a decimal point — Go's %v drops ".0" from 41850.0 - if v == math.Trunc(v) && !strings.Contains(trimmed, ".") { - return buildValueElement(exposedName, path, "Integer", fmt.Sprintf("%v", int64(v))) - } - return buildValueElement(exposedName, path, "Decimal", fmt.Sprintf("%v", v)) - case bool: - return buildValueElement(exposedName, path, "Boolean", fmt.Sprintf("%v", v)) - case nil: - // JSON null → Unknown primitive type (matches Studio Pro) - return buildValueElement(exposedName, path, "Unknown", "") - default: - return buildValueElement(exposedName, path, "String", "") - } -} - -// buildElementFromRawRootArray builds a root-level Array element. -// Studio Pro names the child object "JsonObject" (not "RootItem") for root arrays. 
-func (b *snippetBuilder) buildElementFromRawRootArray(exposedName, path, rawJSON string, tracker *nameTracker) *JsonElement { - arrayElem := &JsonElement{ - ExposedName: exposedName, - Path: path, - ElementType: "Array", - PrimitiveType: "Unknown", - MinOccurs: 0, - MaxOccurs: 0, - Nillable: true, - MaxLength: -1, - FractionDigits: -1, - TotalDigits: -1, - } - - dec := json.NewDecoder(strings.NewReader(rawJSON)) - dec.Token() // opening [ - if dec.More() { - var firstItem json.RawMessage - dec.Decode(&firstItem) - - itemPath := path + "|(Object)" - trimmed := strings.TrimSpace(string(firstItem)) - - if len(trimmed) > 0 && trimmed[0] == '{' { - itemElem := b.buildElementFromRawObject("JsonObject", itemPath, trimmed, tracker) - itemElem.MinOccurs = 0 - itemElem.MaxOccurs = 0 - itemElem.Nillable = true - arrayElem.Children = append(arrayElem.Children, itemElem) - } else { - child := b.buildElementFromRawValue("JsonObject", itemPath, "", firstItem, tracker) - child.MinOccurs = 0 - child.MaxOccurs = 0 - arrayElem.Children = append(arrayElem.Children, child) - } - } - - return arrayElem -} - -// buildElementFromRawArray builds an Array element, using the first item's raw JSON for ordering. -// For primitive arrays (strings, numbers), Studio Pro creates a Wrapper element with a Value child. 
-func (b *snippetBuilder) buildElementFromRawArray(exposedName, path, jsonKey, rawJSON string, tracker *nameTracker) *JsonElement { - arrayElem := &JsonElement{ - ExposedName: exposedName, - Path: path, - ElementType: "Array", - PrimitiveType: "Unknown", - MinOccurs: 0, - MaxOccurs: 0, - Nillable: true, - MaxLength: -1, - FractionDigits: -1, - TotalDigits: -1, - } - - // Decode array and get first element as raw JSON - dec := json.NewDecoder(strings.NewReader(rawJSON)) - dec.Token() // opening [ - if dec.More() { - var firstItem json.RawMessage - dec.Decode(&firstItem) - trimmed := strings.TrimSpace(string(firstItem)) - - if len(trimmed) > 0 && trimmed[0] == '{' { - // Object array: child is NameItem object - itemName := exposedName + "Item" - itemPath := path + "|(Object)" - itemElem := b.buildElementFromRawObject(itemName, itemPath, trimmed, tracker) - itemElem.MinOccurs = 0 - itemElem.MaxOccurs = -1 - itemElem.Nillable = true - arrayElem.Children = append(arrayElem.Children, itemElem) - } else { - // Primitive array: Studio Pro wraps in a Wrapper element with singular name - // e.g., tags: ["a","b"] → Tag (Wrapper) → Value (String) - wrapperName := singularize(exposedName) - wrapperPath := path + "|(Object)" - wrapper := &JsonElement{ - ExposedName: wrapperName, - Path: wrapperPath, - ElementType: "Wrapper", - PrimitiveType: "Unknown", - MinOccurs: 0, - MaxOccurs: 0, - Nillable: true, - MaxLength: -1, - FractionDigits: -1, - TotalDigits: -1, - } - valueElem := b.buildElementFromRawValue("Value", wrapperPath+"|", jsonKey, firstItem, tracker) - valueElem.MinOccurs = 0 - valueElem.MaxOccurs = 0 - wrapper.Children = append(wrapper.Children, valueElem) - arrayElem.Children = append(arrayElem.Children, wrapper) - } - } - - return arrayElem -} - -// singularize returns a simple singular form by stripping trailing "s". -// Handles common cases: Tags→Tag, Items→Item, Addresses→Addresse. 
-func singularize(s string) string { - if len(s) > 1 && strings.HasSuffix(s, "s") { - return s[:len(s)-1] - } - return s -} - -func buildValueElement(exposedName, path, primitiveType, originalValue string) *JsonElement { - maxLength := -1 - if primitiveType == "String" { - maxLength = 0 - } - return &JsonElement{ - ExposedName: exposedName, - Path: path, - ElementType: "Value", - PrimitiveType: primitiveType, - MinOccurs: 0, - MaxOccurs: 0, - Nillable: true, - MaxLength: maxLength, - FractionDigits: -1, - TotalDigits: -1, - OriginalValue: originalValue, - } -} From dbfb477cf42a18dbd1ebc09f1bd2d7b14260fc8f Mon Sep 17 00:00:00 2001 From: Andrew Vasilyev Date: Sun, 19 Apr 2026 19:46:16 +0200 Subject: [PATCH 03/16] =?UTF-8?q?fix:=20address=20copilot=20review=20?= =?UTF-8?q?=E2=80=94=20rand.Read=20error,=20folder=20cache=20bug,=20IDToBs?= =?UTF-8?q?onBinary=20dedup?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- mdl/executor/cmd_pages_builder.go | 3 ++- mdl/types/id.go | 4 +++- sdk/mpr/utils.go | 9 +-------- 3 files changed, 6 insertions(+), 10 deletions(-) diff --git a/mdl/executor/cmd_pages_builder.go b/mdl/executor/cmd_pages_builder.go index ed74a533..1bb43fcf 100644 --- a/mdl/executor/cmd_pages_builder.go +++ b/mdl/executor/cmd_pages_builder.go @@ -303,12 +303,13 @@ func (pb *pageBuilder) resolveFolder(folderPath string) (model.ID, error) { if err != nil { return "", mdlerrors.NewBackend(fmt.Sprintf("create folder %s", part), err) } + parentContainerID := currentContainerID currentContainerID = newFolderID // Add to cache pb.foldersCache = append(pb.foldersCache, &types.FolderInfo{ ID: newFolderID, - ContainerID: currentContainerID, + ContainerID: parentContainerID, Name: part, }) } diff --git a/mdl/types/id.go b/mdl/types/id.go index 528f145a..85a83246 100644 --- a/mdl/types/id.go +++ b/mdl/types/id.go @@ -13,7 +13,9 @@ import ( // GenerateID generates a new unique UUID v4 for model elements. 
func GenerateID() string { b := make([]byte, 16) - _, _ = rand.Read(b) + if _, err := rand.Read(b); err != nil { + panic("crypto/rand.Read failed: " + err.Error()) + } b[6] = (b[6] & 0x0f) | 0x40 // Version 4 b[8] = (b[8] & 0x3f) | 0x80 // Variant is 10 diff --git a/sdk/mpr/utils.go b/sdk/mpr/utils.go index 1acc6739..fc3e317b 100644 --- a/sdk/mpr/utils.go +++ b/sdk/mpr/utils.go @@ -24,14 +24,7 @@ func BlobToUUID(data []byte) string { // IDToBsonBinary converts a UUID string to a BSON binary value. func IDToBsonBinary(id string) primitive.Binary { - blob := types.UUIDToBlob(id) - if blob == nil || len(blob) != 16 { - blob = types.UUIDToBlob(types.GenerateID()) - } - return primitive.Binary{ - Subtype: 0x00, - Data: blob, - } + return idToBsonBinary(id) } // BsonBinaryToID converts a BSON binary value to a UUID string. From 62ce850f246daf6a2368be6f11d75500adfa8d75 Mon Sep 17 00:00:00 2001 From: Andrew Vasilyev Date: Sun, 19 Apr 2026 19:52:40 +0200 Subject: [PATCH 04/16] fix: add type aliases in sdk/mpr for backward compatibility --- sdk/mpr/reader_types.go | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/sdk/mpr/reader_types.go b/sdk/mpr/reader_types.go index 6b9099a9..1bf115a5 100644 --- a/sdk/mpr/reader_types.go +++ b/sdk/mpr/reader_types.go @@ -14,6 +14,26 @@ import ( "go.mongodb.org/mongo-driver/bson" ) +// Type aliases for backward compatibility — these types are now defined in mdl/types. 
+type ( + JavaAction = types.JavaAction + JavaScriptAction = types.JavaScriptAction + NavigationDocument = types.NavigationDocument + NavigationProfile = types.NavigationProfile + NavHomePage = types.NavHomePage + NavRoleBasedHome = types.NavRoleBasedHome + NavMenuItem = types.NavMenuItem + NavOfflineEntity = types.NavOfflineEntity + JsonStructure = types.JsonStructure + JsonElement = types.JsonElement + ImageCollection = types.ImageCollection + Image = types.Image + FolderInfo = types.FolderInfo + UnitInfo = types.UnitInfo + RawUnit = types.RawUnit + ProjectVersion = types.ProjectVersion +) + // ListJavaActions returns all Java actions in the project. func (r *Reader) ListJavaActions() ([]*types.JavaAction, error) { units, err := r.listUnitsByType("JavaActions$JavaAction") From 349b5797258d34ddc7d36544fe3746e7ddc6d1cd Mon Sep 17 00:00:00 2001 From: Andrew Vasilyev Date: Sun, 19 Apr 2026 20:00:38 +0200 Subject: [PATCH 05/16] fix: optimize UUIDToBlob allocation, sort async map iterations --- mdl/types/asyncapi.go | 29 ++++++++++++++++++++++++----- mdl/types/id.go | 3 ++- 2 files changed, 26 insertions(+), 6 deletions(-) diff --git a/mdl/types/asyncapi.go b/mdl/types/asyncapi.go index 692eaae6..9baf57e3 100644 --- a/mdl/types/asyncapi.go +++ b/mdl/types/asyncapi.go @@ -4,6 +4,7 @@ package types import ( "fmt" + "sort" "strings" "gopkg.in/yaml.v3" @@ -61,8 +62,14 @@ func ParseAsyncAPI(yamlStr string) (*AsyncAPIDocument, error) { Description: raw.Info.Description, } - // Resolve messages from components - for name, msg := range raw.Components.Messages { + // Resolve messages from components (sorted for deterministic output) + messageNames := make([]string, 0, len(raw.Components.Messages)) + for name := range raw.Components.Messages { + messageNames = append(messageNames, name) + } + sort.Strings(messageNames) + for _, name := range messageNames { + msg := raw.Components.Messages[name] resolved := &AsyncAPIMessage{ Name: name, Title: msg.Title, @@ -88,8 +95,14 @@ func 
ParseAsyncAPI(yamlStr string) (*AsyncAPIDocument, error) { doc.Messages = append(doc.Messages, resolved) } - // Resolve channels - for channelName, channel := range raw.Channels { + // Resolve channels (sorted for deterministic output) + channelNames := make([]string, 0, len(raw.Channels)) + for name := range raw.Channels { + channelNames = append(channelNames, name) + } + sort.Strings(channelNames) + for _, channelName := range channelNames { + channel := raw.Channels[channelName] if channel.Subscribe != nil { msgName := "" if channel.Subscribe.Message.Ref != "" { @@ -138,8 +151,14 @@ func asyncRefName(ref string) string { } func resolveAsyncSchemaProperties(schema yamlAsyncSchema) []*AsyncAPIProperty { + names := make([]string, 0, len(schema.Properties)) + for name := range schema.Properties { + names = append(names, name) + } + sort.Strings(names) var props []*AsyncAPIProperty - for name, prop := range schema.Properties { + for _, name := range names { + prop := schema.Properties[name] props = append(props, &AsyncAPIProperty{ Name: name, Type: prop.Type, diff --git a/mdl/types/id.go b/mdl/types/id.go index 85a83246..f3039b21 100644 --- a/mdl/types/id.go +++ b/mdl/types/id.go @@ -54,9 +54,10 @@ func UUIDToBlob(uuid string) []byte { return nil } var clean strings.Builder + clean.Grow(32) for _, c := range uuid { if c != '-' { - clean.WriteString(string(c)) + clean.WriteByte(byte(c)) } } decoded, err := hex.DecodeString(clean.String()) From 8282e9270572ee83fa2df9beef644f24252c1768 Mon Sep 17 00:00:00 2001 From: Andrew Vasilyev Date: Sun, 19 Apr 2026 20:08:14 +0200 Subject: [PATCH 06/16] fix: use local type aliases in sdk/mpr public signatures --- sdk/mpr/parser_misc.go | 51 +++++++++++++++---------------- sdk/mpr/writer_imagecollection.go | 5 ++- sdk/mpr/writer_jsonstructure.go | 8 ++--- 3 files changed, 31 insertions(+), 33 deletions(-) diff --git a/sdk/mpr/parser_misc.go b/sdk/mpr/parser_misc.go index 662aa404..6e4cdb30 100644 --- a/sdk/mpr/parser_misc.go +++ 
b/sdk/mpr/parser_misc.go @@ -9,7 +9,6 @@ import ( "path/filepath" "strings" - "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/javaactions" "github.com/mendixlabs/mxcli/sdk/pages" @@ -81,7 +80,7 @@ func (r *Reader) parseSnippet(unitID, containerID string, contents []byte) (*pag } // parseJavaAction parses Java action contents from BSON. -func (r *Reader) parseJavaAction(unitID, containerID string, contents []byte) (*types.JavaAction, error) { +func (r *Reader) parseJavaAction(unitID, containerID string, contents []byte) (*JavaAction, error) { contents, err := r.resolveContents(unitID, contents) if err != nil { return nil, err @@ -92,7 +91,7 @@ func (r *Reader) parseJavaAction(unitID, containerID string, contents []byte) (* return nil, fmt.Errorf("failed to unmarshal BSON: %w", err) } - ja := &types.JavaAction{} + ja := &JavaAction{} ja.ID = model.ID(unitID) ja.TypeName = "JavaActions$JavaAction" ja.ContainerID = model.ID(containerID) @@ -138,7 +137,7 @@ func WriteJSON(element any) ([]byte, error) { } // parseJavaScriptAction parses JavaScript action contents from BSON. 
-func (r *Reader) parseJavaScriptAction(unitID, containerID string, contents []byte) (*types.JavaScriptAction, error) { +func (r *Reader) parseJavaScriptAction(unitID, containerID string, contents []byte) (*JavaScriptAction, error) { contents, err := r.resolveContents(unitID, contents) if err != nil { return nil, err @@ -149,7 +148,7 @@ func (r *Reader) parseJavaScriptAction(unitID, containerID string, contents []by return nil, fmt.Errorf("failed to unmarshal BSON: %w", err) } - jsa := &types.JavaScriptAction{} + jsa := &JavaScriptAction{} jsa.ID = model.ID(unitID) jsa.TypeName = "JavaScriptActions$JavaScriptAction" jsa.ContainerID = model.ID(containerID) @@ -250,7 +249,7 @@ func (r *Reader) parseJavaScriptAction(unitID, containerID string, contents []by } // ReadJavaScriptActionByName reads a JavaScript action by qualified name (Module.ActionName). -func (r *Reader) ReadJavaScriptActionByName(qualifiedName string) (*types.JavaScriptAction, error) { +func (r *Reader) ReadJavaScriptActionByName(qualifiedName string) (*JavaScriptAction, error) { units, err := r.listUnitsByType("JavaScriptActions$JavaScriptAction") if err != nil { return nil, err @@ -364,7 +363,7 @@ func (r *Reader) parsePageTemplate(unitID, containerID string, contents []byte) } // parseNavigationDocument parses navigation document contents from BSON. 
-func (r *Reader) parseNavigationDocument(unitID, containerID string, contents []byte) (*types.NavigationDocument, error) { +func (r *Reader) parseNavigationDocument(unitID, containerID string, contents []byte) (*NavigationDocument, error) { contents, err := r.resolveContents(unitID, contents) if err != nil { return nil, err @@ -375,7 +374,7 @@ func (r *Reader) parseNavigationDocument(unitID, containerID string, contents [] return nil, fmt.Errorf("failed to unmarshal BSON: %w", err) } - nav := &types.NavigationDocument{} + nav := &NavigationDocument{} nav.ID = model.ID(unitID) nav.TypeName = "Navigation$NavigationDocument" nav.ContainerID = model.ID(containerID) @@ -400,9 +399,9 @@ func (r *Reader) parseNavigationDocument(unitID, containerID string, contents [] } // parseNavigationProfile parses a single navigation profile from BSON. -func parseNavigationProfile(raw map[string]any) *types.NavigationProfile { +func parseNavigationProfile(raw map[string]any) *NavigationProfile { typeName := extractString(raw["$Type"]) - profile := &types.NavigationProfile{ + profile := &NavigationProfile{ Name: extractString(raw["Name"]), Kind: extractString(raw["Kind"]), } @@ -414,13 +413,13 @@ func parseNavigationProfile(raw map[string]any) *types.NavigationProfile { page := extractString(hp["HomePagePage"]) nanoflow := extractString(hp["HomePageNanoflow"]) if page != "" || nanoflow != "" { - profile.HomePage = &types.NavHomePage{Page: page, Microflow: nanoflow} + profile.HomePage = &NavHomePage{Page: page, Microflow: nanoflow} } } // Native role-based home pages for _, item := range extractBsonArray(raw["RoleBasedNativeHomePages"]) { if rbMap, ok := item.(map[string]any); ok { - rbh := &types.NavRoleBasedHome{ + rbh := &NavRoleBasedHome{ UserRole: extractString(rbMap["UserRole"]), Page: extractString(rbMap["HomePagePage"]), Microflow: extractString(rbMap["HomePageNanoflow"]), @@ -446,13 +445,13 @@ func parseNavigationProfile(raw map[string]any) *types.NavigationProfile { page := 
extractString(hp["Page"]) mf := extractString(hp["Microflow"]) if page != "" || mf != "" { - profile.HomePage = &types.NavHomePage{Page: page, Microflow: mf} + profile.HomePage = &NavHomePage{Page: page, Microflow: mf} } } // Role-based home pages (stored as "HomeItems") for _, item := range extractBsonArray(raw["HomeItems"]) { if rbMap, ok := item.(map[string]any); ok { - rbh := &types.NavRoleBasedHome{ + rbh := &NavRoleBasedHome{ UserRole: extractString(rbMap["UserRole"]), Page: extractString(rbMap["Page"]), Microflow: extractString(rbMap["Microflow"]), @@ -489,7 +488,7 @@ func parseNavigationProfile(raw map[string]any) *types.NavigationProfile { // Offline entity configs (both web and native) for _, item := range extractBsonArray(raw["OfflineEntityConfigs"]) { if oeMap, ok := item.(map[string]any); ok { - oe := &types.NavOfflineEntity{ + oe := &NavOfflineEntity{ Entity: extractString(oeMap["Entity"]), SyncMode: extractString(oeMap["SyncMode"]), Constraint: extractString(oeMap["Constraint"]), @@ -504,8 +503,8 @@ func parseNavigationProfile(raw map[string]any) *types.NavigationProfile { } // parseNavMenuItem parses a Menus$MenuItem from BSON. -func parseNavMenuItem(raw map[string]any) *types.NavMenuItem { - mi := &types.NavMenuItem{} +func parseNavMenuItem(raw map[string]any) *NavMenuItem { + mi := &NavMenuItem{} // Extract caption text (Caption → Items → first Translation → Text) if caption, ok := raw["Caption"].(map[string]any); ok { @@ -553,8 +552,8 @@ func parseNavMenuItem(raw map[string]any) *types.NavMenuItem { } // parseNavMenuItemFromBottomBar parses a NativePages$BottomBarItem as a NavMenuItem. 
-func parseNavMenuItemFromBottomBar(raw map[string]any) *types.NavMenuItem { - mi := &types.NavMenuItem{} +func parseNavMenuItemFromBottomBar(raw map[string]any) *NavMenuItem { + mi := &NavMenuItem{} if caption, ok := raw["Caption"].(map[string]any); ok { mi.Caption = extractTextFromBson(caption) } @@ -590,7 +589,7 @@ func extractTextFromBson(raw map[string]any) string { } // parseImageCollection parses image collection contents from BSON. -func (r *Reader) parseImageCollection(unitID, containerID string, contents []byte) (*types.ImageCollection, error) { +func (r *Reader) parseImageCollection(unitID, containerID string, contents []byte) (*ImageCollection, error) { contents, err := r.resolveContents(unitID, contents) if err != nil { return nil, err @@ -601,7 +600,7 @@ func (r *Reader) parseImageCollection(unitID, containerID string, contents []byt return nil, fmt.Errorf("failed to unmarshal BSON: %w", err) } - ic := &types.ImageCollection{} + ic := &ImageCollection{} ic.ID = model.ID(unitID) ic.TypeName = "Images$ImageCollection" ic.ContainerID = model.ID(containerID) @@ -620,7 +619,7 @@ func (r *Reader) parseImageCollection(unitID, containerID string, contents []byt if images, ok := raw["Images"].(bson.A); ok { for _, img := range images { if imgMap, ok := img.(map[string]any); ok { - image := types.Image{} + image := Image{} if id := extractID(imgMap["$ID"]); id != "" { image.ID = model.ID(id) } @@ -644,7 +643,7 @@ func (r *Reader) parseImageCollection(unitID, containerID string, contents []byt } // parseJsonStructure parses JSON structure contents from BSON. 
-func (r *Reader) parseJsonStructure(unitID, containerID string, contents []byte) (*types.JsonStructure, error) { +func (r *Reader) parseJsonStructure(unitID, containerID string, contents []byte) (*JsonStructure, error) { contents, err := r.resolveContents(unitID, contents) if err != nil { return nil, err @@ -655,7 +654,7 @@ func (r *Reader) parseJsonStructure(unitID, containerID string, contents []byte) return nil, fmt.Errorf("failed to unmarshal BSON: %w", err) } - js := &types.JsonStructure{} + js := &JsonStructure{} js.ID = model.ID(unitID) js.TypeName = "JsonStructures$JsonStructure" js.ContainerID = model.ID(containerID) @@ -689,8 +688,8 @@ func (r *Reader) parseJsonStructure(unitID, containerID string, contents []byte) } // parseJsonElement recursively parses a JsonStructures$JsonElement from BSON. -func parseJsonElement(raw map[string]any) *types.JsonElement { - elem := &types.JsonElement{ +func parseJsonElement(raw map[string]any) *JsonElement { + elem := &JsonElement{ MaxLength: -1, FractionDigits: -1, TotalDigits: -1, diff --git a/sdk/mpr/writer_imagecollection.go b/sdk/mpr/writer_imagecollection.go index 42389328..88a777af 100644 --- a/sdk/mpr/writer_imagecollection.go +++ b/sdk/mpr/writer_imagecollection.go @@ -3,14 +3,13 @@ package mpr import ( - "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson/primitive" ) // CreateImageCollection creates a new empty image collection unit in the MPR. 
-func (w *Writer) CreateImageCollection(ic *types.ImageCollection) error { +func (w *Writer) CreateImageCollection(ic *ImageCollection) error { if ic.ID == "" { ic.ID = model.ID(generateUUID()) } @@ -32,7 +31,7 @@ func (w *Writer) DeleteImageCollection(id string) error { return w.deleteUnit(id) } -func serializeImageCollection(ic *types.ImageCollection) ([]byte, error) { +func serializeImageCollection(ic *ImageCollection) ([]byte, error) { // Images array always starts with the array marker int32(3) images := bson.A{int32(3)} for i := range ic.Images { diff --git a/sdk/mpr/writer_jsonstructure.go b/sdk/mpr/writer_jsonstructure.go index d21e6b47..ee5b9428 100644 --- a/sdk/mpr/writer_jsonstructure.go +++ b/sdk/mpr/writer_jsonstructure.go @@ -12,12 +12,12 @@ import ( func PrettyPrintJSON(s string) string { return types.PrettyPrintJSON(s) } // BuildJsonElementsFromSnippet delegates to types.BuildJsonElementsFromSnippet. -func BuildJsonElementsFromSnippet(snippet string, customNameMap map[string]string) ([]*types.JsonElement, error) { +func BuildJsonElementsFromSnippet(snippet string, customNameMap map[string]string) ([]*JsonElement, error) { return types.BuildJsonElementsFromSnippet(snippet, customNameMap) } // CreateJsonStructure creates a new JSON structure unit in the MPR. -func (w *Writer) CreateJsonStructure(js *types.JsonStructure) error { +func (w *Writer) CreateJsonStructure(js *JsonStructure) error { if js.ID == "" { js.ID = model.ID(generateUUID()) } @@ -39,7 +39,7 @@ func (w *Writer) DeleteJsonStructure(id string) error { return w.deleteUnit(id) } -func serializeJsonStructure(js *types.JsonStructure) ([]byte, error) { +func serializeJsonStructure(js *JsonStructure) ([]byte, error) { elements := bson.A{int32(2)} for _, elem := range js.Elements { elements = append(elements, serializeJsonElement(elem)) @@ -62,7 +62,7 @@ func serializeJsonStructure(js *types.JsonStructure) ([]byte, error) { // serializeJsonElement serializes a single JsonElement to BSON. 
// Note: JsonStructures$JsonElement uses int32 for numeric properties (MinOccurs, MaxOccurs, etc.), // unlike most other Mendix document types which use int64. Verified against Studio Pro-generated BSON. -func serializeJsonElement(elem *types.JsonElement) bson.D { +func serializeJsonElement(elem *JsonElement) bson.D { children := bson.A{int32(2)} for _, child := range elem.Children { children = append(children, serializeJsonElement(child)) From 2df1c831f5e49471fb3dfa65aaffd97a3ff5ddfa Mon Sep 17 00:00:00 2001 From: Andrew Vasilyev Date: Sun, 19 Apr 2026 20:14:37 +0200 Subject: [PATCH 07/16] fix: iterate bytes in UUIDToBlob, use local aliases in tests --- mdl/types/id.go | 6 +++--- sdk/mpr/writer_imagecollection_test.go | 5 ++--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/mdl/types/id.go b/mdl/types/id.go index f3039b21..ea7299ec 100644 --- a/mdl/types/id.go +++ b/mdl/types/id.go @@ -55,9 +55,9 @@ func UUIDToBlob(uuid string) []byte { } var clean strings.Builder clean.Grow(32) - for _, c := range uuid { - if c != '-' { - clean.WriteByte(byte(c)) + for i := 0; i < len(uuid); i++ { + if uuid[i] != '-' { + clean.WriteByte(uuid[i]) } } decoded, err := hex.DecodeString(clean.String()) diff --git a/sdk/mpr/writer_imagecollection_test.go b/sdk/mpr/writer_imagecollection_test.go index 22d89490..d7e0442e 100644 --- a/sdk/mpr/writer_imagecollection_test.go +++ b/sdk/mpr/writer_imagecollection_test.go @@ -5,13 +5,12 @@ package mpr import ( "testing" - "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "go.mongodb.org/mongo-driver/bson" ) func TestSerializeImageCollection_EmptyImages(t *testing.T) { - ic := &types.ImageCollection{ + ic := &ImageCollection{ BaseElement: model.BaseElement{ID: "ic-test-1"}, ContainerID: model.ID("module-id-1"), Name: "TestIcons", @@ -59,7 +58,7 @@ func TestSerializeImageCollection_EmptyImages(t *testing.T) { } func TestSerializeImageCollection_DefaultExportLevel(t *testing.T) { - ic := 
&types.ImageCollection{ + ic := &ImageCollection{ BaseElement: model.BaseElement{ID: "ic-test-2"}, ContainerID: model.ID("module-id-1"), Name: "Icons", From f2c8e12bb8a5cda3df3e652bcb0ad19147cf69b5 Mon Sep 17 00:00:00 2001 From: Andrew Vasilyev Date: Sun, 19 Apr 2026 20:20:05 +0200 Subject: [PATCH 08/16] add tests for mdl/types and conversion layer - id_test.go: UUID generation, roundtrip, validation, hash - json_utils_test.go: pretty-print, datetime normalization, snippet builder - edmx_test.go: OData4 parsing, enums, capabilities, FindEntityType - asyncapi_test.go: parsing, sorted channels/messages, FindMessage - convert_test.go: prove sdk/mpr type aliases are identical to mdl/types - fix normalizeDateTimeValue matching '-' in date portion (search from idx 19+) --- mdl/backend/mpr/convert_test.go | 189 ++++++++++++++++++++++ mdl/types/asyncapi_test.go | 203 ++++++++++++++++++++++++ mdl/types/edmx_test.go | 268 ++++++++++++++++++++++++++++++++ mdl/types/id_test.go | 176 +++++++++++++++++++++ mdl/types/json_utils.go | 10 +- mdl/types/json_utils_test.go | 251 ++++++++++++++++++++++++++++++ 6 files changed, 1094 insertions(+), 3 deletions(-) create mode 100644 mdl/backend/mpr/convert_test.go create mode 100644 mdl/types/asyncapi_test.go create mode 100644 mdl/types/edmx_test.go create mode 100644 mdl/types/id_test.go create mode 100644 mdl/types/json_utils_test.go diff --git a/mdl/backend/mpr/convert_test.go b/mdl/backend/mpr/convert_test.go new file mode 100644 index 00000000..50c61af1 --- /dev/null +++ b/mdl/backend/mpr/convert_test.go @@ -0,0 +1,189 @@ +// SPDX-License-Identifier: Apache-2.0 + +// Package mprbackend_test verifies that the conversion layer between sdk/mpr +// and mdl/types works correctly. Because sdk/mpr types are now type aliases to +// mdl/types (e.g. mpr.JavaAction = types.JavaAction), the "convert" functions +// are effectively deep-copy operations on the same type. 
This test file proves +// the type system is consistent and conversions preserve all fields. +package mprbackend_test + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/types" + "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/mpr" +) + +// TestTypeAliasesAreIdentical proves that sdk/mpr type aliases resolve to the +// same Go types as mdl/types. If these assignments compile, the types are +// identical — which is precisely what the conversion functions rely on. +func TestTypeAliasesAreIdentical(t *testing.T) { + // Each assignment proves the alias: mpr.X == types.X + var _ *types.JavaAction = new(mpr.JavaAction) + var _ *types.JavaScriptAction = new(mpr.JavaScriptAction) + var _ *types.NavigationDocument = new(mpr.NavigationDocument) + var _ *types.NavigationProfile = new(mpr.NavigationProfile) + var _ *types.NavHomePage = new(mpr.NavHomePage) + var _ *types.NavRoleBasedHome = new(mpr.NavRoleBasedHome) + var _ *types.NavMenuItem = new(mpr.NavMenuItem) + var _ *types.NavOfflineEntity = new(mpr.NavOfflineEntity) + var _ *types.JsonStructure = new(mpr.JsonStructure) + var _ *types.JsonElement = new(mpr.JsonElement) + var _ *types.ImageCollection = new(mpr.ImageCollection) + var _ *types.FolderInfo = new(mpr.FolderInfo) + var _ *types.UnitInfo = new(mpr.UnitInfo) + var _ *types.RawUnit = new(mpr.RawUnit) + var _ *types.ProjectVersion = new(mpr.ProjectVersion) + + // Slices are also interchangeable + var typesSlice []*types.FolderInfo + var mprSlice []*mpr.FolderInfo = typesSlice + _ = mprSlice + + var typesJSSlice []*types.JavaScriptAction + var mprJSSlice []*mpr.JavaScriptAction = typesJSSlice + _ = mprJSSlice +} + +// TestFolderInfoSlicePassthrough verifies that a []*mpr.FolderInfo value can +// be used where []*types.FolderInfo is expected, because they are the same type. 
+func TestFolderInfoSlicePassthrough(t *testing.T) { + folders := []*mpr.FolderInfo{ + {ID: model.ID("f1"), ContainerID: model.ID("c1"), Name: "Module"}, + {ID: model.ID("f2"), ContainerID: model.ID("c2"), Name: "Resources"}, + } + + // This compiles because mpr.FolderInfo = types.FolderInfo + var typesFolders []*types.FolderInfo = folders + if len(typesFolders) != 2 { + t.Fatalf("expected 2 folders, got %d", len(typesFolders)) + } + if typesFolders[0].Name != "Module" { + t.Errorf("expected Module, got %q", typesFolders[0].Name) + } +} + +// TestNavigationDocumentFieldPreservation verifies that all fields survive +// when a NavigationDocument created via mpr alias is accessed via types. +func TestNavigationDocumentFieldPreservation(t *testing.T) { + doc := &mpr.NavigationDocument{ + ContainerID: model.ID("c1"), + Name: "Navigation", + Profiles: []*mpr.NavigationProfile{ + { + Name: "Responsive", + Kind: "Responsive", + IsNative: false, + HomePage: &mpr.NavHomePage{Page: "MyFirstModule.Home"}, + RoleBasedHomePages: []*mpr.NavRoleBasedHome{ + {UserRole: "Admin", Page: "Admin.Dashboard"}, + }, + MenuItems: []*mpr.NavMenuItem{ + {Caption: "Home", Page: "Home"}, + }, + OfflineEntities: []*mpr.NavOfflineEntity{ + {Entity: "MyModule.Task", SyncMode: "FullSync"}, + }, + }, + }, + } + + // Access through types — compiles because they're the same type + var typesDoc *types.NavigationDocument = doc + if typesDoc.Name != "Navigation" { + t.Errorf("expected Navigation, got %q", typesDoc.Name) + } + if len(typesDoc.Profiles) != 1 { + t.Fatalf("expected 1 profile, got %d", len(typesDoc.Profiles)) + } + p := typesDoc.Profiles[0] + if p.Kind != "Responsive" { + t.Errorf("expected Responsive, got %q", p.Kind) + } + if p.HomePage.Page != "MyFirstModule.Home" { + t.Errorf("expected home page, got %q", p.HomePage.Page) + } + if len(p.RoleBasedHomePages) != 1 { + t.Errorf("expected 1 role-based home, got %d", len(p.RoleBasedHomePages)) + } + if len(p.MenuItems) != 1 { + 
t.Errorf("expected 1 menu item, got %d", len(p.MenuItems)) + } + if len(p.OfflineEntities) != 1 { + t.Errorf("expected 1 offline entity, got %d", len(p.OfflineEntities)) + } +} + +// TestJsonStructureFieldPreservation verifies JsonStructure + recursive +// JsonElement children survive alias crossing. +func TestJsonStructureFieldPreservation(t *testing.T) { + js := &mpr.JsonStructure{ + ContainerID: model.ID("m1"), + Name: "MyJson", + Documentation: "Test JSON structure", + JsonSnippet: `{"a":1}`, + Elements: []*mpr.JsonElement{ + { + ExposedName: "Root", + Path: "(Object)", + ElementType: "Object", + PrimitiveType: "Unknown", + Children: []*mpr.JsonElement{ + { + ExposedName: "A", + Path: "(Object)|a", + ElementType: "Value", + PrimitiveType: "Integer", + OriginalValue: "1", + }, + }, + }, + }, + } + + var typesJS *types.JsonStructure = js + if typesJS.Name != "MyJson" { + t.Errorf("expected MyJson, got %q", typesJS.Name) + } + if len(typesJS.Elements) != 1 { + t.Fatalf("expected 1 element, got %d", len(typesJS.Elements)) + } + root := typesJS.Elements[0] + if len(root.Children) != 1 { + t.Fatalf("expected 1 child, got %d", len(root.Children)) + } + child := root.Children[0] + if child.PrimitiveType != "Integer" { + t.Errorf("expected Integer, got %q", child.PrimitiveType) + } + if child.OriginalValue != "1" { + t.Errorf("expected original value '1', got %q", child.OriginalValue) + } +} + +// TestImageCollectionFieldPreservation verifies ImageCollection + Image. 
+func TestImageCollectionFieldPreservation(t *testing.T) { + ic := &mpr.ImageCollection{ + ContainerID: model.ID("m1"), + Name: "Images", + Images: []mpr.Image{ + {ID: model.ID("i1"), Name: "logo.png", Format: "png", Data: []byte{0x89, 0x50}}, + }, + } + + var typesIC *types.ImageCollection = ic + if typesIC.Name != "Images" { + t.Errorf("expected Images, got %q", typesIC.Name) + } + if len(typesIC.Images) != 1 { + t.Fatalf("expected 1 image, got %d", len(typesIC.Images)) + } + img := typesIC.Images[0] + if img.Name != "logo.png" { + t.Errorf("expected logo.png, got %q", img.Name) + } + if len(img.Data) != 2 { + t.Errorf("expected 2 bytes, got %d", len(img.Data)) + } +} diff --git a/mdl/types/asyncapi_test.go b/mdl/types/asyncapi_test.go new file mode 100644 index 00000000..f0c07b71 --- /dev/null +++ b/mdl/types/asyncapi_test.go @@ -0,0 +1,203 @@ +// SPDX-License-Identifier: Apache-2.0 + +package types + +import ( + "testing" +) + +func TestParseAsyncAPI_Basic(t *testing.T) { + yaml := `asyncapi: "2.2.0" +info: + title: Order Service + version: "1.0.0" + description: Handles orders +channels: + order/created: + subscribe: + operationId: receiveOrderCreated + message: + $ref: "#/components/messages/OrderCreated" +components: + messages: + OrderCreated: + title: Order Created + description: An order was created + contentType: application/json + payload: + $ref: "#/components/schemas/OrderPayload" + schemas: + OrderPayload: + type: object + properties: + orderId: + type: string + amount: + type: number + format: double +` + + doc, err := ParseAsyncAPI(yaml) + if err != nil { + t.Fatal(err) + } + + if doc.Version != "2.2.0" { + t.Errorf("expected version 2.2.0, got %q", doc.Version) + } + if doc.Title != "Order Service" { + t.Errorf("expected title Order Service, got %q", doc.Title) + } + if doc.DocVersion != "1.0.0" { + t.Errorf("expected doc version 1.0.0, got %q", doc.DocVersion) + } + if doc.Description != "Handles orders" { + t.Errorf("expected description, got 
%q", doc.Description) + } + + // Messages + if len(doc.Messages) != 1 { + t.Fatalf("expected 1 message, got %d", len(doc.Messages)) + } + msg := doc.Messages[0] + if msg.Name != "OrderCreated" { + t.Errorf("expected OrderCreated, got %q", msg.Name) + } + if msg.Title != "Order Created" { + t.Errorf("expected title, got %q", msg.Title) + } + if len(msg.Properties) != 2 { + t.Fatalf("expected 2 properties, got %d", len(msg.Properties)) + } + // Properties should be sorted alphabetically + if msg.Properties[0].Name != "amount" { + t.Errorf("expected first property 'amount' (sorted), got %q", msg.Properties[0].Name) + } + if msg.Properties[1].Name != "orderId" { + t.Errorf("expected second property 'orderId' (sorted), got %q", msg.Properties[1].Name) + } + + // Channels + if len(doc.Channels) != 1 { + t.Fatalf("expected 1 channel, got %d", len(doc.Channels)) + } + ch := doc.Channels[0] + if ch.Name != "order/created" { + t.Errorf("expected channel name, got %q", ch.Name) + } + if ch.OperationType != "subscribe" { + t.Errorf("expected subscribe, got %q", ch.OperationType) + } + if ch.MessageRef != "OrderCreated" { + t.Errorf("expected message ref OrderCreated, got %q", ch.MessageRef) + } +} + +func TestParseAsyncAPI_Empty(t *testing.T) { + _, err := ParseAsyncAPI("") + if err == nil { + t.Error("expected error for empty input") + } +} + +func TestParseAsyncAPI_InvalidYAML(t *testing.T) { + _, err := ParseAsyncAPI("not: valid: yaml: [") + if err == nil { + t.Error("expected error for invalid YAML") + } +} + +func TestParseAsyncAPI_MultipleChannels_Sorted(t *testing.T) { + yaml := `asyncapi: "2.0.0" +info: + title: Test + version: "1.0" +channels: + z/channel: + publish: + operationId: pub + message: + $ref: "#/components/messages/Msg" + a/channel: + subscribe: + operationId: sub + message: + $ref: "#/components/messages/Msg" +components: + messages: + Msg: + title: Test Message +` + + doc, err := ParseAsyncAPI(yaml) + if err != nil { + t.Fatal(err) + } + if 
len(doc.Channels) != 2 { + t.Fatalf("expected 2 channels, got %d", len(doc.Channels)) + } + // Channels should be sorted by name + if doc.Channels[0].Name != "a/channel" { + t.Errorf("expected first channel a/channel (sorted), got %q", doc.Channels[0].Name) + } + if doc.Channels[1].Name != "z/channel" { + t.Errorf("expected second channel z/channel (sorted), got %q", doc.Channels[1].Name) + } +} + +func TestFindMessage(t *testing.T) { + doc := &AsyncAPIDocument{ + Messages: []*AsyncAPIMessage{ + {Name: "OrderCreated"}, + {Name: "OrderUpdated"}, + }, + } + + if got := doc.FindMessage("OrderCreated"); got == nil || got.Name != "OrderCreated" { + t.Error("expected to find OrderCreated") + } + // Case-insensitive + if got := doc.FindMessage("ordercreated"); got == nil { + t.Error("expected case-insensitive match") + } + if got := doc.FindMessage("Missing"); got != nil { + t.Error("expected nil for missing message") + } +} + +func TestParseAsyncAPI_InlinePayload(t *testing.T) { + yaml := `asyncapi: "2.0.0" +info: + title: Test + version: "1.0" +channels: {} +components: + messages: + Inline: + title: Inline Message + payload: + type: object + properties: + field1: + type: string + field2: + type: integer + format: int32 +` + + doc, err := ParseAsyncAPI(yaml) + if err != nil { + t.Fatal(err) + } + msg := doc.Messages[0] + if len(msg.Properties) != 2 { + t.Fatalf("expected 2 properties, got %d", len(msg.Properties)) + } + // Sorted + if msg.Properties[0].Name != "field1" { + t.Errorf("expected field1 first, got %q", msg.Properties[0].Name) + } + if msg.Properties[1].Format != "int32" { + t.Errorf("expected format int32, got %q", msg.Properties[1].Format) + } +} diff --git a/mdl/types/edmx_test.go b/mdl/types/edmx_test.go new file mode 100644 index 00000000..b8d09986 --- /dev/null +++ b/mdl/types/edmx_test.go @@ -0,0 +1,268 @@ +// SPDX-License-Identifier: Apache-2.0 + +package types + +import ( + "testing" +) + +func TestParseEdmx_OData4(t *testing.T) { + xml := ` + + + + 
+ + + + + + + + + + + + + + + + + + + + + + +` + + doc, err := ParseEdmx(xml) + if err != nil { + t.Fatal(err) + } + + if doc.Version != "4.0" { + t.Errorf("expected version 4.0, got %q", doc.Version) + } + if len(doc.Schemas) != 1 { + t.Fatalf("expected 1 schema, got %d", len(doc.Schemas)) + } + if doc.Schemas[0].Namespace != "DefaultNamespace" { + t.Errorf("expected namespace DefaultNamespace, got %q", doc.Schemas[0].Namespace) + } + if len(doc.Schemas[0].EntityTypes) != 2 { + t.Fatalf("expected 2 entity types, got %d", len(doc.Schemas[0].EntityTypes)) + } + + // Check Customer entity + customer := doc.Schemas[0].EntityTypes[0] + if customer.Name != "Customer" { + t.Errorf("expected Customer, got %q", customer.Name) + } + if len(customer.KeyProperties) != 1 || customer.KeyProperties[0] != "ID" { + t.Errorf("expected key [ID], got %v", customer.KeyProperties) + } + if len(customer.Properties) != 2 { + t.Errorf("expected 2 properties, got %d", len(customer.Properties)) + } + + // Check ID property nullable + idProp := customer.Properties[0] + if idProp.Nullable == nil || *idProp.Nullable { + t.Error("expected ID property to be non-nullable") + } + + // Check Name property MaxLength + nameProp := customer.Properties[1] + if nameProp.MaxLength != "200" { + t.Errorf("expected MaxLength 200, got %q", nameProp.MaxLength) + } + + // Check navigation property + if len(customer.NavigationProperties) != 1 { + t.Fatalf("expected 1 nav prop, got %d", len(customer.NavigationProperties)) + } + nav := customer.NavigationProperties[0] + if nav.Name != "Orders" { + t.Errorf("expected Orders, got %q", nav.Name) + } + if !nav.IsMany { + t.Error("expected Orders to be Collection") + } + if nav.TargetType != "Order" { + t.Errorf("expected target type Order, got %q", nav.TargetType) + } + + // Check entity sets + if len(doc.EntitySets) != 2 { + t.Fatalf("expected 2 entity sets, got %d", len(doc.EntitySets)) + } + + // Check action + if len(doc.Actions) != 1 { + t.Fatalf("expected 1 
action, got %d", len(doc.Actions)) + } + action := doc.Actions[0] + if action.Name != "PlaceOrder" { + t.Errorf("expected PlaceOrder, got %q", action.Name) + } + if !action.IsBound { + t.Error("expected bound action") + } + if len(action.Parameters) != 2 { + t.Errorf("expected 2 params, got %d", len(action.Parameters)) + } + if action.ReturnType != "DefaultNamespace.Order" { + t.Errorf("expected return type, got %q", action.ReturnType) + } +} + +func TestParseEdmx_Empty(t *testing.T) { + _, err := ParseEdmx("") + if err == nil { + t.Error("expected error for empty input") + } +} + +func TestParseEdmx_InvalidXML(t *testing.T) { + _, err := ParseEdmx(" + + + + + + + + + + +` + + doc, err := ParseEdmx(xml) + if err != nil { + t.Fatal(err) + } + if len(doc.Schemas[0].EnumTypes) != 1 { + t.Fatalf("expected 1 enum type, got %d", len(doc.Schemas[0].EnumTypes)) + } + enum := doc.Schemas[0].EnumTypes[0] + if enum.Name != "Color" { + t.Errorf("expected Color, got %q", enum.Name) + } + if len(enum.Members) != 3 { + t.Errorf("expected 3 members, got %d", len(enum.Members)) + } +} + +func TestParseEdmx_CapabilityAnnotations(t *testing.T) { + xml := ` + + + + + + + + + + + + + + + +` + + doc, err := ParseEdmx(xml) + if err != nil { + t.Fatal(err) + } + es := doc.EntitySets[0] + if es.Insertable == nil || *es.Insertable { + t.Error("expected Insertable=false") + } + if es.Deletable == nil || *es.Deletable { + t.Error("expected Deletable=false") + } + if es.Updatable != nil { + t.Error("expected Updatable=nil (unspecified)") + } +} + +func TestFindEntityType(t *testing.T) { + doc := &EdmxDocument{ + Schemas: []*EdmSchema{{ + Namespace: "NS", + EntityTypes: []*EdmEntityType{{Name: "Customer"}, {Name: "Order"}}, + }}, + } + + if got := doc.FindEntityType("Customer"); got == nil || got.Name != "Customer" { + t.Error("expected to find Customer") + } + if got := doc.FindEntityType("NS.Customer"); got == nil || got.Name != "Customer" { + t.Error("expected to find Customer with namespace 
prefix") + } + if got := doc.FindEntityType("Missing"); got != nil { + t.Error("expected nil for missing type") + } +} + +func TestResolveNavType(t *testing.T) { + tests := []struct { + input string + typeName string + isMany bool + }{ + {"Collection(NS.Order)", "Order", true}, + {"NS.Customer", "Customer", false}, + {"SimpleType", "SimpleType", false}, + {"Collection(SimpleType)", "SimpleType", true}, + } + for _, tt := range tests { + name, many := ResolveNavType(tt.input) + if name != tt.typeName || many != tt.isMany { + t.Errorf("ResolveNavType(%q) = (%q, %v), want (%q, %v)", + tt.input, name, many, tt.typeName, tt.isMany) + } + } +} + +func TestParseEdmx_AbstractAndOpenType(t *testing.T) { + xml := ` + + + + + + + + + + + +` + + doc, err := ParseEdmx(xml) + if err != nil { + t.Fatal(err) + } + base := doc.Schemas[0].EntityTypes[0] + if !base.IsAbstract { + t.Error("expected IsAbstract=true") + } + if !base.IsOpen { + t.Error("expected IsOpen=true") + } + derived := doc.Schemas[0].EntityTypes[1] + if derived.BaseType != "NS.Base" { + t.Errorf("expected BaseType NS.Base, got %q", derived.BaseType) + } +} diff --git a/mdl/types/id_test.go b/mdl/types/id_test.go new file mode 100644 index 00000000..81e210a2 --- /dev/null +++ b/mdl/types/id_test.go @@ -0,0 +1,176 @@ +// SPDX-License-Identifier: Apache-2.0 + +package types + +import ( + "strings" + "testing" +) + +func TestGenerateID_Format(t *testing.T) { + id := GenerateID() + if !ValidateID(id) { + t.Fatalf("GenerateID() returned invalid UUID: %q", id) + } +} + +func TestGenerateID_Uniqueness(t *testing.T) { + seen := make(map[string]bool) + for i := 0; i < 1000; i++ { + id := GenerateID() + if seen[id] { + t.Fatalf("GenerateID() produced duplicate: %q", id) + } + seen[id] = true + } +} + +func TestGenerateID_V4Bits(t *testing.T) { + id := GenerateID() + // Version nibble at position 14 (0-indexed in hex chars) should be '4' + clean := strings.ReplaceAll(id, "-", "") + if clean[12] != '4' { + t.Errorf("expected 
version nibble '4', got %q in %q", string(clean[12]), id) + } + // Variant nibble at position 16 should be 8, 9, a, or b + v := clean[16] + if v != '8' && v != '9' && v != 'a' && v != 'b' { + t.Errorf("expected variant nibble in [89ab], got %q in %q", string(v), id) + } +} + +func TestGenerateDeterministicID_Stable(t *testing.T) { + id1 := GenerateDeterministicID("test-seed") + id2 := GenerateDeterministicID("test-seed") + if id1 != id2 { + t.Fatalf("expected same ID for same seed, got %q and %q", id1, id2) + } +} + +func TestGenerateDeterministicID_DifferentSeeds(t *testing.T) { + id1 := GenerateDeterministicID("seed-a") + id2 := GenerateDeterministicID("seed-b") + if id1 == id2 { + t.Fatalf("expected different IDs for different seeds") + } +} + +func TestGenerateDeterministicID_Format(t *testing.T) { + id := GenerateDeterministicID("test") + // Should be 36 chars: 8-4-4-4-12 + if len(id) != 36 { + t.Fatalf("expected 36 chars, got %d: %q", len(id), id) + } + parts := strings.Split(id, "-") + if len(parts) != 5 { + t.Fatalf("expected 5 dash-separated parts, got %d", len(parts)) + } + expectedLens := []int{8, 4, 4, 4, 12} + for i, p := range parts { + if len(p) != expectedLens[i] { + t.Errorf("part %d: expected %d chars, got %d", i, expectedLens[i], len(p)) + } + } +} + +func TestBlobToUUID_RoundTrip(t *testing.T) { + uuid := "a1b2c3d4-e5f6-7890-abcd-ef1234567890" + blob := UUIDToBlob(uuid) + if blob == nil { + t.Fatal("UUIDToBlob returned nil") + } + got := BlobToUUID(blob) + if got != uuid { + t.Errorf("roundtrip failed: %q -> blob -> %q", uuid, got) + } +} + +func TestBlobToUUID_Non16Bytes(t *testing.T) { + // Non-16-byte input should return hex-encoded string + data := []byte{0x01, 0x02, 0x03} + got := BlobToUUID(data) + if got != "010203" { + t.Errorf("expected hex fallback '010203', got %q", got) + } +} + +func TestBlobToUUID_Empty(t *testing.T) { + got := BlobToUUID(nil) + if got != "" { + t.Errorf("expected empty string for nil, got %q", got) + } +} + +func 
TestUUIDToBlob_Empty(t *testing.T) { + if got := UUIDToBlob(""); got != nil { + t.Errorf("expected nil for empty string, got %v", got) + } +} + +func TestUUIDToBlob_Invalid(t *testing.T) { + if got := UUIDToBlob("not-a-uuid"); got != nil { + t.Errorf("expected nil for invalid UUID, got %v", got) + } +} + +func TestUUIDToBlob_GUIDByteSwap(t *testing.T) { + // The first 4 bytes should be reversed, next 2 reversed, next 2 reversed, rest same + blob := UUIDToBlob("01020304-0506-0708-090a-0b0c0d0e0f10") + if blob == nil { + t.Fatal("UUIDToBlob returned nil") + } + // First group: 01020304 -> blob[0..3] = 04,03,02,01 + expected := []byte{0x04, 0x03, 0x02, 0x01, 0x06, 0x05, 0x08, 0x07, + 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10} + for i, b := range blob { + if b != expected[i] { + t.Errorf("byte %d: expected 0x%02x, got 0x%02x", i, expected[i], b) + } + } +} + +func TestValidateID(t *testing.T) { + tests := []struct { + input string + valid bool + }{ + {"a1b2c3d4-e5f6-7890-abcd-ef1234567890", true}, + {"00000000-0000-0000-0000-000000000000", true}, + {"AABBCCDD-EEFF-1122-3344-556677889900", true}, + {"", false}, + {"too-short", false}, + {"a1b2c3d4-e5f6-7890-abcd-ef123456789", false}, // 35 chars + {"a1b2c3d4-e5f6-7890-abcd-ef12345678901", false}, // 37 chars + {"a1b2c3d4xe5f6-7890-abcd-ef1234567890", false}, // wrong separator + {"g1b2c3d4-e5f6-7890-abcd-ef1234567890", false}, // invalid hex + } + for _, tt := range tests { + got := ValidateID(tt.input) + if got != tt.valid { + t.Errorf("ValidateID(%q) = %v, want %v", tt.input, got, tt.valid) + } + } +} + +func TestHash_Deterministic(t *testing.T) { + h1 := Hash([]byte("hello")) + h2 := Hash([]byte("hello")) + if h1 != h2 { + t.Fatalf("Hash not deterministic: %q vs %q", h1, h2) + } +} + +func TestHash_DifferentInputs(t *testing.T) { + h1 := Hash([]byte("hello")) + h2 := Hash([]byte("world")) + if h1 == h2 { + t.Fatal("Hash collision on different inputs") + } +} + +func TestHash_EmptyInput(t *testing.T) { + h := 
Hash([]byte{}) + if h != "0000000000000000" { + t.Errorf("expected zero hash for empty input, got %q", h) + } +} diff --git a/mdl/types/json_utils.go b/mdl/types/json_utils.go index 7a9d8da8..2431952f 100644 --- a/mdl/types/json_utils.go +++ b/mdl/types/json_utils.go @@ -34,9 +34,13 @@ func normalizeDateTimeValue(s string) string { // Find the decimal point after seconds dotIdx := strings.Index(s, ".") if dotIdx == -1 { - // No fractional part — insert .0000000 before timezone suffix - if idx := strings.IndexAny(s, "Z+-"); idx > 0 { - return s[:idx] + ".0000000" + s[idx:] + // No fractional part — insert .0000000 before timezone suffix. + // Search from index 19+ to avoid matching the '-' in the date portion (YYYY-MM-DD). + if len(s) > 19 { + if idx := strings.IndexAny(s[19:], "Z+-"); idx >= 0 { + pos := 19 + idx + return s[:pos] + ".0000000" + s[pos:] + } } return s } diff --git a/mdl/types/json_utils_test.go b/mdl/types/json_utils_test.go new file mode 100644 index 00000000..902a07ac --- /dev/null +++ b/mdl/types/json_utils_test.go @@ -0,0 +1,251 @@ +// SPDX-License-Identifier: Apache-2.0 + +package types + +import ( + "strings" + "testing" +) + +func TestPrettyPrintJSON_ValidObject(t *testing.T) { + got := PrettyPrintJSON(`{"a":1,"b":"hello"}`) + if !strings.Contains(got, " ") { + t.Errorf("expected indented output, got %q", got) + } + if !strings.Contains(got, `"a": 1`) { + t.Errorf("expected formatted key, got %q", got) + } +} + +func TestPrettyPrintJSON_InvalidJSON(t *testing.T) { + input := "not json" + got := PrettyPrintJSON(input) + if got != input { + t.Errorf("expected original string for invalid JSON, got %q", got) + } +} + +func TestPrettyPrintJSON_EmptyObject(t *testing.T) { + got := PrettyPrintJSON("{}") + if got != "{}" { + t.Errorf("expected '{}', got %q", got) + } +} + +func TestNormalizeDateTimeValue_WithFractional(t *testing.T) { + tests := []struct { + input, expected string + }{ + {"2015-05-22T14:56:29.000Z", "2015-05-22T14:56:29.0000000Z"}, + 
{"2015-05-22T14:56:29.1234567Z", "2015-05-22T14:56:29.1234567Z"}, + {"2015-05-22T14:56:29.12345678Z", "2015-05-22T14:56:29.1234567Z"}, + {"2015-05-22T14:56:29.1Z", "2015-05-22T14:56:29.1000000Z"}, + } + for _, tt := range tests { + got := normalizeDateTimeValue(tt.input) + if got != tt.expected { + t.Errorf("normalizeDateTimeValue(%q) = %q, want %q", tt.input, got, tt.expected) + } + } +} + +func TestNormalizeDateTimeValue_NoFractional(t *testing.T) { + got := normalizeDateTimeValue("2015-05-22T14:56:29Z") + if got != "2015-05-22T14:56:29.0000000Z" { + t.Errorf("expected .0000000 inserted, got %q", got) + } +} + +func TestNormalizeDateTimeValue_WithTimezone(t *testing.T) { + got := normalizeDateTimeValue("2015-05-22T14:56:29.123+02:00") + if got != "2015-05-22T14:56:29.1230000+02:00" { + t.Errorf("got %q", got) + } +} + +func TestNormalizeDateTimeValue_NoTimezone(t *testing.T) { + got := normalizeDateTimeValue("2015-05-22T14:56:29.123") + if got != "2015-05-22T14:56:29.1230000" { + t.Errorf("got %q", got) + } +} + +func TestBuildJsonElementsFromSnippet_SimpleObject(t *testing.T) { + snippet := `{"name": "John", "age": 30, "active": true}` + elems, err := BuildJsonElementsFromSnippet(snippet, nil) + if err != nil { + t.Fatal(err) + } + if len(elems) != 1 { + t.Fatalf("expected 1 root element, got %d", len(elems)) + } + root := elems[0] + if root.ElementType != "Object" { + t.Errorf("expected Object root, got %q", root.ElementType) + } + if root.ExposedName != "Root" { + t.Errorf("expected Root name, got %q", root.ExposedName) + } + if len(root.Children) != 3 { + t.Fatalf("expected 3 children, got %d", len(root.Children)) + } + + // Check child types + nameChild := root.Children[0] + if nameChild.PrimitiveType != "String" { + t.Errorf("expected String for 'name', got %q", nameChild.PrimitiveType) + } + ageChild := root.Children[1] + if ageChild.PrimitiveType != "Integer" { + t.Errorf("expected Integer for 'age', got %q", ageChild.PrimitiveType) + } + activeChild := 
root.Children[2] + if activeChild.PrimitiveType != "Boolean" { + t.Errorf("expected Boolean for 'active', got %q", activeChild.PrimitiveType) + } +} + +func TestBuildJsonElementsFromSnippet_RootArray(t *testing.T) { + snippet := `[{"id": 1}]` + elems, err := BuildJsonElementsFromSnippet(snippet, nil) + if err != nil { + t.Fatal(err) + } + root := elems[0] + if root.ElementType != "Array" { + t.Errorf("expected Array root, got %q", root.ElementType) + } +} + +func TestBuildJsonElementsFromSnippet_InvalidJSON(t *testing.T) { + _, err := BuildJsonElementsFromSnippet("not json", nil) + if err == nil { + t.Error("expected error for invalid JSON") + } +} + +func TestBuildJsonElementsFromSnippet_PrimitiveRoot(t *testing.T) { + _, err := BuildJsonElementsFromSnippet(`"hello"`, nil) + if err == nil { + t.Error("expected error for primitive root") + } +} + +func TestBuildJsonElementsFromSnippet_DateTimeDetection(t *testing.T) { + snippet := `{"created": "2015-05-22T14:56:29.000Z"}` + elems, err := BuildJsonElementsFromSnippet(snippet, nil) + if err != nil { + t.Fatal(err) + } + child := elems[0].Children[0] + if child.PrimitiveType != "DateTime" { + t.Errorf("expected DateTime, got %q", child.PrimitiveType) + } + // OriginalValue should have normalized fractional seconds + if !strings.Contains(child.OriginalValue, ".0000000") { + t.Errorf("expected normalized datetime in OriginalValue, got %q", child.OriginalValue) + } +} + +func TestBuildJsonElementsFromSnippet_DecimalDetection(t *testing.T) { + snippet := `{"price": 19.99}` + elems, err := BuildJsonElementsFromSnippet(snippet, nil) + if err != nil { + t.Fatal(err) + } + child := elems[0].Children[0] + if child.PrimitiveType != "Decimal" { + t.Errorf("expected Decimal, got %q", child.PrimitiveType) + } +} + +func TestBuildJsonElementsFromSnippet_NullValue(t *testing.T) { + snippet := `{"value": null}` + elems, err := BuildJsonElementsFromSnippet(snippet, nil) + if err != nil { + t.Fatal(err) + } + child := 
elems[0].Children[0] + if child.PrimitiveType != "Unknown" { + t.Errorf("expected Unknown for null, got %q", child.PrimitiveType) + } +} + +func TestBuildJsonElementsFromSnippet_NestedObject(t *testing.T) { + snippet := `{"address": {"city": "Amsterdam"}}` + elems, err := BuildJsonElementsFromSnippet(snippet, nil) + if err != nil { + t.Fatal(err) + } + addr := elems[0].Children[0] + if addr.ElementType != "Object" { + t.Errorf("expected Object for address, got %q", addr.ElementType) + } + if len(addr.Children) != 1 { + t.Fatalf("expected 1 child, got %d", len(addr.Children)) + } +} + +func TestBuildJsonElementsFromSnippet_PrimitiveArray(t *testing.T) { + snippet := `{"tags": ["a", "b"]}` + elems, err := BuildJsonElementsFromSnippet(snippet, nil) + if err != nil { + t.Fatal(err) + } + tags := elems[0].Children[0] + if tags.ElementType != "Array" { + t.Errorf("expected Array for tags, got %q", tags.ElementType) + } + // Should have a Wrapper child + if len(tags.Children) != 1 { + t.Fatalf("expected 1 wrapper child, got %d", len(tags.Children)) + } + wrapper := tags.Children[0] + if wrapper.ElementType != "Wrapper" { + t.Errorf("expected Wrapper, got %q", wrapper.ElementType) + } +} + +func TestBuildJsonElementsFromSnippet_CustomNameMap(t *testing.T) { + snippet := `{"myField": "value"}` + custom := map[string]string{"myField": "CustomName"} + elems, err := BuildJsonElementsFromSnippet(snippet, custom) + if err != nil { + t.Fatal(err) + } + child := elems[0].Children[0] + if child.ExposedName != "CustomName" { + t.Errorf("expected CustomName, got %q", child.ExposedName) + } +} + +func TestBuildJsonElementsFromSnippet_ReservedNames(t *testing.T) { + // "id" capitalizes to "Id" which is reserved — should get underscore prefix + snippet := `{"id": "123"}` + elems, err := BuildJsonElementsFromSnippet(snippet, nil) + if err != nil { + t.Fatal(err) + } + child := elems[0].Children[0] + if child.ExposedName != "_id" { + t.Errorf("expected _id for reserved name, got %q", 
child.ExposedName) + } +} + +func TestSingularize(t *testing.T) { + tests := []struct { + input, expected string + }{ + {"Tags", "Tag"}, + {"Items", "Item"}, + {"s", "s"}, // single char + {"", ""}, + {"Bus", "Bu"}, + } + for _, tt := range tests { + got := singularize(tt.input) + if got != tt.expected { + t.Errorf("singularize(%q) = %q, want %q", tt.input, got, tt.expected) + } + } +} From a807b77f047530c4b7f5f4fc6c77d3fdbe9edebc Mon Sep 17 00:00:00 2001 From: Andrew Vasilyev Date: Sun, 19 Apr 2026 20:27:34 +0200 Subject: [PATCH 09/16] fix copilot review comments on json_utils.go MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - normalizeDateTimeValue: pad fractional seconds even without timezone suffix - float64→int64: add safe integer range guard (±2^53) - fix reservedExposedNames comment (remove 'Name' not in map) - clarify singularize is intentionally naive (matches Studio Pro) --- mdl/types/json_utils.go | 14 ++++++++------ mdl/types/json_utils_test.go | 7 +++++++ 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/mdl/types/json_utils.go b/mdl/types/json_utils.go index 2431952f..acb40a62 100644 --- a/mdl/types/json_utils.go +++ b/mdl/types/json_utils.go @@ -36,13 +36,14 @@ func normalizeDateTimeValue(s string) string { if dotIdx == -1 { // No fractional part — insert .0000000 before timezone suffix. // Search from index 19+ to avoid matching the '-' in the date portion (YYYY-MM-DD). - if len(s) > 19 { + if len(s) >= 19 { if idx := strings.IndexAny(s[19:], "Z+-"); idx >= 0 { pos := 19 + idx return s[:pos] + ".0000000" + s[pos:] } } - return s + // No timezone suffix — append fractional part at end + return s + ".0000000" } // Find where fractional digits end (at Z, +, - or end of string) fracEnd := len(s) @@ -113,7 +114,7 @@ var reservedExposedNames = map[string]bool{ } // resolveExposedName returns the custom name if mapped, otherwise capitalizes the JSON key. 
-// Reserved names (Id, Type, Name) are prefixed with underscore to match Studio Pro behavior. +// Reserved names (Id, Type) are prefixed with underscore to match Studio Pro behavior. func (b *snippetBuilder) resolveExposedName(jsonKey string) string { if b.customNameMap != nil { if custom, ok := b.customNameMap[jsonKey]; ok { @@ -230,7 +231,7 @@ func (b *snippetBuilder) buildElementFromRawValue(exposedName, path, jsonKey str return buildValueElement(exposedName, path, primitiveType, fmt.Sprintf("%q", v)) case float64: // Check the raw JSON text for a decimal point — Go's %v drops ".0" from 41850.0 - if v == math.Trunc(v) && !strings.Contains(trimmed, ".") { + if v == math.Trunc(v) && !strings.Contains(trimmed, ".") && v >= -(1<<53) && v <= (1<<53) { return buildValueElement(exposedName, path, "Integer", fmt.Sprintf("%v", int64(v))) } return buildValueElement(exposedName, path, "Decimal", fmt.Sprintf("%v", v)) @@ -348,8 +349,9 @@ func (b *snippetBuilder) buildElementFromRawArray(exposedName, path, jsonKey, ra return arrayElem } -// singularize returns a simple singular form by stripping trailing "s". -// Handles common cases: Tags→Tag, Items→Item, Addresses→Addresse. +// singularize returns a naive singular form by stripping trailing "s". +// Handles common cases: Tags→Tag, Items→Item. Known-incorrect for some words +// (e.g. Addresses→Addresse) — this matches Studio Pro's behavior. 
func singularize(s string) string { if len(s) > 1 && strings.HasSuffix(s, "s") { return s[:len(s)-1] diff --git a/mdl/types/json_utils_test.go b/mdl/types/json_utils_test.go index 902a07ac..466fe8f2 100644 --- a/mdl/types/json_utils_test.go +++ b/mdl/types/json_utils_test.go @@ -70,6 +70,13 @@ func TestNormalizeDateTimeValue_NoTimezone(t *testing.T) { } } +func TestNormalizeDateTimeValue_NoFractionalNoTimezone(t *testing.T) { + got := normalizeDateTimeValue("2015-05-22T14:56:29") + if got != "2015-05-22T14:56:29.0000000" { + t.Errorf("expected .0000000 appended, got %q", got) + } +} + func TestBuildJsonElementsFromSnippet_SimpleObject(t *testing.T) { snippet := `{"name": "John", "age": 30, "active": true}` elems, err := BuildJsonElementsFromSnippet(snippet, nil) From c0decaecd82b26c48f91577d0d676b4c4db5972e Mon Sep 17 00:00:00 2001 From: Andrew Vasilyev Date: Sun, 19 Apr 2026 16:15:39 +0200 Subject: [PATCH 10/16] refactor: define mutation backend interfaces with stub implementations Add PageMutator, WorkflowMutator, WidgetSerializationBackend interfaces to mdl/backend/mutation.go for BSON-free handler decoupling. Extract BSON ID helpers (IDToBsonBinary, BsonBinaryToID, NewIDBsonBinary) to mdl/bsonutil/ package. Add panic stubs to MprBackend and mock function fields to MockBackend for all new interface methods. 
- Create mdl/bsonutil/bsonutil.go with BSON ID conversion utilities - Migrate 10 handler files from mpr.IDToBsonBinary to bsonutil.* - Define PageMutationBackend, WorkflowMutationBackend interfaces - Define WidgetSerializationBackend with opaque return types - Add PluggablePropertyContext for domain-typed widget property input --- mdl/backend/backend.go | 3 + mdl/backend/mock/backend.go | 12 ++ mdl/backend/mock/mock_mutation.go | 64 +++++++ mdl/backend/mpr/backend.go | 33 ++++ mdl/backend/mutation.go | 181 ++++++++++++++++++ mdl/bsonutil/bsonutil.go | 33 ++++ mdl/executor/cmd_alter_page.go | 28 +-- mdl/executor/cmd_alter_workflow.go | 37 ++-- mdl/executor/cmd_pages_builder_input.go | 12 +- .../cmd_pages_builder_input_cloning.go | 25 +-- .../cmd_pages_builder_input_cloning_test.go | 14 +- .../cmd_pages_builder_input_datagrid.go | 135 ++++++------- .../cmd_pages_builder_input_filters.go | 23 +-- .../cmd_pages_builder_v3_pluggable.go | 18 +- mdl/executor/widget_engine_test.go | 4 +- mdl/executor/widget_operations.go | 4 +- 16 files changed, 481 insertions(+), 145 deletions(-) create mode 100644 mdl/backend/mock/mock_mutation.go create mode 100644 mdl/backend/mutation.go create mode 100644 mdl/bsonutil/bsonutil.go diff --git a/mdl/backend/backend.go b/mdl/backend/backend.go index 7776ea4b..18fa681e 100644 --- a/mdl/backend/backend.go +++ b/mdl/backend/backend.go @@ -31,4 +31,7 @@ type FullBackend interface { MetadataBackend WidgetBackend AgentEditorBackend + PageMutationBackend + WorkflowMutationBackend + WidgetSerializationBackend } diff --git a/mdl/backend/mock/backend.go b/mdl/backend/mock/backend.go index e2f78a0c..1a4ff937 100644 --- a/mdl/backend/mock/backend.go +++ b/mdl/backend/mock/backend.go @@ -258,6 +258,18 @@ type MockBackend struct { FindCustomWidgetTypeFunc func(widgetID string) (*types.RawCustomWidgetType, error) FindAllCustomWidgetTypesFunc func(widgetID string) ([]*types.RawCustomWidgetType, error) + // PageMutationBackend + OpenPageForMutationFunc 
func(unitID model.ID) (backend.PageMutator, error) + + // WorkflowMutationBackend + OpenWorkflowForMutationFunc func(unitID model.ID) (backend.WorkflowMutator, error) + + // WidgetSerializationBackend + SerializeWidgetFunc func(w pages.Widget) (any, error) + SerializeClientActionFunc func(a pages.ClientAction) (any, error) + SerializeDataSourceFunc func(ds pages.DataSource) (any, error) + SerializeWorkflowActivityFunc func(a workflows.WorkflowActivity) (any, error) + // AgentEditorBackend ListAgentEditorModelsFunc func() ([]*agenteditor.Model, error) ListAgentEditorKnowledgeBasesFunc func() ([]*agenteditor.KnowledgeBase, error) diff --git a/mdl/backend/mock/mock_mutation.go b/mdl/backend/mock/mock_mutation.go new file mode 100644 index 00000000..89b91122 --- /dev/null +++ b/mdl/backend/mock/mock_mutation.go @@ -0,0 +1,64 @@ +// SPDX-License-Identifier: Apache-2.0 + +package mock + +import ( + "github.com/mendixlabs/mxcli/mdl/backend" + "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/pages" + "github.com/mendixlabs/mxcli/sdk/workflows" +) + +// --------------------------------------------------------------------------- +// PageMutationBackend +// --------------------------------------------------------------------------- + +func (m *MockBackend) OpenPageForMutation(unitID model.ID) (backend.PageMutator, error) { + if m.OpenPageForMutationFunc != nil { + return m.OpenPageForMutationFunc(unitID) + } + return nil, nil +} + +// --------------------------------------------------------------------------- +// WorkflowMutationBackend +// --------------------------------------------------------------------------- + +func (m *MockBackend) OpenWorkflowForMutation(unitID model.ID) (backend.WorkflowMutator, error) { + if m.OpenWorkflowForMutationFunc != nil { + return m.OpenWorkflowForMutationFunc(unitID) + } + return nil, nil +} + +// --------------------------------------------------------------------------- +// WidgetSerializationBackend +// 
--------------------------------------------------------------------------- + +func (m *MockBackend) SerializeWidget(w pages.Widget) (any, error) { + if m.SerializeWidgetFunc != nil { + return m.SerializeWidgetFunc(w) + } + return nil, nil +} + +func (m *MockBackend) SerializeClientAction(a pages.ClientAction) (any, error) { + if m.SerializeClientActionFunc != nil { + return m.SerializeClientActionFunc(a) + } + return nil, nil +} + +func (m *MockBackend) SerializeDataSource(ds pages.DataSource) (any, error) { + if m.SerializeDataSourceFunc != nil { + return m.SerializeDataSourceFunc(ds) + } + return nil, nil +} + +func (m *MockBackend) SerializeWorkflowActivity(a workflows.WorkflowActivity) (any, error) { + if m.SerializeWorkflowActivityFunc != nil { + return m.SerializeWorkflowActivityFunc(a) + } + return nil, nil +} diff --git a/mdl/backend/mpr/backend.go b/mdl/backend/mpr/backend.go index 2dc66ecf..54bce183 100644 --- a/mdl/backend/mpr/backend.go +++ b/mdl/backend/mpr/backend.go @@ -724,3 +724,36 @@ func (b *MprBackend) CreateAgentEditorAgent(a *agenteditor.Agent) error { func (b *MprBackend) DeleteAgentEditorAgent(id string) error { return b.writer.DeleteAgentEditorAgent(id) } + +// --------------------------------------------------------------------------- +// PageMutationBackend + +func (b *MprBackend) OpenPageForMutation(unitID model.ID) (backend.PageMutator, error) { + panic("MprBackend.OpenPageForMutation not yet implemented") +} + +// --------------------------------------------------------------------------- +// WorkflowMutationBackend + +func (b *MprBackend) OpenWorkflowForMutation(unitID model.ID) (backend.WorkflowMutator, error) { + panic("MprBackend.OpenWorkflowForMutation not yet implemented") +} + +// --------------------------------------------------------------------------- +// WidgetSerializationBackend + +func (b *MprBackend) SerializeWidget(w pages.Widget) (any, error) { + panic("MprBackend.SerializeWidget not yet implemented") +} + +func (b 
*MprBackend) SerializeClientAction(a pages.ClientAction) (any, error) { + panic("MprBackend.SerializeClientAction not yet implemented") +} + +func (b *MprBackend) SerializeDataSource(ds pages.DataSource) (any, error) { + panic("MprBackend.SerializeDataSource not yet implemented") +} + +func (b *MprBackend) SerializeWorkflowActivity(a workflows.WorkflowActivity) (any, error) { + panic("MprBackend.SerializeWorkflowActivity not yet implemented") +} diff --git a/mdl/backend/mutation.go b/mdl/backend/mutation.go new file mode 100644 index 00000000..cb0846df --- /dev/null +++ b/mdl/backend/mutation.go @@ -0,0 +1,181 @@ +// SPDX-License-Identifier: Apache-2.0 + +package backend + +import ( + "github.com/mendixlabs/mxcli/model" + "github.com/mendixlabs/mxcli/sdk/pages" + "github.com/mendixlabs/mxcli/sdk/workflows" +) + +// PageMutator provides fine-grained mutation operations on a single +// page, layout, or snippet unit. Obtain one via PageMutationBackend.OpenPageForMutation. +// All methods operate on the in-memory representation; call Save to persist. +type PageMutator interface { + // ContainerType returns "page", "layout", or "snippet". + ContainerType() string + + // --- Widget property operations --- + + // SetWidgetProperty sets a simple property on the named widget. + // For pluggable widget properties, prop is the Mendix property key + // and value is the string representation. + SetWidgetProperty(widgetRef string, prop string, value any) error + + // SetWidgetDataSource sets the DataSource on the named widget. + SetWidgetDataSource(widgetRef string, ds pages.DataSource) error + + // SetColumnProperty sets a property on a column within a grid widget. + SetColumnProperty(gridRef string, columnRef string, prop string, value any) error + + // --- Widget tree operations --- + + // InsertWidget inserts serialized widgets at the given position + // relative to the target widget. Position is "before" or "after". 
+ InsertWidget(targetWidget string, position string, widgets []pages.Widget) error + + // DropWidget removes widgets by name from the tree. + DropWidget(widgetRefs []string) error + + // ReplaceWidget replaces the target widget with the given widgets. + ReplaceWidget(targetWidget string, widgets []pages.Widget) error + + // --- Variable operations --- + + // AddVariable adds a local variable to the page/snippet. + AddVariable(name, dataType, defaultValue string) error + + // DropVariable removes a local variable by name. + DropVariable(name string) error + + // --- Layout operations --- + + // SetLayout changes the layout reference and remaps placeholder parameters. + SetLayout(newLayout string, paramMappings map[string]string) error + + // --- Pluggable widget operations --- + + // SetPluggableProperty sets a typed property on a pluggable widget's object. + // propKey is the Mendix property key, opName is the operation type + // ("attribute", "association", "primitive", "selection", "datasource", + // "widgets", "texttemplate", "action", "attributeObjects"). + // ctx carries the operation-specific values. + SetPluggableProperty(widgetRef string, propKey string, opName string, ctx PluggablePropertyContext) error + + // --- Introspection --- + + // EnclosingEntity returns the qualified entity name for the given widget's + // data context, or "" if none. + EnclosingEntity(widgetRef string) string + + // WidgetScope returns a map of widget name → unit ID for all widgets in the tree. + WidgetScope() map[string]model.ID + + // Save persists the mutations to the backend. + Save() error +} + +// PluggablePropertyContext carries operation-specific values for +// SetPluggableProperty. Only fields relevant to the operation are used. 
+type PluggablePropertyContext struct { + AttributePath string // "attribute", "association" + AttributePaths []string // "attributeObjects" + AssocPath string // "association" + EntityName string // "association" + PrimitiveVal string // "primitive" + DataSource pages.DataSource // "datasource" + ChildWidgets []pages.Widget // "widgets" + Action pages.ClientAction // "action" + TextTemplate string // "texttemplate" + Selection string // "selection" +} + +// WorkflowMutator provides fine-grained mutation operations on a single +// workflow unit. Obtain one via WorkflowMutationBackend.OpenWorkflowForMutation. +// All methods operate on the in-memory representation; call Save to persist. +type WorkflowMutator interface { + // --- Top-level property operations --- + + // SetProperty sets a workflow-level property (DisplayName, Description, + // ExportLevel, DueDate, Parameter, OverviewPage). + SetProperty(prop string, value string) error + + // SetPropertyWithEntity sets a workflow-level property that references + // an entity (e.g. Parameter). + SetPropertyWithEntity(prop string, value string, entity string) error + + // --- Activity operations --- + + // SetActivityProperty sets a property on an activity identified by + // caption and optional position index. + SetActivityProperty(activityRef string, atPos int, prop string, value string) error + + // InsertAfterActivity inserts new activities after the referenced activity. + InsertAfterActivity(activityRef string, atPos int, activities []workflows.WorkflowActivity) error + + // DropActivity removes the referenced activity. + DropActivity(activityRef string, atPos int) error + + // ReplaceActivity replaces the referenced activity with new ones. + ReplaceActivity(activityRef string, atPos int, activities []workflows.WorkflowActivity) error + + // --- Outcome operations --- + + // InsertOutcome adds a new outcome to the referenced activity. 
+ InsertOutcome(activityRef string, atPos int, outcomeName string, activities []workflows.WorkflowActivity) error + + // DropOutcome removes an outcome by name from the referenced activity. + DropOutcome(activityRef string, atPos int, outcomeName string) error + + // --- Path operations (parallel split) --- + + InsertPath(activityRef string, atPos int, pathCaption string, activities []workflows.WorkflowActivity) error + DropPath(activityRef string, atPos int, pathCaption string) error + + // --- Branch operations (exclusive split) --- + + InsertBranch(activityRef string, atPos int, condition string, activities []workflows.WorkflowActivity) error + DropBranch(activityRef string, atPos int, branchName string) error + + // --- Boundary event operations --- + + InsertBoundaryEvent(activityRef string, atPos int, eventType string, delay string, activities []workflows.WorkflowActivity) error + DropBoundaryEvent(activityRef string, atPos int) error + + // Save persists the mutations to the backend. + Save() error +} + +// PageMutationBackend provides page/layout/snippet mutation capabilities. +type PageMutationBackend interface { + // OpenPageForMutation loads a page, layout, or snippet unit and returns + // a mutator for applying changes. Call Save() on the returned mutator + // to persist. + OpenPageForMutation(unitID model.ID) (PageMutator, error) +} + +// WorkflowMutationBackend provides workflow mutation capabilities. +type WorkflowMutationBackend interface { + // OpenWorkflowForMutation loads a workflow unit and returns a mutator + // for applying changes. Call Save() on the returned mutator to persist. + OpenWorkflowForMutation(unitID model.ID) (WorkflowMutator, error) +} + +// WidgetSerializationBackend provides widget and activity serialization +// for CREATE paths where the executor builds domain objects that need +// to be converted to the storage format. 
+type WidgetSerializationBackend interface { + // SerializeWidget converts a domain Widget to its storage representation. + // The returned value is opaque to the caller; it is only used as input + // to mutation operations or passed to the backend for persistence. + SerializeWidget(w pages.Widget) (any, error) + + // SerializeClientAction converts a domain ClientAction to storage format. + SerializeClientAction(a pages.ClientAction) (any, error) + + // SerializeDataSource converts a domain DataSource to storage format. + SerializeDataSource(ds pages.DataSource) (any, error) + + // SerializeWorkflowActivity converts a domain WorkflowActivity to storage format. + SerializeWorkflowActivity(a workflows.WorkflowActivity) (any, error) +} diff --git a/mdl/bsonutil/bsonutil.go b/mdl/bsonutil/bsonutil.go new file mode 100644 index 00000000..558497c3 --- /dev/null +++ b/mdl/bsonutil/bsonutil.go @@ -0,0 +1,33 @@ +// SPDX-License-Identifier: Apache-2.0 + +// Package bsonutil provides BSON-aware ID conversion utilities for model elements. +// It depends on mdl/types (WASM-safe) and the BSON driver (also WASM-safe), +// but does NOT depend on sdk/mpr (which pulls in SQLite/CGO). +package bsonutil + +import ( + "github.com/mendixlabs/mxcli/mdl/types" + "go.mongodb.org/mongo-driver/bson/primitive" +) + +// IDToBsonBinary converts a hex UUID string to a BSON binary value. +func IDToBsonBinary(id string) primitive.Binary { + blob := types.UUIDToBlob(id) + if blob == nil || len(blob) != 16 { + blob = types.UUIDToBlob(types.GenerateID()) + } + return primitive.Binary{ + Subtype: 0x00, + Data: blob, + } +} + +// BsonBinaryToID converts a BSON binary value to a hex UUID string. +func BsonBinaryToID(bin primitive.Binary) string { + return types.BlobToUUID(bin.Data) +} + +// NewIDBsonBinary generates a new unique ID and returns it as a BSON binary value. 
+func NewIDBsonBinary() primitive.Binary { + return IDToBsonBinary(types.GenerateID()) +} diff --git a/mdl/executor/cmd_alter_page.go b/mdl/executor/cmd_alter_page.go index 900be082..cfb5b04b 100644 --- a/mdl/executor/cmd_alter_page.go +++ b/mdl/executor/cmd_alter_page.go @@ -10,7 +10,9 @@ import ( "go.mongodb.org/mongo-driver/bson/primitive" "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/bsonutil" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/mpr" ) @@ -357,7 +359,7 @@ func dSetArray(doc bson.D, key string, elements []any) { // extractBinaryIDFromDoc extracts a binary ID string from a bson.D field. func extractBinaryIDFromDoc(val any) string { if bin, ok := val.(primitive.Binary); ok { - return mpr.BlobToUUID(bin.Data) + return types.BlobToUUID(bin.Data) } return "" } @@ -936,7 +938,7 @@ func setWidgetAttributeRef(widget bson.D, value interface{}) error { var attrRefValue interface{} if strings.Count(attrPath, ".") >= 2 { attrRefValue = bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "DomainModels$AttributeRef"}, {Key: "Attribute", Value: attrPath}, {Key: "EntityRef", Value: nil}, @@ -971,7 +973,7 @@ func setWidgetDataSource(widget bson.D, value interface{}) error { case "selection": // SELECTION widgetName → Forms$ListenTargetSource serialized = bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$ListenTargetSource"}, {Key: "ListenTarget", Value: ds.Reference}, } @@ -980,13 +982,13 @@ func setWidgetDataSource(widget bson.D, value interface{}) error { var entityRef interface{} if ds.Reference != "" { entityRef = bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: 
"$Type", Value: "DomainModels$DirectEntityRef"}, {Key: "Entity", Value: ds.Reference}, } } serialized = bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$DataViewSource"}, {Key: "EntityRef", Value: entityRef}, {Key: "ForceFullObjects", Value: false}, @@ -994,10 +996,10 @@ func setWidgetDataSource(widget bson.D, value interface{}) error { } case "microflow": serialized = bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$MicroflowSource"}, {Key: "MicroflowSettings", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$MicroflowSettings"}, {Key: "Asynchronous", Value: false}, {Key: "ConfirmationInfo", Value: nil}, @@ -1010,10 +1012,10 @@ func setWidgetDataSource(widget bson.D, value interface{}) error { } case "nanoflow": serialized = bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$NanoflowSource"}, {Key: "NanoflowSettings", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$NanoflowSettings"}, {Key: "Nanoflow", Value: ds.Reference}, {Key: "ParameterMappings", Value: bson.A{int32(3)}}, @@ -1462,10 +1464,10 @@ func applyAddVariable(rawData *bson.D, op *ast.AddVariableOp) error { } // Build VariableType BSON - varTypeID := mpr.GenerateID() + varTypeID := types.GenerateID() bsonTypeName := mdlTypeToBsonType(op.Variable.DataType) varType := bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(varTypeID)}, + {Key: "$ID", Value: bsonutil.IDToBsonBinary(varTypeID)}, {Key: "$Type", Value: bsonTypeName}, } if bsonTypeName == "DataTypes$ObjectType" { @@ -1473,9 +1475,9 @@ func 
applyAddVariable(rawData *bson.D, op *ast.AddVariableOp) error { } // Build LocalVariable BSON document - varID := mpr.GenerateID() + varID := types.GenerateID() varDoc := bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(varID)}, + {Key: "$ID", Value: bsonutil.IDToBsonBinary(varID)}, {Key: "$Type", Value: "Forms$LocalVariable"}, {Key: "DefaultValue", Value: op.Variable.DefaultValue}, {Key: "Name", Value: op.Variable.Name}, diff --git a/mdl/executor/cmd_alter_workflow.go b/mdl/executor/cmd_alter_workflow.go index df403ca5..e7552e10 100644 --- a/mdl/executor/cmd_alter_workflow.go +++ b/mdl/executor/cmd_alter_workflow.go @@ -10,6 +10,7 @@ import ( "go.mongodb.org/mongo-driver/bson" "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/bsonutil" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/mpr" @@ -155,7 +156,7 @@ func applySetWorkflowProperty(doc *bson.D, op *ast.SetWorkflowPropertyOp) error if wfName == nil { // Auto-create the WorkflowName sub-document newName := bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Texts$Text"}, {Key: "Text", Value: op.Value}, } @@ -173,7 +174,7 @@ func applySetWorkflowProperty(doc *bson.D, op *ast.SetWorkflowPropertyOp) error if wfDesc == nil { // Auto-create the WorkflowDescription sub-document newDesc := bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Texts$Text"}, {Key: "Text", Value: op.Value}, } @@ -198,7 +199,7 @@ func applySetWorkflowProperty(doc *bson.D, op *ast.SetWorkflowPropertyOp) error dSet(*doc, "AdminPage", nil) } else { pageRef := bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Workflows$PageReference"}, {Key: "Page", Value: qn}, } @@ -225,7 +226,7 
@@ func applySetWorkflowProperty(doc *bson.D, op *ast.SetWorkflowPropertyOp) error } else { // Create new Parameter newParam := bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Workflows$Parameter"}, {Key: "Entity", Value: qn}, {Key: "Name", Value: "WorkflowContext"}, @@ -264,7 +265,7 @@ func applySetActivityProperty(doc bson.D, op *ast.SetActivityPropertyOp) error { } else { // Create TaskPage pageRef := bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Workflows$PageReference"}, {Key: "Page", Value: qn}, } @@ -283,7 +284,7 @@ func applySetActivityProperty(doc bson.D, op *ast.SetActivityPropertyOp) error { case "TARGETING_MICROFLOW": qn := op.Microflow.Module + "." + op.Microflow.Name userTargeting := bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Workflows$MicroflowUserTargeting"}, {Key: "Microflow", Value: qn}, } @@ -292,7 +293,7 @@ func applySetActivityProperty(doc bson.D, op *ast.SetActivityPropertyOp) error { case "TARGETING_XPATH": userTargeting := bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Workflows$XPathUserTargeting"}, {Key: "XPathConstraint", Value: op.Value}, } @@ -500,7 +501,7 @@ func buildSubFlowBson(ctx *ExecContext, doc bson.D, activities []ast.WorkflowAct } } return bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Workflows$Flow"}, {Key: "Activities", Value: subActsBson}, } @@ -602,7 +603,7 @@ func applyInsertOutcome(ctx *ExecContext, doc bson.D, op *ast.InsertOutcomeOp) e // Build outcome BSON outcomeDoc := bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + 
{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Workflows$UserTaskOutcome"}, } @@ -612,7 +613,7 @@ func applyInsertOutcome(ctx *ExecContext, doc bson.D, op *ast.InsertOutcomeOp) e } outcomeDoc = append(outcomeDoc, - bson.E{Key: "PersistentId", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + bson.E{Key: "PersistentId", Value: bsonutil.NewIDBsonBinary()}, bson.E{Key: "Value", Value: op.OutcomeName}, ) @@ -667,7 +668,7 @@ func applyInsertPath(ctx *ExecContext, doc bson.D, op *ast.InsertPathOp) error { } pathDoc := bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Workflows$ParallelSplitOutcome"}, } @@ -675,7 +676,7 @@ func applyInsertPath(ctx *ExecContext, doc bson.D, op *ast.InsertPathOp) error { pathDoc = append(pathDoc, bson.E{Key: "Flow", Value: buildSubFlowBson(ctx, doc, op.Activities)}) } - pathDoc = append(pathDoc, bson.E{Key: "PersistentId", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + pathDoc = append(pathDoc, bson.E{Key: "PersistentId", Value: bsonutil.NewIDBsonBinary()}) outcomes := dGetArrayElements(dGet(actDoc, "Outcomes")) outcomes = append(outcomes, pathDoc) @@ -730,24 +731,24 @@ func applyInsertBranch(ctx *ExecContext, doc bson.D, op *ast.InsertBranchOp) err switch strings.ToLower(op.Condition) { case "true": outcomeDoc = bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Workflows$BooleanConditionOutcome"}, {Key: "Value", Value: true}, } case "false": outcomeDoc = bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Workflows$BooleanConditionOutcome"}, {Key: "Value", Value: false}, } case "default": outcomeDoc = bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: 
"Workflows$VoidConditionOutcome"}, } default: outcomeDoc = bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Workflows$EnumerationValueConditionOutcome"}, {Key: "Value", Value: op.Condition}, } @@ -835,7 +836,7 @@ func applyInsertBoundaryEvent(ctx *ExecContext, doc bson.D, op *ast.InsertBounda } eventDoc := bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: typeName}, {Key: "Caption", Value: ""}, } @@ -848,7 +849,7 @@ func applyInsertBoundaryEvent(ctx *ExecContext, doc bson.D, op *ast.InsertBounda eventDoc = append(eventDoc, bson.E{Key: "Flow", Value: buildSubFlowBson(ctx, doc, op.Activities)}) } - eventDoc = append(eventDoc, bson.E{Key: "PersistentId", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + eventDoc = append(eventDoc, bson.E{Key: "PersistentId", Value: bsonutil.NewIDBsonBinary()}) if typeName == "Workflows$NonInterruptingTimerBoundaryEvent" { eventDoc = append(eventDoc, bson.E{Key: "Recurrence", Value: nil}) diff --git a/mdl/executor/cmd_pages_builder_input.go b/mdl/executor/cmd_pages_builder_input.go index 16bb6fcb..4a8cfed1 100644 --- a/mdl/executor/cmd_pages_builder_input.go +++ b/mdl/executor/cmd_pages_builder_input.go @@ -8,6 +8,8 @@ import ( "strings" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" + "github.com/mendixlabs/mxcli/mdl/bsonutil" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/model" "github.com/mendixlabs/mxcli/sdk/mpr" "github.com/mendixlabs/mxcli/sdk/pages" @@ -128,10 +130,10 @@ func matchesTypePointer(prop bson.D, propertyTypeID string) bool { // Handle both primitive.Binary (from MPR) and []byte (from JSON templates) switch v := elem.Value.(type) { case primitive.Binary: - propID := strings.ReplaceAll(mpr.BlobToUUID(v.Data), "-", "") + propID := strings.ReplaceAll(types.BlobToUUID(v.Data), "-", "") return propID == 
normalizedTarget case []byte: - propID := strings.ReplaceAll(mpr.BlobToUUID(v), "-", "") + propID := strings.ReplaceAll(types.BlobToUUID(v), "-", "") if propID == normalizedTarget { return true } @@ -196,12 +198,12 @@ func setAssociationRef(val bson.D, assocPath string, entityName string) bson.D { for _, elem := range val { if elem.Key == "EntityRef" && entityName != "" { result = append(result, bson.E{Key: "EntityRef", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "DomainModels$IndirectEntityRef"}, {Key: "Steps", Value: bson.A{ int32(2), // version marker bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "DomainModels$EntityRefStep"}, {Key: "Association", Value: assocPath}, {Key: "DestinationEntity", Value: entityName}, @@ -224,7 +226,7 @@ func setAttributeRef(val bson.D, attrPath string) bson.D { if elem.Key == "AttributeRef" { if strings.Count(attrPath, ".") >= 2 { result = append(result, bson.E{Key: "AttributeRef", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "DomainModels$AttributeRef"}, {Key: "Attribute", Value: attrPath}, {Key: "EntityRef", Value: nil}, diff --git a/mdl/executor/cmd_pages_builder_input_cloning.go b/mdl/executor/cmd_pages_builder_input_cloning.go index 5a8fb0d0..af8681e2 100644 --- a/mdl/executor/cmd_pages_builder_input_cloning.go +++ b/mdl/executor/cmd_pages_builder_input_cloning.go @@ -3,7 +3,8 @@ package executor import ( - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/bsonutil" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/pages" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson/primitive" @@ -17,7 +18,7 @@ func (pb *pageBuilder) 
cloneDataGrid2ObjectWithDatasourceOnly(templateObject bso for _, elem := range templateObject { if elem.Key == "$ID" { // Generate new ID for the object - result = append(result, bson.E{Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + result = append(result, bson.E{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}) } else if elem.Key == "Properties" { // Update only datasource property if propsArr, ok := elem.Value.(bson.A); ok { @@ -67,10 +68,10 @@ func (pb *pageBuilder) getTypePointerFromProperty(prop bson.D) string { if elem.Key == "TypePointer" { switch v := elem.Value.(type) { case primitive.Binary: - return mpr.BsonBinaryToID(v) + return bsonutil.BsonBinaryToID(v) case []byte: // When loaded from JSON template, binary is []byte instead of primitive.Binary - return mpr.BlobToUUID(v) + return types.BlobToUUID(v) } } } @@ -82,7 +83,7 @@ func (pb *pageBuilder) clonePropertyWithNewIDs(prop bson.D) bson.D { result := make(bson.D, 0, len(prop)) for _, elem := range prop { if elem.Key == "$ID" { - result = append(result, bson.E{Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + result = append(result, bson.E{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}) } else if elem.Key == "Value" { if valMap, ok := elem.Value.(bson.D); ok { result = append(result, bson.E{Key: "Value", Value: pb.cloneValueWithNewIDs(valMap)}) @@ -109,7 +110,7 @@ func (pb *pageBuilder) clonePropertyWithPrimitiveValue(prop bson.D, newValue str result := make(bson.D, 0, len(prop)) for _, elem := range prop { if elem.Key == "$ID" { - result = append(result, bson.E{Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + result = append(result, bson.E{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}) } else if elem.Key == "Value" { if valMap, ok := elem.Value.(bson.D); ok { result = append(result, bson.E{Key: "Value", Value: pb.cloneValueWithUpdatedPrimitive(valMap, newValue)}) @@ -128,7 +129,7 @@ func (pb *pageBuilder) cloneValueWithUpdatedPrimitive(val bson.D, newValue strin result 
:= make(bson.D, 0, len(val)) for _, elem := range val { if elem.Key == "$ID" { - result = append(result, bson.E{Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + result = append(result, bson.E{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}) } else if elem.Key == "PrimitiveValue" { result = append(result, bson.E{Key: "PrimitiveValue", Value: newValue}) } else { @@ -144,7 +145,7 @@ func (pb *pageBuilder) clonePropertyClearingTextTemplate(prop bson.D) bson.D { result := make(bson.D, 0, len(prop)) for _, elem := range prop { if elem.Key == "$ID" { - result = append(result, bson.E{Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + result = append(result, bson.E{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}) } else if elem.Key == "Value" { if valMap, ok := elem.Value.(bson.D); ok { result = append(result, bson.E{Key: "Value", Value: pb.cloneValueClearingTextTemplate(valMap)}) @@ -163,7 +164,7 @@ func (pb *pageBuilder) cloneValueClearingTextTemplate(val bson.D) bson.D { result := make(bson.D, 0, len(val)) for _, elem := range val { if elem.Key == "$ID" { - result = append(result, bson.E{Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + result = append(result, bson.E{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}) } else if elem.Key == "TextTemplate" { result = append(result, bson.E{Key: "TextTemplate", Value: nil}) } else { @@ -189,7 +190,7 @@ func (pb *pageBuilder) clonePropertyWithExpression(prop bson.D, newExpr string) result := make(bson.D, 0, len(prop)) for _, elem := range prop { if elem.Key == "$ID" { - result = append(result, bson.E{Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + result = append(result, bson.E{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}) } else if elem.Key == "Value" { if valMap, ok := elem.Value.(bson.D); ok { result = append(result, bson.E{Key: "Value", Value: pb.cloneValueWithUpdatedExpression(valMap, newExpr)}) @@ -208,7 +209,7 @@ func (pb *pageBuilder) cloneValueWithUpdatedExpression(val bson.D, 
newExpr strin result := make(bson.D, 0, len(val)) for _, elem := range val { if elem.Key == "$ID" { - result = append(result, bson.E{Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + result = append(result, bson.E{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}) } else if elem.Key == "Expression" { result = append(result, bson.E{Key: "Expression", Value: newExpr}) } else { @@ -229,7 +230,7 @@ func deepCloneWithNewIDs(doc bson.D) bson.D { result := make(bson.D, 0, len(doc)) for _, elem := range doc { if elem.Key == "$ID" { - result = append(result, bson.E{Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + result = append(result, bson.E{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}) } else { result = append(result, bson.E{Key: elem.Key, Value: deepCloneValue(elem.Value)}) } diff --git a/mdl/executor/cmd_pages_builder_input_cloning_test.go b/mdl/executor/cmd_pages_builder_input_cloning_test.go index 830b77b0..0e11bf8b 100644 --- a/mdl/executor/cmd_pages_builder_input_cloning_test.go +++ b/mdl/executor/cmd_pages_builder_input_cloning_test.go @@ -5,15 +5,15 @@ package executor import ( "testing" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/bsonutil" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson/primitive" ) func TestDeepCloneWithNewIDs_RegeneratesAllIDs(t *testing.T) { - origID1 := mpr.IDToBsonBinary(mpr.GenerateID()) - origID2 := mpr.IDToBsonBinary(mpr.GenerateID()) - origID3 := mpr.IDToBsonBinary(mpr.GenerateID()) + origID1 := bsonutil.NewIDBsonBinary() + origID2 := bsonutil.NewIDBsonBinary() + origID3 := bsonutil.NewIDBsonBinary() doc := bson.D{ {Key: "$ID", Value: origID1}, @@ -75,8 +75,8 @@ func TestDeepCloneWithNewIDs_RegeneratesAllIDs(t *testing.T) { } func TestDeepCloneWithNewIDs_HandlesArrays(t *testing.T) { - origID := mpr.IDToBsonBinary(mpr.GenerateID()) - innerID := mpr.IDToBsonBinary(mpr.GenerateID()) + origID := bsonutil.NewIDBsonBinary() + innerID := bsonutil.NewIDBsonBinary() doc := 
bson.D{ {Key: "$ID", Value: origID}, @@ -107,7 +107,7 @@ func TestDeepCloneWithNewIDs_HandlesArrays(t *testing.T) { } func TestDeepCloneWithNewIDs_PreservesNil(t *testing.T) { - origID := mpr.IDToBsonBinary(mpr.GenerateID()) + origID := bsonutil.NewIDBsonBinary() doc := bson.D{ {Key: "$ID", Value: origID}, diff --git a/mdl/executor/cmd_pages_builder_input_datagrid.go b/mdl/executor/cmd_pages_builder_input_datagrid.go index e7d2a290..b86a98dc 100644 --- a/mdl/executor/cmd_pages_builder_input_datagrid.go +++ b/mdl/executor/cmd_pages_builder_input_datagrid.go @@ -7,6 +7,7 @@ import ( "strings" "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/bsonutil" "github.com/mendixlabs/mxcli/sdk/mpr" "github.com/mendixlabs/mxcli/sdk/pages" "go.mongodb.org/mongo-driver/bson" @@ -91,7 +92,7 @@ func (pb *pageBuilder) buildDataGrid2Property(entry pages.PropertyTypeIDEntry, d var attrRefBSON any if attrRef != "" { attrRefBSON = bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "DomainModels$AttributeRef"}, {Key: "Attribute", Value: attrRef}, {Key: "EntityRef", Value: nil}, @@ -99,14 +100,14 @@ func (pb *pageBuilder) buildDataGrid2Property(entry pages.PropertyTypeIDEntry, d } return bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetProperty"}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.PropertyTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.PropertyTypeID)}, {Key: "Value", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetValue"}, {Key: "Action", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: 
"Forms$NoAction"}, {Key: "DisabledDuringExecution", Value: true}, }}, @@ -125,7 +126,7 @@ func (pb *pageBuilder) buildDataGrid2Property(entry pages.PropertyTypeIDEntry, d {Key: "SourceVariable", Value: nil}, {Key: "TextTemplate", Value: nil}, {Key: "TranslatableValue", Value: nil}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.ValueTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.ValueTypeID)}, {Key: "Widgets", Value: bson.A{int32(2)}}, {Key: "XPathConstraint", Value: ""}, }}, @@ -139,7 +140,7 @@ func (pb *pageBuilder) updateDataGrid2Object(templateObject bson.D, propertyType for _, elem := range templateObject { if elem.Key == "$ID" { // Generate new ID for the object - result = append(result, bson.E{Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + result = append(result, bson.E{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}) } else if elem.Key == "Properties" { // Update properties if propsArr, ok := elem.Value.(bson.A); ok { @@ -237,13 +238,13 @@ func (pb *pageBuilder) cloneAndUpdateColumnsProperty(templateProp bson.D, column result := make(bson.D, 0, len(templateProp)) for _, elem := range templateProp { if elem.Key == "$ID" { - result = append(result, bson.E{Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + result = append(result, bson.E{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}) } else if elem.Key == "Value" { if valMap, ok := elem.Value.(bson.D); ok { newVal := make(bson.D, 0, len(valMap)) for _, ve := range valMap { if ve.Key == "$ID" { - newVal = append(newVal, bson.E{Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + newVal = append(newVal, bson.E{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}) } else if ve.Key == "Objects" { newVal = append(newVal, bson.E{Key: "Objects", Value: columnObjects}) } else if ve.Key == "Action" { @@ -292,7 +293,7 @@ func (pb *pageBuilder) cloneAndUpdateColumnObject(templateCol bson.D, col *ast.D result := make(bson.D, 0, len(templateCol)) for _, elem := range 
templateCol { if elem.Key == "$ID" { - result = append(result, bson.E{Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + result = append(result, bson.E{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}) } else if elem.Key == "Properties" { // Update properties if propsArr, ok := elem.Value.(bson.A); ok { @@ -529,11 +530,11 @@ func (pb *pageBuilder) buildDataGrid2Object(propertyTypeIDs map[string]pages.Pro // Build TypePointer - references the WidgetObjectType var typePointer any if objectTypeID != "" { - typePointer = mpr.IDToBsonBinary(objectTypeID) + typePointer = bsonutil.IDToBsonBinary(objectTypeID) } return bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetObject"}, {Key: "Properties", Value: properties}, {Key: "TypePointer", Value: typePointer}, @@ -556,14 +557,14 @@ func (pb *pageBuilder) buildDataGrid2DefaultProperty(entry pages.PropertyTypeIDE } return bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetProperty"}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.PropertyTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.PropertyTypeID)}, {Key: "Value", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetValue"}, {Key: "Action", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$NoAction"}, {Key: "DisabledDuringExecution", Value: true}, }}, @@ -582,7 +583,7 @@ func (pb *pageBuilder) buildDataGrid2DefaultProperty(entry pages.PropertyTypeIDE {Key: "SourceVariable", Value: nil}, {Key: "TextTemplate", Value: textTemplate}, {Key: "TranslatableValue", Value: nil}, - {Key: "TypePointer", Value: 
mpr.IDToBsonBinary(entry.ValueTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.ValueTypeID)}, {Key: "Widgets", Value: bson.A{int32(2)}}, {Key: "XPathConstraint", Value: ""}, }}, @@ -591,16 +592,16 @@ func (pb *pageBuilder) buildDataGrid2DefaultProperty(entry pages.PropertyTypeIDE func (pb *pageBuilder) buildEmptyClientTemplate() bson.D { return bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$ClientTemplate"}, {Key: "Fallback", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Texts$Text"}, {Key: "Items", Value: bson.A{int32(3)}}, // Empty items with version marker }}, {Key: "Parameters", Value: bson.A{int32(2)}}, // Empty parameters {Key: "Template", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Texts$Text"}, {Key: "Items", Value: bson.A{int32(3)}}, // Empty items with version marker }}, @@ -609,21 +610,21 @@ func (pb *pageBuilder) buildEmptyClientTemplate() bson.D { func (pb *pageBuilder) buildClientTemplateWithText(text string) bson.D { return bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$ClientTemplate"}, {Key: "Fallback", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Texts$Text"}, {Key: "Items", Value: bson.A{int32(3)}}, }}, {Key: "Parameters", Value: bson.A{int32(2)}}, {Key: "Template", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Texts$Text"}, {Key: "Items", Value: bson.A{ int32(3), bson.D{ - {Key: "$ID", Value: 
mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Texts$Translation"}, {Key: "LanguageCode", Value: "en_US"}, {Key: "Text", Value: text}, @@ -641,14 +642,14 @@ func (pb *pageBuilder) buildFiltersPlaceholderProperty(entry pages.PropertyTypeI } return bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetProperty"}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.PropertyTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.PropertyTypeID)}, {Key: "Value", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetValue"}, {Key: "Action", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$NoAction"}, {Key: "DisabledDuringExecution", Value: true}, }}, @@ -667,7 +668,7 @@ func (pb *pageBuilder) buildFiltersPlaceholderProperty(entry pages.PropertyTypeI {Key: "SourceVariable", Value: nil}, {Key: "TextTemplate", Value: nil}, {Key: "TranslatableValue", Value: nil}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.ValueTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.ValueTypeID)}, {Key: "Widgets", Value: widgetsArray}, {Key: "XPathConstraint", Value: ""}, }}, @@ -682,14 +683,14 @@ func (pb *pageBuilder) buildDataGrid2ColumnsProperty(entry pages.PropertyTypeIDE } return bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetProperty"}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.PropertyTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.PropertyTypeID)}, {Key: "Value", Value: bson.D{ - {Key: "$ID", Value: 
mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetValue"}, {Key: "Action", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$NoAction"}, {Key: "DisabledDuringExecution", Value: true}, }}, @@ -708,7 +709,7 @@ func (pb *pageBuilder) buildDataGrid2ColumnsProperty(entry pages.PropertyTypeIDE {Key: "SourceVariable", Value: nil}, {Key: "TextTemplate", Value: nil}, {Key: "TranslatableValue", Value: nil}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.ValueTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.ValueTypeID)}, {Key: "Widgets", Value: bson.A{int32(2)}}, {Key: "XPathConstraint", Value: ""}, }}, @@ -878,11 +879,11 @@ func (pb *pageBuilder) buildDataGrid2ColumnObject(col *ast.DataGridColumnDef, co // Column ObjectType pointer var typePointer any if columnObjectTypeID != "" { - typePointer = mpr.IDToBsonBinary(columnObjectTypeID) + typePointer = bsonutil.IDToBsonBinary(columnObjectTypeID) } return bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetObject"}, {Key: "Properties", Value: properties}, {Key: "TypePointer", Value: typePointer}, @@ -897,14 +898,14 @@ func (pb *pageBuilder) buildColumnDefaultProperty(entry pages.PropertyTypeIDEntr } return bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetProperty"}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.PropertyTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.PropertyTypeID)}, {Key: "Value", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: 
"CustomWidgets$WidgetValue"}, {Key: "Action", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$NoAction"}, {Key: "DisabledDuringExecution", Value: true}, }}, @@ -923,7 +924,7 @@ func (pb *pageBuilder) buildColumnDefaultProperty(entry pages.PropertyTypeIDEntr {Key: "SourceVariable", Value: nil}, {Key: "TextTemplate", Value: textTemplate}, {Key: "TranslatableValue", Value: nil}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.ValueTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.ValueTypeID)}, {Key: "Widgets", Value: bson.A{int32(2)}}, {Key: "XPathConstraint", Value: ""}, }}, @@ -932,14 +933,14 @@ func (pb *pageBuilder) buildColumnDefaultProperty(entry pages.PropertyTypeIDEntr func (pb *pageBuilder) buildColumnPrimitiveProperty(entry pages.PropertyTypeIDEntry, value string) bson.D { return bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetProperty"}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.PropertyTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.PropertyTypeID)}, {Key: "Value", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetValue"}, {Key: "Action", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$NoAction"}, {Key: "DisabledDuringExecution", Value: true}, }}, @@ -958,7 +959,7 @@ func (pb *pageBuilder) buildColumnPrimitiveProperty(entry pages.PropertyTypeIDEn {Key: "SourceVariable", Value: nil}, {Key: "TextTemplate", Value: nil}, {Key: "TranslatableValue", Value: nil}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.ValueTypeID)}, + {Key: "TypePointer", 
Value: bsonutil.IDToBsonBinary(entry.ValueTypeID)}, {Key: "Widgets", Value: bson.A{int32(2)}}, {Key: "XPathConstraint", Value: ""}, }}, @@ -967,14 +968,14 @@ func (pb *pageBuilder) buildColumnPrimitiveProperty(entry pages.PropertyTypeIDEn func (pb *pageBuilder) buildColumnExpressionProperty(entry pages.PropertyTypeIDEntry, expression string) bson.D { return bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetProperty"}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.PropertyTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.PropertyTypeID)}, {Key: "Value", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetValue"}, {Key: "Action", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$NoAction"}, {Key: "DisabledDuringExecution", Value: true}, }}, @@ -993,7 +994,7 @@ func (pb *pageBuilder) buildColumnExpressionProperty(entry pages.PropertyTypeIDE {Key: "SourceVariable", Value: nil}, {Key: "TextTemplate", Value: nil}, {Key: "TranslatableValue", Value: nil}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.ValueTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.ValueTypeID)}, {Key: "Widgets", Value: bson.A{int32(2)}}, {Key: "XPathConstraint", Value: ""}, }}, @@ -1006,21 +1007,21 @@ func (pb *pageBuilder) buildColumnAttributeProperty(entry pages.PropertyTypeIDEn var attributeRef any if strings.Count(attrPath, ".") >= 2 { attributeRef = bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "DomainModels$AttributeRef"}, {Key: "Attribute", Value: attrPath}, {Key: "EntityRef", Value: nil}, } } return bson.D{ - 
{Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetProperty"}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.PropertyTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.PropertyTypeID)}, {Key: "Value", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetValue"}, {Key: "Action", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$NoAction"}, {Key: "DisabledDuringExecution", Value: true}, }}, @@ -1039,7 +1040,7 @@ func (pb *pageBuilder) buildColumnAttributeProperty(entry pages.PropertyTypeIDEn {Key: "SourceVariable", Value: nil}, {Key: "TextTemplate", Value: nil}, {Key: "TranslatableValue", Value: nil}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.ValueTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.ValueTypeID)}, {Key: "Widgets", Value: bson.A{int32(2)}}, {Key: "XPathConstraint", Value: ""}, }}, @@ -1051,14 +1052,14 @@ func (pb *pageBuilder) buildColumnHeaderProperty(entry pages.PropertyTypeIDEntry textTemplate := pb.buildClientTemplateWithText(caption) return bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetProperty"}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.PropertyTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.PropertyTypeID)}, {Key: "Value", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetValue"}, {Key: "Action", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: 
bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$NoAction"}, {Key: "DisabledDuringExecution", Value: true}, }}, @@ -1077,7 +1078,7 @@ func (pb *pageBuilder) buildColumnHeaderProperty(entry pages.PropertyTypeIDEntry {Key: "SourceVariable", Value: nil}, {Key: "TextTemplate", Value: textTemplate}, {Key: "TranslatableValue", Value: nil}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.ValueTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.ValueTypeID)}, {Key: "Widgets", Value: bson.A{int32(2)}}, {Key: "XPathConstraint", Value: ""}, }}, @@ -1099,14 +1100,14 @@ func (pb *pageBuilder) buildColumnContentProperty(entry pages.PropertyTypeIDEntr } return bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetProperty"}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.PropertyTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.PropertyTypeID)}, {Key: "Value", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$WidgetValue"}, {Key: "Action", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "Forms$NoAction"}, {Key: "DisabledDuringExecution", Value: true}, }}, @@ -1125,7 +1126,7 @@ func (pb *pageBuilder) buildColumnContentProperty(entry pages.PropertyTypeIDEntr {Key: "SourceVariable", Value: nil}, {Key: "TextTemplate", Value: nil}, {Key: "TranslatableValue", Value: nil}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(entry.ValueTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(entry.ValueTypeID)}, {Key: "Widgets", Value: widgetsArray}, {Key: "XPathConstraint", Value: ""}, }}, diff --git a/mdl/executor/cmd_pages_builder_input_filters.go b/mdl/executor/cmd_pages_builder_input_filters.go index 
58b6cee7..eadf5943 100644 --- a/mdl/executor/cmd_pages_builder_input_filters.go +++ b/mdl/executor/cmd_pages_builder_input_filters.go @@ -5,8 +5,9 @@ package executor import ( "strings" + "github.com/mendixlabs/mxcli/mdl/bsonutil" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/domainmodel" - "github.com/mendixlabs/mxcli/sdk/mpr" "github.com/mendixlabs/mxcli/sdk/pages" "github.com/mendixlabs/mxcli/sdk/widgets" "go.mongodb.org/mongo-driver/bson" @@ -106,7 +107,7 @@ func (pb *pageBuilder) findAttributeType(attrPath string) domainmodel.AttributeT func (pb *pageBuilder) buildFilterWidgetBSON(widgetID, filterName string) bson.D { // Load the filter widget template - rawType, rawObject, propertyTypeIDs, objectTypeID, err := widgets.GetTemplateFullBSON(widgetID, mpr.GenerateID, pb.reader.Path()) + rawType, rawObject, propertyTypeIDs, objectTypeID, err := widgets.GetTemplateFullBSON(widgetID, types.GenerateID, pb.reader.Path()) if err != nil || rawType == nil { // Fallback: create minimal filter widget structure return pb.buildMinimalFilterWidgetBSON(widgetID, filterName) @@ -114,7 +115,7 @@ func (pb *pageBuilder) buildFilterWidgetBSON(widgetID, filterName string) bson.D // The widget structure is: CustomWidgets$CustomWidget with Type and Object widgetBSON := bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$CustomWidget"}, {Key: "Editable", Value: "Inherited"}, {Key: "Name", Value: filterName}, @@ -138,9 +139,9 @@ func (pb *pageBuilder) setFilterWidgetLinkedDsAuto(widget bson.D, propertyTypeID } func (pb *pageBuilder) buildMinimalFilterWidgetBSON(widgetID, filterName string) bson.D { - typeID := mpr.GenerateID() - objectTypeID := mpr.GenerateID() - objectID := mpr.GenerateID() + typeID := types.GenerateID() + objectTypeID := types.GenerateID() + objectID := types.GenerateID() // Get widget type name based on ID var widgetTypeName string @@ 
-158,23 +159,23 @@ func (pb *pageBuilder) buildMinimalFilterWidgetBSON(widgetID, filterName string) } return bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}, + {Key: "$ID", Value: bsonutil.NewIDBsonBinary()}, {Key: "$Type", Value: "CustomWidgets$CustomWidget"}, {Key: "Editable", Value: "Inherited"}, {Key: "Name", Value: filterName}, {Key: "Object", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(objectID)}, + {Key: "$ID", Value: bsonutil.IDToBsonBinary(objectID)}, {Key: "$Type", Value: "CustomWidgets$WidgetObject"}, {Key: "Properties", Value: bson.A{int32(2)}}, - {Key: "TypePointer", Value: mpr.IDToBsonBinary(objectTypeID)}, + {Key: "TypePointer", Value: bsonutil.IDToBsonBinary(objectTypeID)}, }}, {Key: "TabIndex", Value: int32(0)}, {Key: "Type", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(typeID)}, + {Key: "$ID", Value: bsonutil.IDToBsonBinary(typeID)}, {Key: "$Type", Value: "CustomWidgets$CustomWidgetType"}, {Key: "HelpUrl", Value: ""}, {Key: "ObjectType", Value: bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(objectTypeID)}, + {Key: "$ID", Value: bsonutil.IDToBsonBinary(objectTypeID)}, {Key: "$Type", Value: "CustomWidgets$WidgetObjectType"}, {Key: "PropertyTypes", Value: bson.A{int32(2)}}, }}, diff --git a/mdl/executor/cmd_pages_builder_v3_pluggable.go b/mdl/executor/cmd_pages_builder_v3_pluggable.go index 072959f7..b79db2b9 100644 --- a/mdl/executor/cmd_pages_builder_v3_pluggable.go +++ b/mdl/executor/cmd_pages_builder_v3_pluggable.go @@ -9,7 +9,9 @@ import ( "go.mongodb.org/mongo-driver/bson" "github.com/mendixlabs/mxcli/mdl/ast" + "github.com/mendixlabs/mxcli/mdl/bsonutil" mdlerrors "github.com/mendixlabs/mxcli/mdl/errors" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/mpr" ) @@ -24,7 +26,7 @@ func (pb *pageBuilder) buildGallerySelectionProperty(propMap bson.D, selectionMo for _, elem := range propMap { if elem.Key == "$ID" { // Generate new ID - result = append(result, bson.E{Key: "$ID", 
Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + result = append(result, bson.E{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}) } else if elem.Key == "Value" { // Clone Value and update Selection if valueMap, ok := elem.Value.(bson.D); ok { @@ -46,7 +48,7 @@ func (pb *pageBuilder) cloneGallerySelectionValue(valueMap bson.D, selectionMode for _, elem := range valueMap { if elem.Key == "$ID" { - result = append(result, bson.E{Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + result = append(result, bson.E{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}) } else if elem.Key == "Selection" { // Update selection mode result = append(result, bson.E{Key: "Selection", Value: selectionMode}) @@ -71,7 +73,7 @@ func (pb *pageBuilder) cloneActionWithNewID(actionMap bson.D) bson.D { for _, elem := range actionMap { if elem.Key == "$ID" { - result = append(result, bson.E{Key: "$ID", Value: mpr.IDToBsonBinary(mpr.GenerateID())}) + result = append(result, bson.E{Key: "$ID", Value: bsonutil.NewIDBsonBinary()}) } else { result = append(result, elem) } @@ -101,24 +103,24 @@ func (pb *pageBuilder) createAttributeObject(attributePath string, objectTypeID, return nil, mdlerrors.NewValidationf("invalid attribute path %q: expected Module.Entity.Attribute format", attributePath) } return bson.D{ - {Key: "$ID", Value: hexToBytes(mpr.GenerateID())}, + {Key: "$ID", Value: hexToBytes(types.GenerateID())}, {Key: "$Type", Value: "CustomWidgets$WidgetObject"}, {Key: "Properties", Value: []any{ int32(2), bson.D{ - {Key: "$ID", Value: hexToBytes(mpr.GenerateID())}, + {Key: "$ID", Value: hexToBytes(types.GenerateID())}, {Key: "$Type", Value: "CustomWidgets$WidgetProperty"}, {Key: "TypePointer", Value: hexToBytes(propertyTypeID)}, {Key: "Value", Value: bson.D{ - {Key: "$ID", Value: hexToBytes(mpr.GenerateID())}, + {Key: "$ID", Value: hexToBytes(types.GenerateID())}, {Key: "$Type", Value: "CustomWidgets$WidgetValue"}, {Key: "Action", Value: bson.D{ - {Key: "$ID", Value: 
hexToBytes(mpr.GenerateID())}, + {Key: "$ID", Value: hexToBytes(types.GenerateID())}, {Key: "$Type", Value: "Forms$NoAction"}, {Key: "DisabledDuringExecution", Value: true}, }}, {Key: "AttributeRef", Value: bson.D{ - {Key: "$ID", Value: hexToBytes(mpr.GenerateID())}, + {Key: "$ID", Value: hexToBytes(types.GenerateID())}, {Key: "$Type", Value: "DomainModels$AttributeRef"}, {Key: "Attribute", Value: attributePath}, {Key: "EntityRef", Value: nil}, diff --git a/mdl/executor/widget_engine_test.go b/mdl/executor/widget_engine_test.go index 23069ed6..bb4502de 100644 --- a/mdl/executor/widget_engine_test.go +++ b/mdl/executor/widget_engine_test.go @@ -8,7 +8,7 @@ import ( "github.com/mendixlabs/mxcli/mdl/ast" "github.com/mendixlabs/mxcli/model" - "github.com/mendixlabs/mxcli/sdk/mpr" + "github.com/mendixlabs/mxcli/mdl/types" "github.com/mendixlabs/mxcli/sdk/pages" "go.mongodb.org/mongo-driver/bson" ) @@ -540,7 +540,7 @@ func TestSetChildWidgets(t *testing.T) { func TestOpSelection(t *testing.T) { // Call the real opSelection function with a properly structured widget BSON. 
typePointerBytes := []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16} - typePointerUUID := mpr.BlobToUUID(typePointerBytes) + typePointerUUID := types.BlobToUUID(typePointerBytes) widgetObj := bson.D{ {Key: "Properties", Value: bson.A{ diff --git a/mdl/executor/widget_operations.go b/mdl/executor/widget_operations.go index 6d183314..183d57c9 100644 --- a/mdl/executor/widget_operations.go +++ b/mdl/executor/widget_operations.go @@ -5,8 +5,8 @@ package executor import ( "log" + "github.com/mendixlabs/mxcli/mdl/bsonutil" "github.com/mendixlabs/mxcli/mdl/types" - "github.com/mendixlabs/mxcli/sdk/mpr" "github.com/mendixlabs/mxcli/sdk/pages" "go.mongodb.org/mongo-driver/bson" ) @@ -218,7 +218,7 @@ func updateTemplateText(tmpl bson.D, text string) bson.D { updated = append(updated, bson.E{Key: "Items", Value: bson.A{ int32(3), bson.D{ - {Key: "$ID", Value: mpr.IDToBsonBinary(types.GenerateID())}, + {Key: "$ID", Value: bsonutil.IDToBsonBinary(types.GenerateID())}, {Key: "$Type", Value: "Texts$Translation"}, {Key: "LanguageCode", Value: "en_US"}, {Key: "Text", Value: text}, From c546aa9e3477eaacb33a79de2849b5fe0f1fabe7 Mon Sep 17 00:00:00 2001 From: Andrew Vasilyev Date: Sun, 19 Apr 2026 20:36:35 +0200 Subject: [PATCH 11/16] fix: set UUID v4 version/variant bits in GenerateDeterministicID --- mdl/types/id.go | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mdl/types/id.go b/mdl/types/id.go index ea7299ec..bb99dd27 100644 --- a/mdl/types/id.go +++ b/mdl/types/id.go @@ -27,10 +27,13 @@ func GenerateID() string { b[10], b[11], b[12], b[13], b[14], b[15]) } -// GenerateDeterministicID generates a stable UUID from a seed string. +// GenerateDeterministicID generates a stable UUID v4 from a seed string. // Used for System module entities that aren't in the MPR but need consistent IDs. 
func GenerateDeterministicID(seed string) string { h := sha256.Sum256([]byte(seed)) + // Set UUID version 4 and variant bits on the hash bytes + h[6] = (h[6] & 0x0f) | 0x40 // Version 4 + h[8] = (h[8] & 0x3f) | 0x80 // Variant is 10 return fmt.Sprintf("%08x-%04x-%04x-%04x-%012x", h[0:4], h[4:6], h[6:8], h[8:10], h[10:16]) } From 26b24780044c265e73179ca30960f61a12b906ea Mon Sep 17 00:00:00 2001 From: Andrew Vasilyev Date: Sun, 19 Apr 2026 20:43:47 +0200 Subject: [PATCH 12/16] fix: panic on invalid UUID in IDToBsonBinary, use mpr aliases in convert.go signatures --- mdl/backend/mpr/convert.go | 36 ++++++++++++++++++------------------ mdl/bsonutil/bsonutil.go | 3 ++- 2 files changed, 20 insertions(+), 19 deletions(-) diff --git a/mdl/backend/mpr/convert.go b/mdl/backend/mpr/convert.go index 1cfd39ca..96c7766d 100644 --- a/mdl/backend/mpr/convert.go +++ b/mdl/backend/mpr/convert.go @@ -29,7 +29,7 @@ func convertProjectVersion(v *version.ProjectVersion) *types.ProjectVersion { } } -func convertFolderInfoSlice(in []*types.FolderInfo, err error) ([]*types.FolderInfo, error) { +func convertFolderInfoSlice(in []*mpr.FolderInfo, err error) ([]*types.FolderInfo, error) { if err != nil || in == nil { return nil, err } @@ -40,7 +40,7 @@ func convertFolderInfoSlice(in []*types.FolderInfo, err error) ([]*types.FolderI return out, nil } -func convertUnitInfoSlice(in []*types.UnitInfo, err error) ([]*types.UnitInfo, error) { +func convertUnitInfoSlice(in []*mpr.UnitInfo, err error) ([]*types.UnitInfo, error) { if err != nil || in == nil { return nil, err } @@ -65,7 +65,7 @@ func convertRenameHitSlice(in []mpr.RenameHit, err error) ([]types.RenameHit, er return out, nil } -func convertRawUnitSlice(in []*types.RawUnit, err error) ([]*types.RawUnit, error) { +func convertRawUnitSlice(in []*mpr.RawUnit, err error) ([]*types.RawUnit, error) { if err != nil || in == nil { return nil, err } @@ -126,7 +126,7 @@ func convertRawCustomWidgetTypeSlice(in []*mpr.RawCustomWidgetType, err error) 
( return out, nil } -func convertJavaActionSlice(in []*types.JavaAction, err error) ([]*types.JavaAction, error) { +func convertJavaActionSlice(in []*mpr.JavaAction, err error) ([]*types.JavaAction, error) { if err != nil || in == nil { return nil, err } @@ -142,7 +142,7 @@ func convertJavaActionSlice(in []*types.JavaAction, err error) ([]*types.JavaAct return out, nil } -func convertJavaScriptActionSlice(in []*types.JavaScriptAction, err error) ([]*types.JavaScriptAction, error) { +func convertJavaScriptActionSlice(in []*mpr.JavaScriptAction, err error) ([]*types.JavaScriptAction, error) { if err != nil || in == nil { return nil, err } @@ -153,14 +153,14 @@ func convertJavaScriptActionSlice(in []*types.JavaScriptAction, err error) ([]*t return out, nil } -func convertJavaScriptActionPtr(in *types.JavaScriptAction, err error) (*types.JavaScriptAction, error) { +func convertJavaScriptActionPtr(in *mpr.JavaScriptAction, err error) (*types.JavaScriptAction, error) { if err != nil || in == nil { return nil, err } return convertJavaScriptAction(in), nil } -func convertJavaScriptAction(in *types.JavaScriptAction) *types.JavaScriptAction { +func convertJavaScriptAction(in *mpr.JavaScriptAction) *types.JavaScriptAction { return &types.JavaScriptAction{ BaseElement: in.BaseElement, ContainerID: in.ContainerID, @@ -177,7 +177,7 @@ func convertJavaScriptAction(in *types.JavaScriptAction) *types.JavaScriptAction } } -func convertNavDocSlice(in []*types.NavigationDocument, err error) ([]*types.NavigationDocument, error) { +func convertNavDocSlice(in []*mpr.NavigationDocument, err error) ([]*types.NavigationDocument, error) { if err != nil || in == nil { return nil, err } @@ -188,14 +188,14 @@ func convertNavDocSlice(in []*types.NavigationDocument, err error) ([]*types.Nav return out, nil } -func convertNavDocPtr(in *types.NavigationDocument, err error) (*types.NavigationDocument, error) { +func convertNavDocPtr(in *mpr.NavigationDocument, err error) (*types.NavigationDocument, 
error) { if err != nil || in == nil { return nil, err } return convertNavDoc(in), nil } -func convertNavDoc(in *types.NavigationDocument) *types.NavigationDocument { +func convertNavDoc(in *mpr.NavigationDocument) *types.NavigationDocument { nd := &types.NavigationDocument{ BaseElement: in.BaseElement, ContainerID: in.ContainerID, @@ -210,7 +210,7 @@ func convertNavDoc(in *types.NavigationDocument) *types.NavigationDocument { return nd } -func convertNavProfile(in *types.NavigationProfile) *types.NavigationProfile { +func convertNavProfile(in *mpr.NavigationProfile) *types.NavigationProfile { p := &types.NavigationProfile{ Name: in.Name, Kind: in.Kind, @@ -246,7 +246,7 @@ func convertNavProfile(in *types.NavigationProfile) *types.NavigationProfile { return p } -func convertNavMenuItem(in *types.NavMenuItem) *types.NavMenuItem { +func convertNavMenuItem(in *mpr.NavMenuItem) *types.NavMenuItem { mi := &types.NavMenuItem{ Caption: in.Caption, Page: in.Page, Microflow: in.Microflow, ActionType: in.ActionType, } @@ -319,7 +319,7 @@ func unconvertEntityAccessRevocation(in types.EntityAccessRevocation) mpr.Entity } } -func convertJsonStructureSlice(in []*types.JsonStructure, err error) ([]*types.JsonStructure, error) { +func convertJsonStructureSlice(in []*mpr.JsonStructure, err error) ([]*types.JsonStructure, error) { if err != nil || in == nil { return nil, err } @@ -330,14 +330,14 @@ func convertJsonStructureSlice(in []*types.JsonStructure, err error) ([]*types.J return out, nil } -func convertJsonStructurePtr(in *types.JsonStructure, err error) (*types.JsonStructure, error) { +func convertJsonStructurePtr(in *mpr.JsonStructure, err error) (*types.JsonStructure, error) { if err != nil || in == nil { return nil, err } return convertJsonStructure(in), nil } -func convertJsonStructure(in *types.JsonStructure) *types.JsonStructure { +func convertJsonStructure(in *mpr.JsonStructure) *types.JsonStructure { js := &types.JsonStructure{ BaseElement: in.BaseElement, ContainerID: 
in.ContainerID, @@ -356,7 +356,7 @@ func convertJsonStructure(in *types.JsonStructure) *types.JsonStructure { return js } -func convertJsonElement(in *types.JsonElement) *types.JsonElement { +func convertJsonElement(in *mpr.JsonElement) *types.JsonElement { e := &types.JsonElement{ ExposedName: in.ExposedName, ExposedItemName: in.ExposedItemName, Path: in.Path, ElementType: in.ElementType, PrimitiveType: in.PrimitiveType, @@ -411,7 +411,7 @@ func unconvertJsonElement(in *types.JsonElement) *types.JsonElement { return e } -func convertImageCollectionSlice(in []*types.ImageCollection, err error) ([]*types.ImageCollection, error) { +func convertImageCollectionSlice(in []*mpr.ImageCollection, err error) ([]*types.ImageCollection, error) { if err != nil || in == nil { return nil, err } @@ -422,7 +422,7 @@ func convertImageCollectionSlice(in []*types.ImageCollection, err error) ([]*typ return out, nil } -func convertImageCollection(in *types.ImageCollection) *types.ImageCollection { +func convertImageCollection(in *mpr.ImageCollection) *types.ImageCollection { ic := &types.ImageCollection{ BaseElement: in.BaseElement, ContainerID: in.ContainerID, diff --git a/mdl/bsonutil/bsonutil.go b/mdl/bsonutil/bsonutil.go index 558497c3..dfa9266d 100644 --- a/mdl/bsonutil/bsonutil.go +++ b/mdl/bsonutil/bsonutil.go @@ -11,10 +11,11 @@ import ( ) // IDToBsonBinary converts a hex UUID string to a BSON binary value. +// Panics if id is not a valid UUID — an invalid ID at this layer is always a programming error. 
func IDToBsonBinary(id string) primitive.Binary { blob := types.UUIDToBlob(id) if blob == nil || len(blob) != 16 { - blob = types.UUIDToBlob(types.GenerateID()) + panic("bsonutil.IDToBsonBinary: invalid UUID: " + id) } return primitive.Binary{ Subtype: 0x00, From c22227c11dfb996f6249cfa2ab610210e38aec20 Mon Sep 17 00:00:00 2001 From: Andrew Vasilyev Date: Sun, 19 Apr 2026 20:50:07 +0200 Subject: [PATCH 13/16] docs: fix IDToBsonBinary comment to say UUID string, not hex UUID --- mdl/bsonutil/bsonutil.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mdl/bsonutil/bsonutil.go b/mdl/bsonutil/bsonutil.go index dfa9266d..19120acb 100644 --- a/mdl/bsonutil/bsonutil.go +++ b/mdl/bsonutil/bsonutil.go @@ -10,7 +10,7 @@ import ( "go.mongodb.org/mongo-driver/bson/primitive" ) -// IDToBsonBinary converts a hex UUID string to a BSON binary value. +// IDToBsonBinary converts a UUID string to a BSON binary value. // Panics if id is not a valid UUID — an invalid ID at this layer is always a programming error. 
func IDToBsonBinary(id string) primitive.Binary { blob := types.UUIDToBlob(id) From 957a1e7f98d211a7b684053b36aa7f81601f5732 Mon Sep 17 00:00:00 2001 From: Andrew Vasilyev Date: Sun, 19 Apr 2026 20:53:24 +0200 Subject: [PATCH 14/16] refactor: add typed consts for ContainerKind, InsertPosition, PluggablePropertyOp; add TODO comments in panic stubs --- mdl/backend/mpr/backend.go | 12 +++++----- mdl/backend/mutation.go | 48 +++++++++++++++++++++++++++++++------- 2 files changed, 45 insertions(+), 15 deletions(-) diff --git a/mdl/backend/mpr/backend.go b/mdl/backend/mpr/backend.go index 54bce183..431be412 100644 --- a/mdl/backend/mpr/backend.go +++ b/mdl/backend/mpr/backend.go @@ -729,31 +729,31 @@ func (b *MprBackend) DeleteAgentEditorAgent(id string) error { // PageMutationBackend func (b *MprBackend) OpenPageForMutation(unitID model.ID) (backend.PageMutator, error) { - panic("MprBackend.OpenPageForMutation not yet implemented") + panic("MprBackend.OpenPageForMutation not yet implemented") // TODO: implement in PR #237 } // --------------------------------------------------------------------------- // WorkflowMutationBackend func (b *MprBackend) OpenWorkflowForMutation(unitID model.ID) (backend.WorkflowMutator, error) { - panic("MprBackend.OpenWorkflowForMutation not yet implemented") + panic("MprBackend.OpenWorkflowForMutation not yet implemented") // TODO: implement in PR #237 } // --------------------------------------------------------------------------- // WidgetSerializationBackend func (b *MprBackend) SerializeWidget(w pages.Widget) (any, error) { - panic("MprBackend.SerializeWidget not yet implemented") + panic("MprBackend.SerializeWidget not yet implemented") // TODO: implement in PR #237 } func (b *MprBackend) SerializeClientAction(a pages.ClientAction) (any, error) { - panic("MprBackend.SerializeClientAction not yet implemented") + panic("MprBackend.SerializeClientAction not yet implemented") // TODO: implement in PR #237 } func (b *MprBackend) 
SerializeDataSource(ds pages.DataSource) (any, error) { - panic("MprBackend.SerializeDataSource not yet implemented") + panic("MprBackend.SerializeDataSource not yet implemented") // TODO: implement in PR #237 } func (b *MprBackend) SerializeWorkflowActivity(a workflows.WorkflowActivity) (any, error) { - panic("MprBackend.SerializeWorkflowActivity not yet implemented") + panic("MprBackend.SerializeWorkflowActivity not yet implemented") // TODO: implement in PR #237 } diff --git a/mdl/backend/mutation.go b/mdl/backend/mutation.go index cb0846df..3dacdd33 100644 --- a/mdl/backend/mutation.go +++ b/mdl/backend/mutation.go @@ -8,12 +8,44 @@ import ( "github.com/mendixlabs/mxcli/sdk/workflows" ) +// ContainerKind represents the type of page container (page, layout, or snippet). +type ContainerKind string + +const ( + ContainerPage ContainerKind = "page" + ContainerLayout ContainerKind = "layout" + ContainerSnippet ContainerKind = "snippet" +) + +// InsertPosition represents where a widget is inserted relative to a target. +type InsertPosition string + +const ( + InsertBefore InsertPosition = "before" + InsertAfter InsertPosition = "after" +) + +// PluggablePropertyOp represents the operation type for SetPluggableProperty. +type PluggablePropertyOp string + +const ( + PluggableOpAttribute PluggablePropertyOp = "attribute" + PluggableOpAssociation PluggablePropertyOp = "association" + PluggableOpPrimitive PluggablePropertyOp = "primitive" + PluggableOpSelection PluggablePropertyOp = "selection" + PluggableOpDataSource PluggablePropertyOp = "datasource" + PluggableOpWidgets PluggablePropertyOp = "widgets" + PluggableOpTextTemplate PluggablePropertyOp = "texttemplate" + PluggableOpAction PluggablePropertyOp = "action" + PluggableOpAttributeObjects PluggablePropertyOp = "attributeObjects" +) + // PageMutator provides fine-grained mutation operations on a single // page, layout, or snippet unit. Obtain one via PageMutationBackend.OpenPageForMutation. 
// All methods operate on the in-memory representation; call Save to persist. type PageMutator interface { - // ContainerType returns "page", "layout", or "snippet". - ContainerType() string + // ContainerType returns the kind of container (page, layout, or snippet). + ContainerType() ContainerKind // --- Widget property operations --- @@ -31,8 +63,8 @@ type PageMutator interface { // --- Widget tree operations --- // InsertWidget inserts serialized widgets at the given position - // relative to the target widget. Position is "before" or "after". - InsertWidget(targetWidget string, position string, widgets []pages.Widget) error + // relative to the target widget. + InsertWidget(targetWidget string, position InsertPosition, widgets []pages.Widget) error // DropWidget removes widgets by name from the tree. DropWidget(widgetRefs []string) error @@ -56,11 +88,9 @@ type PageMutator interface { // --- Pluggable widget operations --- // SetPluggableProperty sets a typed property on a pluggable widget's object. - // propKey is the Mendix property key, opName is the operation type - // ("attribute", "association", "primitive", "selection", "datasource", - // "widgets", "texttemplate", "action", "attributeObjects"). - // ctx carries the operation-specific values. - SetPluggableProperty(widgetRef string, propKey string, opName string, ctx PluggablePropertyContext) error + // propKey is the Mendix property key, op identifies the operation type, + // and ctx carries the operation-specific values. 
+ SetPluggableProperty(widgetRef string, propKey string, op PluggablePropertyOp, ctx PluggablePropertyContext) error // --- Introspection --- From 741e6897c1397deecb48fdfb7839e3e0aca712cb Mon Sep 17 00:00:00 2001 From: Andrew Vasilyev Date: Sun, 19 Apr 2026 20:55:26 +0200 Subject: [PATCH 15/16] test: add bsonutil package tests for IDToBsonBinary, BsonBinaryToID, NewIDBsonBinary --- mdl/bsonutil/bsonutil_test.go | 75 +++++++++++++++++++++++++++++++++++ 1 file changed, 75 insertions(+) create mode 100644 mdl/bsonutil/bsonutil_test.go diff --git a/mdl/bsonutil/bsonutil_test.go b/mdl/bsonutil/bsonutil_test.go new file mode 100644 index 00000000..144642c2 --- /dev/null +++ b/mdl/bsonutil/bsonutil_test.go @@ -0,0 +1,75 @@ +// SPDX-License-Identifier: Apache-2.0 + +package bsonutil + +import ( + "testing" + + "github.com/mendixlabs/mxcli/mdl/types" +) + +func TestIDToBsonBinary_ValidUUID(t *testing.T) { + id := "550e8400-e29b-41d4-a716-446655440000" + bin := IDToBsonBinary(id) + + if bin.Subtype != 0x00 { + t.Errorf("expected subtype 0x00, got 0x%02x", bin.Subtype) + } + if len(bin.Data) != 16 { + t.Errorf("expected 16 bytes, got %d", len(bin.Data)) + } +} + +func TestIDToBsonBinary_PanicsOnInvalidUUID(t *testing.T) { + defer func() { + if r := recover(); r == nil { + t.Error("expected panic on invalid UUID, got none") + } + }() + IDToBsonBinary("not-a-uuid") +} + +func TestIDToBsonBinary_PanicsOnEmptyString(t *testing.T) { + defer func() { + if r := recover(); r == nil { + t.Error("expected panic on empty string, got none") + } + }() + IDToBsonBinary("") +} + +func TestBsonBinaryToID_Roundtrip(t *testing.T) { + id := "550e8400-e29b-41d4-a716-446655440000" + bin := IDToBsonBinary(id) + got := BsonBinaryToID(bin) + if got != id { + t.Errorf("roundtrip failed: got %q, want %q", got, id) + } +} + +func TestNewIDBsonBinary_ProducesValidUUID(t *testing.T) { + bin := NewIDBsonBinary() + if bin.Subtype != 0x00 { + t.Errorf("expected subtype 0x00, got 0x%02x", bin.Subtype) + } + 
if len(bin.Data) != 16 { + t.Errorf("expected 16 bytes, got %d", len(bin.Data)) + } + + // Convert back and validate UUID format + id := BsonBinaryToID(bin) + if !types.ValidateID(id) { + t.Errorf("generated ID is not valid UUID format: %q", id) + } +} + +func TestNewIDBsonBinary_Uniqueness(t *testing.T) { + seen := make(map[string]bool) + for i := 0; i < 100; i++ { + id := BsonBinaryToID(NewIDBsonBinary()) + if seen[id] { + t.Fatalf("duplicate ID generated: %q", id) + } + seen[id] = true + } +} From f07d6fbac53905eeaa3b18a5d48eaddab7b6c501 Mon Sep 17 00:00:00 2001 From: Andrew Vasilyev Date: Sun, 19 Apr 2026 21:19:14 +0200 Subject: [PATCH 16/16] test: add GenerateDeterministicID UUID v4 compliance test --- mdl/types/id_test.go | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/mdl/types/id_test.go b/mdl/types/id_test.go index 81e210a2..d30a5e39 100644 --- a/mdl/types/id_test.go +++ b/mdl/types/id_test.go @@ -39,6 +39,23 @@ func TestGenerateID_V4Bits(t *testing.T) { } } +func TestGenerateDeterministicID_V4Bits(t *testing.T) { + seeds := []string{"test", "hello", "System.User", "System.Session", ""} + for _, seed := range seeds { + id := GenerateDeterministicID(seed) + clean := strings.ReplaceAll(id, "-", "") + // Version nibble at hex position 12 should be '4' + if clean[12] != '4' { + t.Errorf("seed %q: expected version nibble '4', got %q in %q", seed, string(clean[12]), id) + } + // Variant nibble at hex position 16 should be 8, 9, a, or b + v := clean[16] + if v != '8' && v != '9' && v != 'a' && v != 'b' { + t.Errorf("seed %q: expected variant nibble in [89ab], got %q in %q", seed, string(v), id) + } + } +} + func TestGenerateDeterministicID_Stable(t *testing.T) { id1 := GenerateDeterministicID("test-seed") id2 := GenerateDeterministicID("test-seed")