Skip to content

Commit 88c271c

Browse files
neiljbrookes, tobio, and Copilot
authored
fix(ml): anomaly_detection_job import/refresh mismatch (#1550)
* fix(ml): make anomaly_detection_job import resilient Fixes terraform import/refresh failures for elasticstack_elasticsearch_ml_anomaly_detection_job by keeping ImportState sparse (id/job_id only) and allowing analysis_config to be null during import before Read populates it. Also ensures empty nested lists in analysis_config (e.g. categorization_filters/influencers/custom_rules) are always typed to avoid DynamicPseudoType conversion errors. AI assistance: This change was implemented with the help of an AI coding assistant (Cursor + GPT). * Update internal/elasticsearch/ml/anomaly_detection_job/resource.go Co-authored-by: Toby Brain <[email protected]> * Remove unused import in anomaly_detection_job resource file * Add ImportState testing for anomaly_detection_job acceptance tests This update introduces ImportState testing for the elasticstack_elasticsearch_ml_anomaly_detection_job resource, ensuring that the job can be imported correctly with the necessary configuration variables. The test verifies the import functionality and enhances the robustness of the acceptance tests. * Extract duplicated list type handling logic into reusable helper (#1559) * Initial plan * Refactor: extract duplicated list type handling into reusable helper function - Created EnsureTypedList helper in typeutils package - Replaced three instances of duplicated logic for handling untyped zero-value lists - Removed unused strings import from resource.go - All three instances (CategorizationFilters, Influencers, CustomRules) now use the centralized helper Co-authored-by: neiljbrookes <[email protected]> --------- Co-authored-by: copilot-swe-agent[bot] <[email protected]> Co-authored-by: neiljbrookes <[email protected]> * Update CHANGELOG.md to include a fix for `elasticstack_elasticsearch_ml_anomaly_detection_job` import, enhancing resilience to sparse state values. 
--------- Co-authored-by: Toby Brain <[email protected]> Co-authored-by: Copilot <[email protected]> Co-authored-by: neiljbrookes <[email protected]>
1 parent 946b987 commit 88c271c

File tree

5 files changed

+95
-39
lines changed

5 files changed

+95
-39
lines changed

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -73,6 +73,7 @@ inputs = {
7373
- Fix the enabled property being ignored in `elasticstack_kibana_alerting_rule` ([#1527](https://github.com/elastic/terraform-provider-elasticstack/pull/1527))
7474
- Add `advanced_monitoring_options` to `elasticstack_fleet_agent_policy` to configure HTTP monitoring endpoint and diagnostics settings ([#1537](https://github.com/elastic/terraform-provider-elasticstack/pull/1537))
7575
- Move the `input` block to an `inputs` map in `elasticstack_fleet_integration_policy` ([#1482](https://github.com/elastic/terraform-provider-elasticstack/pull/1482))
76+
- Fix `elasticstack_elasticsearch_ml_anomaly_detection_job` import to be resilient to sparse state values ([#1550](https://github.com/elastic/terraform-provider-elasticstack/pull/1550))
7677

7778
## [0.13.1] - 2025-12-12
7879

internal/elasticsearch/ml/anomaly_detection_job/acc_test.go

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -56,6 +56,17 @@ func TestAccResourceAnomalyDetectionJobBasic(t *testing.T) {
5656
resource.TestCheckResourceAttr("elasticstack_elasticsearch_ml_anomaly_detection_job.test", "job_type", "anomaly_detector"),
5757
),
5858
},
59+
// ImportState testing
60+
{
61+
ProtoV6ProviderFactories: acctest.Providers,
62+
ResourceName: "elasticstack_elasticsearch_ml_anomaly_detection_job.test",
63+
ImportState: true,
64+
ImportStateVerify: true,
65+
ConfigDirectory: acctest.NamedTestCaseDirectory("update"),
66+
ConfigVariables: config.Variables{
67+
"job_id": config.StringVariable(jobID),
68+
},
69+
},
5970
},
6071
})
6172
}

internal/elasticsearch/ml/anomaly_detection_job/models_tf.go

Lines changed: 54 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -17,23 +17,24 @@ import (
1717

1818
// AnomalyDetectionJobTFModel represents the Terraform resource model for ML anomaly detection jobs
1919
type AnomalyDetectionJobTFModel struct {
20-
ID types.String `tfsdk:"id"`
21-
ElasticsearchConnection types.List `tfsdk:"elasticsearch_connection"`
22-
JobID types.String `tfsdk:"job_id"`
23-
Description types.String `tfsdk:"description"`
24-
Groups types.Set `tfsdk:"groups"`
25-
AnalysisConfig AnalysisConfigTFModel `tfsdk:"analysis_config"`
26-
AnalysisLimits types.Object `tfsdk:"analysis_limits"`
27-
DataDescription types.Object `tfsdk:"data_description"`
28-
ModelPlotConfig types.Object `tfsdk:"model_plot_config"`
29-
AllowLazyOpen types.Bool `tfsdk:"allow_lazy_open"`
30-
BackgroundPersistInterval types.String `tfsdk:"background_persist_interval"`
31-
CustomSettings jsontypes.Normalized `tfsdk:"custom_settings"`
32-
DailyModelSnapshotRetentionAfterDays types.Int64 `tfsdk:"daily_model_snapshot_retention_after_days"`
33-
ModelSnapshotRetentionDays types.Int64 `tfsdk:"model_snapshot_retention_days"`
34-
RenormalizationWindowDays types.Int64 `tfsdk:"renormalization_window_days"`
35-
ResultsIndexName types.String `tfsdk:"results_index_name"`
36-
ResultsRetentionDays types.Int64 `tfsdk:"results_retention_days"`
20+
ID types.String `tfsdk:"id"`
21+
ElasticsearchConnection types.List `tfsdk:"elasticsearch_connection"`
22+
JobID types.String `tfsdk:"job_id"`
23+
Description types.String `tfsdk:"description"`
24+
Groups types.Set `tfsdk:"groups"`
25+
// AnalysisConfig is required in configuration, but can be null in state during import.
26+
AnalysisConfig *AnalysisConfigTFModel `tfsdk:"analysis_config"`
27+
AnalysisLimits types.Object `tfsdk:"analysis_limits"`
28+
DataDescription types.Object `tfsdk:"data_description"`
29+
ModelPlotConfig types.Object `tfsdk:"model_plot_config"`
30+
AllowLazyOpen types.Bool `tfsdk:"allow_lazy_open"`
31+
BackgroundPersistInterval types.String `tfsdk:"background_persist_interval"`
32+
CustomSettings jsontypes.Normalized `tfsdk:"custom_settings"`
33+
DailyModelSnapshotRetentionAfterDays types.Int64 `tfsdk:"daily_model_snapshot_retention_after_days"`
34+
ModelSnapshotRetentionDays types.Int64 `tfsdk:"model_snapshot_retention_days"`
35+
RenormalizationWindowDays types.Int64 `tfsdk:"renormalization_window_days"`
36+
ResultsIndexName types.String `tfsdk:"results_index_name"`
37+
ResultsRetentionDays types.Int64 `tfsdk:"results_retention_days"`
3738

3839
// Read-only computed fields
3940
CreateTime types.String `tfsdk:"create_time"`
@@ -124,9 +125,15 @@ func (plan *AnomalyDetectionJobTFModel) toAPIModel(ctx context.Context) (*Anomal
124125
apiModel.Groups = groups
125126
}
126127

128+
if plan.AnalysisConfig == nil {
129+
diags.AddError("Missing analysis_config", "analysis_config is required")
130+
return nil, diags
131+
}
132+
analysisConfig := plan.AnalysisConfig
133+
127134
// Convert detectors
128-
apiDetectors := make([]DetectorAPIModel, len(plan.AnalysisConfig.Detectors))
129-
for i, detector := range plan.AnalysisConfig.Detectors {
135+
apiDetectors := make([]DetectorAPIModel, len(analysisConfig.Detectors))
136+
for i, detector := range analysisConfig.Detectors {
130137
apiDetectors[i] = DetectorAPIModel{
131138
Function: detector.Function.ValueString(),
132139
FieldName: detector.FieldName.ValueString(),
@@ -143,40 +150,40 @@ func (plan *AnomalyDetectionJobTFModel) toAPIModel(ctx context.Context) (*Anomal
143150

144151
// Convert influencers
145152
var influencers []string
146-
if utils.IsKnown(plan.AnalysisConfig.Influencers) {
147-
d := plan.AnalysisConfig.Influencers.ElementsAs(ctx, &influencers, false)
153+
if utils.IsKnown(analysisConfig.Influencers) {
154+
d := analysisConfig.Influencers.ElementsAs(ctx, &influencers, false)
148155
diags.Append(d...)
149156
}
150157

151158
apiModel.AnalysisConfig = AnalysisConfigAPIModel{
152-
BucketSpan: plan.AnalysisConfig.BucketSpan.ValueString(),
153-
CategorizationFieldName: plan.AnalysisConfig.CategorizationFieldName.ValueString(),
159+
BucketSpan: analysisConfig.BucketSpan.ValueString(),
160+
CategorizationFieldName: analysisConfig.CategorizationFieldName.ValueString(),
154161
Detectors: apiDetectors,
155162
Influencers: influencers,
156-
Latency: plan.AnalysisConfig.Latency.ValueString(),
157-
ModelPruneWindow: plan.AnalysisConfig.ModelPruneWindow.ValueString(),
158-
SummaryCountFieldName: plan.AnalysisConfig.SummaryCountFieldName.ValueString(),
163+
Latency: analysisConfig.Latency.ValueString(),
164+
ModelPruneWindow: analysisConfig.ModelPruneWindow.ValueString(),
165+
SummaryCountFieldName: analysisConfig.SummaryCountFieldName.ValueString(),
159166
}
160167

161-
if utils.IsKnown(plan.AnalysisConfig.MultivariateByFields) {
162-
apiModel.AnalysisConfig.MultivariateByFields = utils.Pointer(plan.AnalysisConfig.MultivariateByFields.ValueBool())
168+
if utils.IsKnown(analysisConfig.MultivariateByFields) {
169+
apiModel.AnalysisConfig.MultivariateByFields = utils.Pointer(analysisConfig.MultivariateByFields.ValueBool())
163170
}
164171

165172
// Convert categorization filters
166-
if utils.IsKnown(plan.AnalysisConfig.CategorizationFilters) {
173+
if utils.IsKnown(analysisConfig.CategorizationFilters) {
167174
var categorizationFilters []string
168-
d := plan.AnalysisConfig.CategorizationFilters.ElementsAs(ctx, &categorizationFilters, false)
175+
d := analysisConfig.CategorizationFilters.ElementsAs(ctx, &categorizationFilters, false)
169176
diags.Append(d...)
170177
apiModel.AnalysisConfig.CategorizationFilters = categorizationFilters
171178
}
172179

173180
// Convert per_partition_categorization
174-
if plan.AnalysisConfig.PerPartitionCategorization != nil {
181+
if analysisConfig.PerPartitionCategorization != nil {
175182
apiModel.AnalysisConfig.PerPartitionCategorization = &PerPartitionCategorizationAPIModel{
176-
Enabled: plan.AnalysisConfig.PerPartitionCategorization.Enabled.ValueBool(),
183+
Enabled: analysisConfig.PerPartitionCategorization.Enabled.ValueBool(),
177184
}
178-
if utils.IsKnown(plan.AnalysisConfig.PerPartitionCategorization.StopOnWarn) {
179-
apiModel.AnalysisConfig.PerPartitionCategorization.StopOnWarn = utils.Pointer(plan.AnalysisConfig.PerPartitionCategorization.StopOnWarn.ValueBool())
185+
if utils.IsKnown(analysisConfig.PerPartitionCategorization.StopOnWarn) {
186+
apiModel.AnalysisConfig.PerPartitionCategorization.StopOnWarn = utils.Pointer(analysisConfig.PerPartitionCategorization.StopOnWarn.ValueBool())
180187
}
181188
}
182189

@@ -331,12 +338,15 @@ func (tfModel *AnomalyDetectionJobTFModel) fromAPIModel(ctx context.Context, api
331338

332339
// Helper functions for schema attribute types
333340
// Conversion helper methods
334-
func (tfModel *AnomalyDetectionJobTFModel) convertAnalysisConfigFromAPI(ctx context.Context, apiConfig *AnalysisConfigAPIModel, diags *diag.Diagnostics) AnalysisConfigTFModel {
341+
func (tfModel *AnomalyDetectionJobTFModel) convertAnalysisConfigFromAPI(ctx context.Context, apiConfig *AnalysisConfigAPIModel, diags *diag.Diagnostics) *AnalysisConfigTFModel {
335342
if apiConfig == nil || apiConfig.BucketSpan == "" {
336-
return AnalysisConfigTFModel{}
343+
return nil
337344
}
338345

339-
analysisConfigTF := tfModel.AnalysisConfig
346+
var analysisConfigTF AnalysisConfigTFModel
347+
if tfModel.AnalysisConfig != nil {
348+
analysisConfigTF = *tfModel.AnalysisConfig
349+
}
340350
analysisConfigTF.BucketSpan = types.StringValue(apiConfig.BucketSpan)
341351

342352
// Convert optional string fields
@@ -352,11 +362,15 @@ func (tfModel *AnomalyDetectionJobTFModel) convertAnalysisConfigFromAPI(ctx cont
352362
var categorizationFiltersDiags diag.Diagnostics
353363
analysisConfigTF.CategorizationFilters, categorizationFiltersDiags = typeutils.NonEmptyListOrDefault(ctx, analysisConfigTF.CategorizationFilters, types.StringType, apiConfig.CategorizationFilters)
354364
diags.Append(categorizationFiltersDiags...)
365+
// Ensure the list is properly typed (handles untyped zero-value lists from import)
366+
analysisConfigTF.CategorizationFilters = typeutils.EnsureTypedList(ctx, analysisConfigTF.CategorizationFilters, types.StringType)
355367

356368
// Convert influencers
357369
var influencersDiags diag.Diagnostics
358370
analysisConfigTF.Influencers, influencersDiags = typeutils.NonEmptyListOrDefault(ctx, analysisConfigTF.Influencers, types.StringType, apiConfig.Influencers)
359371
diags.Append(influencersDiags...)
372+
// Ensure the list is properly typed (handles untyped zero-value lists from import)
373+
analysisConfigTF.Influencers = typeutils.EnsureTypedList(ctx, analysisConfigTF.Influencers, types.StringType)
360374

361375
// Convert detectors
362376
if len(apiConfig.Detectors) > 0 {
@@ -427,6 +441,8 @@ func (tfModel *AnomalyDetectionJobTFModel) convertAnalysisConfigFromAPI(ctx cont
427441
var customRulesDiags diag.Diagnostics
428442
detectorsTF[i].CustomRules, customRulesDiags = typeutils.NonEmptyListOrDefault(ctx, originalDetector.CustomRules, types.ObjectType{AttrTypes: getCustomRuleAttrTypes()}, apiConfig.Detectors[i].CustomRules)
429443
diags.Append(customRulesDiags...)
444+
// Ensure the list is properly typed (handles untyped zero-value lists from import)
445+
detectorsTF[i].CustomRules = typeutils.EnsureTypedList(ctx, detectorsTF[i].CustomRules, types.ObjectType{AttrTypes: getCustomRuleAttrTypes()})
430446
}
431447
analysisConfigTF.Detectors = detectorsTF
432448
}
@@ -440,7 +456,7 @@ func (tfModel *AnomalyDetectionJobTFModel) convertAnalysisConfigFromAPI(ctx cont
440456
analysisConfigTF.PerPartitionCategorization = &perPartitionCategorizationTF
441457
}
442458

443-
return analysisConfigTF
459+
return &analysisConfigTF
444460
}
445461

446462
func (tfModel *AnomalyDetectionJobTFModel) convertDataDescriptionFromAPI(ctx context.Context, apiDataDescription *DataDescriptionAPIModel, diags *diag.Diagnostics) types.Object {

internal/elasticsearch/ml/anomaly_detection_job/resource.go

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -71,5 +71,13 @@ func (r *anomalyDetectionJobResource) resourceReady(diags *fwdiags.Diagnostics)
7171
}
7272

7373
func (r *anomalyDetectionJobResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
74-
resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp)
74+
// Import is intentionally sparse: only IDs are set. Everything else is populated by Read().
75+
compID, diags := clients.CompositeIdFromStrFw(req.ID)
76+
resp.Diagnostics.Append(diags...)
77+
if resp.Diagnostics.HasError() {
78+
return
79+
}
80+
81+
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("id"), req.ID)...)
82+
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("job_id"), compID.ResourceId)...)
7583
}

internal/utils/typeutils/list.go

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ import (
66
"github.com/hashicorp/terraform-plugin-framework/attr"
77
"github.com/hashicorp/terraform-plugin-framework/diag"
88
"github.com/hashicorp/terraform-plugin-framework/types"
9+
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
910
)
1011

1112
func NonEmptyListOrDefault[T any](ctx context.Context, original types.List, elemType attr.Type, slice []T) (types.List, diag.Diagnostics) {
@@ -15,3 +16,22 @@ func NonEmptyListOrDefault[T any](ctx context.Context, original types.List, elem
1516

1617
return types.ListValueFrom(ctx, elemType, slice)
1718
}
19+
20+
// EnsureTypedList converts untyped zero-value lists to properly typed null lists.
21+
// This is commonly needed during import operations where the framework may create
22+
// untyped lists with DynamicPseudoType elements, which causes type conversion errors.
23+
// If the list already has a proper type, it is returned unchanged.
24+
func EnsureTypedList(ctx context.Context, list types.List, elemType attr.Type) types.List {
25+
// Check if the list has no element type (nil)
26+
if list.ElementType(ctx) == nil {
27+
return types.ListNull(elemType)
28+
}
29+
30+
// Check if the list has a dynamic pseudo type
31+
if _, ok := list.ElementType(ctx).(basetypes.DynamicType); ok {
32+
return types.ListNull(elemType)
33+
}
34+
35+
// List is already properly typed, return as-is
36+
return list
37+
}

0 commit comments

Comments
 (0)