Skip to content

Commit d522227

Browse files
committed
fix(ml): make anomaly_detection_job import resilient
Fixes Terraform import/refresh failures for elasticstack_elasticsearch_ml_anomaly_detection_job by keeping ImportState sparse (id/job_id only) and allowing analysis_config to be null during import before Read populates it. Also ensures empty nested lists in analysis_config (e.g. categorization_filters/influencers/custom_rules) are always typed to avoid DynamicPseudoType conversion errors. AI assistance: This change was implemented with the help of an AI coding assistant (Cursor + GPT).
1 parent 02d10b4 commit d522227

File tree

2 files changed

+74
-39
lines changed

2 files changed

+74
-39
lines changed

internal/elasticsearch/ml/anomaly_detection_job/models_tf.go

Lines changed: 66 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -17,23 +17,24 @@ import (
1717

1818
// AnomalyDetectionJobTFModel represents the Terraform resource model for ML anomaly detection jobs
1919
type AnomalyDetectionJobTFModel struct {
20-
ID types.String `tfsdk:"id"`
21-
ElasticsearchConnection types.List `tfsdk:"elasticsearch_connection"`
22-
JobID types.String `tfsdk:"job_id"`
23-
Description types.String `tfsdk:"description"`
24-
Groups types.Set `tfsdk:"groups"`
25-
AnalysisConfig AnalysisConfigTFModel `tfsdk:"analysis_config"`
26-
AnalysisLimits types.Object `tfsdk:"analysis_limits"`
27-
DataDescription types.Object `tfsdk:"data_description"`
28-
ModelPlotConfig types.Object `tfsdk:"model_plot_config"`
29-
AllowLazyOpen types.Bool `tfsdk:"allow_lazy_open"`
30-
BackgroundPersistInterval types.String `tfsdk:"background_persist_interval"`
31-
CustomSettings jsontypes.Normalized `tfsdk:"custom_settings"`
32-
DailyModelSnapshotRetentionAfterDays types.Int64 `tfsdk:"daily_model_snapshot_retention_after_days"`
33-
ModelSnapshotRetentionDays types.Int64 `tfsdk:"model_snapshot_retention_days"`
34-
RenormalizationWindowDays types.Int64 `tfsdk:"renormalization_window_days"`
35-
ResultsIndexName types.String `tfsdk:"results_index_name"`
36-
ResultsRetentionDays types.Int64 `tfsdk:"results_retention_days"`
20+
ID types.String `tfsdk:"id"`
21+
ElasticsearchConnection types.List `tfsdk:"elasticsearch_connection"`
22+
JobID types.String `tfsdk:"job_id"`
23+
Description types.String `tfsdk:"description"`
24+
Groups types.Set `tfsdk:"groups"`
25+
// AnalysisConfig is required in configuration, but can be null in state during import.
26+
AnalysisConfig *AnalysisConfigTFModel `tfsdk:"analysis_config"`
27+
AnalysisLimits types.Object `tfsdk:"analysis_limits"`
28+
DataDescription types.Object `tfsdk:"data_description"`
29+
ModelPlotConfig types.Object `tfsdk:"model_plot_config"`
30+
AllowLazyOpen types.Bool `tfsdk:"allow_lazy_open"`
31+
BackgroundPersistInterval types.String `tfsdk:"background_persist_interval"`
32+
CustomSettings jsontypes.Normalized `tfsdk:"custom_settings"`
33+
DailyModelSnapshotRetentionAfterDays types.Int64 `tfsdk:"daily_model_snapshot_retention_after_days"`
34+
ModelSnapshotRetentionDays types.Int64 `tfsdk:"model_snapshot_retention_days"`
35+
RenormalizationWindowDays types.Int64 `tfsdk:"renormalization_window_days"`
36+
ResultsIndexName types.String `tfsdk:"results_index_name"`
37+
ResultsRetentionDays types.Int64 `tfsdk:"results_retention_days"`
3738

3839
// Read-only computed fields
3940
CreateTime types.String `tfsdk:"create_time"`
@@ -124,9 +125,15 @@ func (plan *AnomalyDetectionJobTFModel) toAPIModel(ctx context.Context) (*Anomal
124125
apiModel.Groups = groups
125126
}
126127

128+
if plan.AnalysisConfig == nil {
129+
diags.AddError("Missing analysis_config", "analysis_config is required")
130+
return nil, diags
131+
}
132+
analysisConfig := plan.AnalysisConfig
133+
127134
// Convert detectors
128-
apiDetectors := make([]DetectorAPIModel, len(plan.AnalysisConfig.Detectors))
129-
for i, detector := range plan.AnalysisConfig.Detectors {
135+
apiDetectors := make([]DetectorAPIModel, len(analysisConfig.Detectors))
136+
for i, detector := range analysisConfig.Detectors {
130137
apiDetectors[i] = DetectorAPIModel{
131138
Function: detector.Function.ValueString(),
132139
FieldName: detector.FieldName.ValueString(),
@@ -143,40 +150,40 @@ func (plan *AnomalyDetectionJobTFModel) toAPIModel(ctx context.Context) (*Anomal
143150

144151
// Convert influencers
145152
var influencers []string
146-
if utils.IsKnown(plan.AnalysisConfig.Influencers) {
147-
d := plan.AnalysisConfig.Influencers.ElementsAs(ctx, &influencers, false)
153+
if utils.IsKnown(analysisConfig.Influencers) {
154+
d := analysisConfig.Influencers.ElementsAs(ctx, &influencers, false)
148155
diags.Append(d...)
149156
}
150157

151158
apiModel.AnalysisConfig = AnalysisConfigAPIModel{
152-
BucketSpan: plan.AnalysisConfig.BucketSpan.ValueString(),
153-
CategorizationFieldName: plan.AnalysisConfig.CategorizationFieldName.ValueString(),
159+
BucketSpan: analysisConfig.BucketSpan.ValueString(),
160+
CategorizationFieldName: analysisConfig.CategorizationFieldName.ValueString(),
154161
Detectors: apiDetectors,
155162
Influencers: influencers,
156-
Latency: plan.AnalysisConfig.Latency.ValueString(),
157-
ModelPruneWindow: plan.AnalysisConfig.ModelPruneWindow.ValueString(),
158-
SummaryCountFieldName: plan.AnalysisConfig.SummaryCountFieldName.ValueString(),
163+
Latency: analysisConfig.Latency.ValueString(),
164+
ModelPruneWindow: analysisConfig.ModelPruneWindow.ValueString(),
165+
SummaryCountFieldName: analysisConfig.SummaryCountFieldName.ValueString(),
159166
}
160167

161-
if utils.IsKnown(plan.AnalysisConfig.MultivariateByFields) {
162-
apiModel.AnalysisConfig.MultivariateByFields = utils.Pointer(plan.AnalysisConfig.MultivariateByFields.ValueBool())
168+
if utils.IsKnown(analysisConfig.MultivariateByFields) {
169+
apiModel.AnalysisConfig.MultivariateByFields = utils.Pointer(analysisConfig.MultivariateByFields.ValueBool())
163170
}
164171

165172
// Convert categorization filters
166-
if utils.IsKnown(plan.AnalysisConfig.CategorizationFilters) {
173+
if utils.IsKnown(analysisConfig.CategorizationFilters) {
167174
var categorizationFilters []string
168-
d := plan.AnalysisConfig.CategorizationFilters.ElementsAs(ctx, &categorizationFilters, false)
175+
d := analysisConfig.CategorizationFilters.ElementsAs(ctx, &categorizationFilters, false)
169176
diags.Append(d...)
170177
apiModel.AnalysisConfig.CategorizationFilters = categorizationFilters
171178
}
172179

173180
// Convert per_partition_categorization
174-
if plan.AnalysisConfig.PerPartitionCategorization != nil {
181+
if analysisConfig.PerPartitionCategorization != nil {
175182
apiModel.AnalysisConfig.PerPartitionCategorization = &PerPartitionCategorizationAPIModel{
176-
Enabled: plan.AnalysisConfig.PerPartitionCategorization.Enabled.ValueBool(),
183+
Enabled: analysisConfig.PerPartitionCategorization.Enabled.ValueBool(),
177184
}
178-
if utils.IsKnown(plan.AnalysisConfig.PerPartitionCategorization.StopOnWarn) {
179-
apiModel.AnalysisConfig.PerPartitionCategorization.StopOnWarn = utils.Pointer(plan.AnalysisConfig.PerPartitionCategorization.StopOnWarn.ValueBool())
185+
if utils.IsKnown(analysisConfig.PerPartitionCategorization.StopOnWarn) {
186+
apiModel.AnalysisConfig.PerPartitionCategorization.StopOnWarn = utils.Pointer(analysisConfig.PerPartitionCategorization.StopOnWarn.ValueBool())
180187
}
181188
}
182189

@@ -331,12 +338,15 @@ func (tfModel *AnomalyDetectionJobTFModel) fromAPIModel(ctx context.Context, api
331338

332339
// Helper functions for schema attribute types
333340
// Conversion helper methods
334-
func (tfModel *AnomalyDetectionJobTFModel) convertAnalysisConfigFromAPI(ctx context.Context, apiConfig *AnalysisConfigAPIModel, diags *diag.Diagnostics) AnalysisConfigTFModel {
341+
func (tfModel *AnomalyDetectionJobTFModel) convertAnalysisConfigFromAPI(ctx context.Context, apiConfig *AnalysisConfigAPIModel, diags *diag.Diagnostics) *AnalysisConfigTFModel {
335342
if apiConfig == nil || apiConfig.BucketSpan == "" {
336-
return AnalysisConfigTFModel{}
343+
return nil
337344
}
338345

339-
analysisConfigTF := tfModel.AnalysisConfig
346+
var analysisConfigTF AnalysisConfigTFModel
347+
if tfModel.AnalysisConfig != nil {
348+
analysisConfigTF = *tfModel.AnalysisConfig
349+
}
340350
analysisConfigTF.BucketSpan = types.StringValue(apiConfig.BucketSpan)
341351

342352
// Convert optional string fields
@@ -352,11 +362,23 @@ func (tfModel *AnomalyDetectionJobTFModel) convertAnalysisConfigFromAPI(ctx cont
352362
var categorizationFiltersDiags diag.Diagnostics
353363
analysisConfigTF.CategorizationFilters, categorizationFiltersDiags = typeutils.NonEmptyListOrDefault(ctx, analysisConfigTF.CategorizationFilters, types.StringType, apiConfig.CategorizationFilters)
354364
diags.Append(categorizationFiltersDiags...)
365+
// If the existing value was an untyped zero-value list (common during import), force a typed null list.
366+
if analysisConfigTF.CategorizationFilters.ElementType(ctx) == nil {
367+
analysisConfigTF.CategorizationFilters = types.ListNull(types.StringType)
368+
} else if _, ok := analysisConfigTF.CategorizationFilters.ElementType(ctx).(basetypes.DynamicType); ok {
369+
analysisConfigTF.CategorizationFilters = types.ListNull(types.StringType)
370+
}
355371

356372
// Convert influencers
357373
var influencersDiags diag.Diagnostics
358374
analysisConfigTF.Influencers, influencersDiags = typeutils.NonEmptyListOrDefault(ctx, analysisConfigTF.Influencers, types.StringType, apiConfig.Influencers)
359375
diags.Append(influencersDiags...)
376+
// If the existing value was an untyped zero-value list (common during import), force a typed null list.
377+
if analysisConfigTF.Influencers.ElementType(ctx) == nil {
378+
analysisConfigTF.Influencers = types.ListNull(types.StringType)
379+
} else if _, ok := analysisConfigTF.Influencers.ElementType(ctx).(basetypes.DynamicType); ok {
380+
analysisConfigTF.Influencers = types.ListNull(types.StringType)
381+
}
360382

361383
// Convert detectors
362384
if len(apiConfig.Detectors) > 0 {
@@ -427,6 +449,12 @@ func (tfModel *AnomalyDetectionJobTFModel) convertAnalysisConfigFromAPI(ctx cont
427449
var customRulesDiags diag.Diagnostics
428450
detectorsTF[i].CustomRules, customRulesDiags = typeutils.NonEmptyListOrDefault(ctx, originalDetector.CustomRules, types.ObjectType{AttrTypes: getCustomRuleAttrTypes()}, apiConfig.Detectors[i].CustomRules)
429451
diags.Append(customRulesDiags...)
452+
// If the existing value was an untyped zero-value list (common during import), force a typed null list.
453+
if detectorsTF[i].CustomRules.ElementType(ctx) == nil {
454+
detectorsTF[i].CustomRules = types.ListNull(types.ObjectType{AttrTypes: getCustomRuleAttrTypes()})
455+
} else if _, ok := detectorsTF[i].CustomRules.ElementType(ctx).(basetypes.DynamicType); ok {
456+
detectorsTF[i].CustomRules = types.ListNull(types.ObjectType{AttrTypes: getCustomRuleAttrTypes()})
457+
}
430458
}
431459
analysisConfigTF.Detectors = detectorsTF
432460
}
@@ -440,7 +468,7 @@ func (tfModel *AnomalyDetectionJobTFModel) convertAnalysisConfigFromAPI(ctx cont
440468
analysisConfigTF.PerPartitionCategorization = &perPartitionCategorizationTF
441469
}
442470

443-
return analysisConfigTF
471+
return &analysisConfigTF
444472
}
445473

446474
func (tfModel *AnomalyDetectionJobTFModel) convertDataDescriptionFromAPI(ctx context.Context, apiDataDescription *DataDescriptionAPIModel, diags *diag.Diagnostics) types.Object {

internal/elasticsearch/ml/anomaly_detection_job/resource.go

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@ package anomaly_detection_job
22

33
import (
44
"context"
5+
"strings"
56

67
"github.com/elastic/terraform-provider-elasticstack/internal/clients"
78
fwdiags "github.com/hashicorp/terraform-plugin-framework/diag"
@@ -71,5 +72,11 @@ func (r *anomalyDetectionJobResource) resourceReady(diags *fwdiags.Diagnostics)
7172
}
7273

7374
func (r *anomalyDetectionJobResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
74-
resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp)
75+
// Import is intentionally sparse: only IDs are set. Everything else is populated by Read().
76+
raw := req.ID
77+
parts := strings.Split(raw, "/")
78+
jobID := parts[len(parts)-1]
79+
80+
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("id"), jobID)...)
81+
resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("job_id"), jobID)...)
7582
}

0 commit comments

Comments (0)