5 changes: 4 additions & 1 deletion bin/datadog_backup
@@ -54,6 +54,9 @@ def prereqs(defaults) # rubocop:disable Metrics/AbcSize
opts.on('--synthetics-only') do
result[:resources] = [DatadogBackup::Synthetics]
end
opts.on('--workflows-only') do
result[:resources] = [DatadogBackup::Workflows]
end
opts.on(
'--json',
'format backups as JSON instead of YAML. Does not impact `diffs` nor `restore`, but do not mix formats in the same backup-dir.'
@@ -86,7 +89,7 @@ defaults = {
action: nil,
backup_dir: File.join(ENV.fetch('PWD'), 'backup'),
diff_format: :color,
resources: [DatadogBackup::Dashboards, DatadogBackup::Monitors, DatadogBackup::SLOs, DatadogBackup::Synthetics],
resources: [DatadogBackup::Dashboards, DatadogBackup::Monitors, DatadogBackup::SLOs, DatadogBackup::Synthetics, DatadogBackup::Workflows],
output_format: :yaml,
force_restore: false,
disable_array_sort: false
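With this change the new resource can be scoped on the command line like the other `--*-only` flags. A plausible invocation, assuming the gem's existing positional `backup` action and API credentials in the environment:

    datadog_backup backup --workflows-only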
1 change: 1 addition & 0 deletions lib/datadog_backup.rb
@@ -10,6 +10,7 @@
require_relative 'datadog_backup/monitors'
require_relative 'datadog_backup/slos'
require_relative 'datadog_backup/synthetics'
require_relative 'datadog_backup/workflows'
require_relative 'datadog_backup/thread_pool'
require_relative 'datadog_backup/version'
require_relative 'datadog_backup/deprecations'
62 changes: 62 additions & 0 deletions lib/datadog_backup/workflows.rb
@@ -0,0 +1,62 @@
# frozen_string_literal: true

module DatadogBackup
# Workflow specific overrides for backup and restore.
class Workflows < Resources
def all
get_all
end

def backup
LOGGER.info("Starting workflows backup on #{::DatadogBackup::ThreadPool::TPOOL.max_length} threads")

futures = all.map do |workflow|
Concurrent::Promises.future_on(::DatadogBackup::ThreadPool::TPOOL, workflow) do |wf|
id = wf[id_keyname]
get_and_write_file(id)
end
end

watcher = ::DatadogBackup::ThreadPool.watcher
watcher.join if watcher.status

Concurrent::Promises.zip(*futures).value!
end

def get_by_id(id)
except(get(id))
rescue Faraday::ResourceNotFound
LOGGER.warn("Workflow #{id} not found (404)")
{}
rescue Faraday::BadRequestError => e
LOGGER.warn("Workflow #{id} returned bad request (400) - skipping - #{e.message}")
{}
end

def initialize(options)
super
@banlist = %w[created_at modified_at last_executed_at].freeze
end

# v2 API wraps all responses in 'data' key
def body_with_2xx(response)
raise "#{caller_locations(1, 1)[0].label} failed with error #{response.status}" unless response.status.to_s =~ /^2/

response.body.fetch('data')
end

private

def api_version
'v2'
end

def api_resource_name
'workflows'
end

def id_keyname
'id'
end
end
end
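The spec below exercises the class directly. For context, a minimal sketch of programmatic use, assuming DD_API_KEY and DD_APP_KEY are set as the gem's other resources expect:

    require 'datadog_backup'

    workflows = DatadogBackup::Workflows.new(
      action: 'backup',
      backup_dir: File.join(Dir.pwd, 'backup'),
      output_format: :json,
      resources: []
    )
    workflows.backup                      # writes backup/workflows/<id>.json for each workflow
    workflows.get_by_id('abc-123-def')    # one workflow, with created_at/modified_at/last_executed_at stripped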
204 changes: 204 additions & 0 deletions spec/datadog_backup/workflows_spec.rb
@@ -0,0 +1,204 @@
# frozen_string_literal: true

require 'spec_helper'

describe DatadogBackup::Workflows do
let(:stubs) { Faraday::Adapter::Test::Stubs.new }
let(:api_client_double) { Faraday.new { |f| f.adapter :test, stubs } }
let(:tempdir) { Dir.mktmpdir }
let(:workflows) do
workflows = described_class.new(
action: 'backup',
backup_dir: tempdir,
output_format: :json,
resources: []
)
allow(workflows).to receive(:api_service).and_return(api_client_double)
workflows
end
let(:workflow_abc_123) do
{
'id' => 'abc-123-def',
'attributes' => {
'name' => 'Test Workflow',
'description' => 'A test workflow for CI/CD',
'steps' => [
{
'name' => 'step_1',
'action' => 'com.datadoghq.http',
'params' => {
'url' => 'https://example.com/api',
'method' => 'POST'
}
}
],
'triggers' => [
{
'type' => 'schedule',
'schedule' => '0 9 * * 1-5'
}
]
},
'created_at' => '2024-01-01T00:00:00Z',
'modified_at' => '2024-01-02T00:00:00Z',
'last_executed_at' => '2024-01-03T00:00:00Z'
}
end
let(:workflow_xyz_456) do
{
'id' => 'xyz-456-ghi',
'attributes' => {
'name' => 'Another Workflow',
'description' => 'Another test workflow',
'steps' => [],
'triggers' => []
},
'created_at' => '2024-02-01T00:00:00Z',
'modified_at' => '2024-02-02T00:00:00Z'
}
end
let(:workflow_abc_123_clean) do
{
'id' => 'abc-123-def',
'attributes' => {
'name' => 'Test Workflow',
'description' => 'A test workflow for CI/CD',
'steps' => [
{
'name' => 'step_1',
'action' => 'com.datadoghq.http',
'params' => {
'url' => 'https://example.com/api',
'method' => 'POST'
}
}
],
'triggers' => [
{
'type' => 'schedule',
'schedule' => '0 9 * * 1-5'
}
]
}
}
end
let(:workflow_xyz_456_clean) do
{
'id' => 'xyz-456-ghi',
'attributes' => {
'name' => 'Another Workflow',
'description' => 'Another test workflow',
'steps' => [],
'triggers' => []
}
}
end
let(:fetched_workflows) do
{
'data' => [workflow_abc_123, workflow_xyz_456]
}
end
let(:workflow_abc_123_response) do
{ 'data' => workflow_abc_123 }
end
let(:workflow_xyz_456_response) do
{ 'data' => workflow_xyz_456 }
end
let(:all_workflows) { respond_with200(fetched_workflows) }
let(:example_workflow1) { respond_with200(workflow_abc_123_response) }
let(:example_workflow2) { respond_with200(workflow_xyz_456_response) }

before do
stubs.get('/api/v2/workflows') { all_workflows }
stubs.get('/api/v2/workflows/abc-123-def') { example_workflow1 }
stubs.get('/api/v2/workflows/xyz-456-ghi') { example_workflow2 }
end

after do
FileUtils.remove_entry tempdir
end

describe '#backup' do
subject { workflows.backup }

it 'is expected to create two files' do
file1 = instance_double(File)
allow(File).to receive(:open).with(workflows.filename('abc-123-def'), 'w').and_return(file1)
allow(file1).to receive(:write)
allow(file1).to receive(:close)

file2 = instance_double(File)
allow(File).to receive(:open).with(workflows.filename('xyz-456-ghi'), 'w').and_return(file2)
allow(file2).to receive(:write)
allow(file2).to receive(:close)

workflows.backup
expect(file1).to have_received(:write).with(::JSON.pretty_generate(workflow_abc_123_clean.deep_sort))
expect(file2).to have_received(:write).with(::JSON.pretty_generate(workflow_xyz_456_clean.deep_sort))
end
end

describe '#filename' do
subject { workflows.filename('abc-123-def') }

it { is_expected.to eq("#{tempdir}/workflows/abc-123-def.json") }
end

describe '#get_by_id' do
subject { workflows.get_by_id('abc-123-def') }

it { is_expected.to eq workflow_abc_123_clean }
end

describe '#all' do
subject { workflows.all }

it 'returns array of workflows' do
expect(subject).to eq([workflow_abc_123, workflow_xyz_456])
end
end

describe '#diff' do
it 'calls the api only once' do
workflows.write_file('{"a":"b"}', workflows.filename('abc-123-def'))
expect(workflows.diff('abc-123-def')).to eq(<<~EODASH
---
-attributes:
- description: A test workflow for CI/CD
- name: Test Workflow
- steps:
- - action: com.datadoghq.http
- name: step_1
- params:
- method: POST
- url: https://example.com/api
- triggers:
- - schedule: 0 9 * * 1-5
- type: schedule
-id: abc-123-def
+a: b
EODASH
.chomp)
end
end

describe '#except' do
subject { workflows.except({ :a => :b, 'created_at' => :c, 'modified_at' => :d, 'last_executed_at' => :e }) }

it { is_expected.to eq({ a: :b }) }
end

describe 'private methods' do
it 'uses v2 API' do
expect(workflows.send(:api_version)).to eq('v2')
end

it 'uses workflows resource name' do
expect(workflows.send(:api_resource_name)).to eq('workflows')
end

it 'uses id as id_keyname' do
expect(workflows.send(:id_keyname)).to eq('id')
end
end
end