From 1c3010edc4b773da845df5fc9b723a3fb8600d71 Mon Sep 17 00:00:00 2001
From: Sang Truong
Date: Mon, 24 Oct 2022 16:14:06 +0700
Subject: [PATCH 1/4] FEATURE:MSF-24365 - Change update status for test

---
 bin/run_brick.rb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bin/run_brick.rb b/bin/run_brick.rb
index 5c0aec05f..e027bed57 100755
--- a/bin/run_brick.rb
+++ b/bin/run_brick.rb
@@ -20,7 +20,7 @@ def get_brick_params(prefix)
 def handle_error(params, log, brick_type, error, error_message)
   execution_log = GoodData.logger
   execution_log.error "Execution failed. Error: #{error}" unless execution_log.nil?
-  GoodData::Bricks::ExecutionResultMiddleware.update_execution_result(GoodData::Bricks::ExecutionStatus::ERROR, error_message)
+  GoodData::Bricks::ExecutionResultMiddleware.update_execution_result(GoodData::Bricks::ExecutionStatus::WARNING, error_message)
   log.error "action=#{brick_type}_execution status=failed commit_hash=#{params['GOODDATA_RUBY_COMMIT']} execution_id=#{params['GDC_EXECUTION_ID']} exception=#{error}"
   raise
 end

From 440bf582f4c8929c4a428dd4b68f04c680240220 Mon Sep 17 00:00:00 2001
From: Sang Truong
Date: Mon, 31 Oct 2022 07:40:03 +0700
Subject: [PATCH 2/4] FEATURE:MSF-24365 - Remove test for building testing image

---
 spec/lcm/load/users_brick_load_spec.rb | 202 -------------------------
 1 file changed, 202 deletions(-)
 delete mode 100644 spec/lcm/load/users_brick_load_spec.rb

diff --git a/spec/lcm/load/users_brick_load_spec.rb b/spec/lcm/load/users_brick_load_spec.rb
deleted file mode 100644
index c88ac5012..000000000
--- a/spec/lcm/load/users_brick_load_spec.rb
+++ /dev/null
@@ -1,202 +0,0 @@
-require 'active_support/core_ext/numeric/time'
-
-require_relative '../integration/support/project_helper'
-require_relative '../integration/support/connection_helper'
-require_relative '../integration/support/configuration_helper'
-require_relative '../integration/support/s3_helper'
-require_relative '../userprov/shared_contexts_for_user_bricks'
-require_relative '../integration/spec/brick_runner'
-require_relative '../helpers/schedule_helper'
-require_relative 'shared_contexts_for_load_tests'
-
-def user_in_domain(user_name)
-  domain = @rest_client.domain(LcmConnectionHelper.environment[:prod_organization])
-  domain.find_user_by_login(user_name)
-end
-
-# set up by execmgr-k8s
-image_tag = ENV['LCM_BRICKS_IMAGE_TAG']
-
-GoodData::Environment.const_set('VCR_ON', false)
-
-user_array = []
-user_count = ENV['GD_LCM_SPEC_USER_COUNT'] ? ENV['GD_LCM_SPEC_USER_COUNT'].to_i : 20
-project_array = []
-project_count = ENV['GD_LCM_SPEC_PROJECT_COUNT'] ? ENV['GD_LCM_SPEC_PROJECT_COUNT'].to_i : 20
-service_project = nil
-users_schedule = nil
-user_filters_schedule = nil
-
-describe 'UsersBrick' do
-  include_context 'load tests cleanup' unless ENV['GD_LCM_SMOKE_TEST'] == 'true'
-
-  before(:all) do
-    @suffix = ConfigurationHelper.suffix
-    @rest_client = LcmConnectionHelper.production_server_connection
-    @domain = @rest_client.domain(LcmConnectionHelper.environment[:prod_organization])
-    @opts = {
-      client: @rest_client,
-      title: "users brick load test #{@suffix}",
-      auth_token: LcmConnectionHelper.environment[:prod_token],
-      environment: 'TESTING'
-    }
-    project_helper = Support::ProjectHelper.create(@opts)
-    project_helper.create_ldm
-    project_helper.load_data
-    @project = project_helper.project
-
-    @user_name = "#{@suffix}@bar.baz"
-    project_helper.ensure_user(@user_name, @domain)
-    @user_data = {
-      login: @user_name,
-      first_name: 'first',
-      last_name: 'last',
-      company: 'GoodData',
-      position: 'developer',
-      country: 'CZech',
-      phone: '123',
-      language: 'fr-FR',
-      user_groups: 'test_group',
-      client_id: 'testingclient'
-    }
-    users_csv = ConfigurationHelper.csv_from_hashes([@user_data])
-    s3_key = 'user_data'
-    s3_info = Support::S3Helper.upload_file(users_csv, s3_key)
-
-    @test_context = {
-      project_id: @project.pid,
-      config: LcmConnectionHelper.environment,
-      s3_bucket: GoodData::Environment::ConnectionHelper::SECRETS[:s3_bucket_name],
-      s3_key: s3_key
-    }.merge(s3_info)
-    @template_path = File.expand_path('../userprov/params/users_brick.json.erb', __dir__)
-  end
-
-  after(:each) do
-    $SCRIPT_PARAMS = nil
-  end
-
-  context 'when using mode sync_multiple_projects_based_on_custom_id' do
-    before(:all) do
-      @test_context[:sync_mode] = 'sync_multiple_projects_based_on_custom_id'
-
-      (1..user_count).each do |i|
-        user_name = "#{@suffix}2_#{i}@bar.baz"
-        @user_data[:login] = user_name
-        user_array << @user_data.clone
-      end
-      (1..project_count).each do |j|
-        (1..user_count).each do |i|
-          user_name = "#{@suffix}2_#{i}_#{j}@bar.baz"
-          @user_data[:login] = user_name
-          @user_data[:client_id] = "testing_client_#{j}"
-          user_array << @user_data.clone
-        end
-      end
-      @domain.create_users(user_array)
-      users_csv = ConfigurationHelper.csv_from_hashes(user_array)
-      Support::S3Helper.upload_file(users_csv, @test_context[:s3_key])
-      @data_product_id = "testing-data-product-#{@suffix}"
-      @data_product = @domain.create_data_product(id: @data_product_id)
-      @master_project = @rest_client.create_project(title: "Test MASTER project for #{@suffix}", auth_token: LcmConnectionHelper.environment[:prod_token])
-      @segment = @data_product.create_segment(segment_id: "testing-segment-#{@suffix}", master_project: @master_project)
-      @segment.create_client(id: 'testingclient', project: @project.uri)
-      (1..project_count).each do |i|
-        project_helper = Support::ProjectHelper.create(
-          client: @rest_client,
-          title: "Test MINOR project with testing_client_#{i} for #{@suffix}",
-          auth_token: LcmConnectionHelper.environment[:prod_token],
-          environment: 'TESTING'
-        )
-        project_helper.create_ldm
-        project_helper.load_data
-        project = project_helper.project
-
-        @segment.create_client(id: "testing_client_#{i}", project: project.uri)
-        project_array << project
-      end
-
-      @test_context[:data_product] = @data_product_id
-      @config_path = ConfigurationHelper.create_interpolated_tempfile(
-        @template_path,
-        @test_context
-      )
-    end
-
-    it 'adds users to project' do
-      # service_project = @rest_client.create_project(
-      #   title: 'users load test service project',
-      #   auth_token: @test_context[:config][:prod_token]
-      # )
-      # opts = {
-      #   context: @test_context,
-      #   template_path: '../../../userprov/params/users_brick.json.erb',
-      #   image_tag: image_tag
-      # }
-      # users_schedule = BrickRunner.schedule_brick('users_brick', service_project, opts)
-    end
-
-    it 'sets the right MUFs to right users' do
-      # @test_context = {
-      #   project_id: @project.pid,
-      #   config: LcmConnectionHelper.environment,
-      #   s3_bucket: GoodData::Environment::ConnectionHelper::SECRETS[:s3_bucket_name],
-      #   s3_endpoint: Support::S3Helper::S3_ENDPOINT,
-      #   s3_key: 'user_data',
-      #   users_brick_input: {
-      #     s3_bucket: GoodData::Environment::ConnectionHelper::SECRETS[:s3_bucket_name],
-      #     s3_endpoint: Support::S3Helper::S3_ENDPOINT,
-      #     s3_key: 'users_brick_input'
-      #   }
-      # }
-      # @ads = GoodData::DataWarehouse.create(
-      #   client: @rest_client,
-      #   title: 'TEST ADS',
-      #   auth_token: LcmConnectionHelper.environment[:prod_token]
-      # )
-      # @test_context[:jdbc_url] = @ads.data['connectionUrl']
-      # @ads_client = GoodData::Datawarehouse.new(
-      #   @test_context[:config][:username],
-      #   @test_context[:config][:password],
-      #   nil,
-      #   jdbc_url: @ads.data['connectionUrl']
-      # )
-      # query = 'CREATE TABLE IF NOT EXISTS "user_filters" (login VARCHAR(255) NOT NULL, state VARCHAR(255) NOT NULL, client_id VARCHAR(255));'
-      # @ads_client.execute(query)
-      # user_array.map do |u|
-      #   insert = "INSERT INTO \"user_filters\" VALUES('#{u[:login]}', 'Oregon','#{u[:client_id]}');"
-      #   @ads_client.execute(insert)
-      # end
-      # @test_context[:sync_mode] = 'sync_multiple_projects_based_on_custom_id'
-      # @test_context[:data_product] = @data_product_id
-      # @template_path = File.expand_path('../userprov/params/user_filters_brick_ads.json.erb', __dir__)
-      # @config_path = ConfigurationHelper.create_interpolated_tempfile(
-      #   @template_path,
-      #   @test_context
-      # )
-      # user_filters_schedule = BrickRunner.schedule_brick(
-      #   'user_filters_brick',
-      #   service_project,
-      #   context: @test_context,
-      #   template_path: '../../../userprov/params/user_filters_brick_ads.json.erb',
-      #   image_tag: image_tag,
-      #   run_after: users_schedule
-      # )
-    end
-
-    it 'executes the schedules' do
-      #users_schedule.execute(wait: false)
-    end
-
-    it 'successfully finishes' do
-      # timeout = 3.hours
-      # results = GoodData::AppStore::Helper.wait_for_executions(
-      #   [users_schedule, user_filters_schedule],
-      #   timeout
-      # )
-      # results.each do |result|
-      #   expect(result.status).to be :ok
-      # end
-    end
-  end
-end

From 4b391f6010aca33b0c4337bb09a7e1e9068e304b Mon Sep 17 00:00:00 2001
From: Sang Truong
Date: Thu, 3 Nov 2022 10:47:55 +0700
Subject: [PATCH 3/4] FEATURE:MSF-24365 - Add log for test

---
 bin/run_brick.rb | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/bin/run_brick.rb b/bin/run_brick.rb
index e027bed57..d53f16397 100755
--- a/bin/run_brick.rb
+++ b/bin/run_brick.rb
@@ -21,6 +21,7 @@ def handle_error(params, log, brick_type, error, error_message)
   execution_log = GoodData.logger
   execution_log.error "Execution failed. Error: #{error}" unless execution_log.nil?
   GoodData::Bricks::ExecutionResultMiddleware.update_execution_result(GoodData::Bricks::ExecutionStatus::WARNING, error_message)
+  GoodData.logger.warn("Handling the WARNING status when an error occurs")
   log.error "action=#{brick_type}_execution status=failed commit_hash=#{params['GOODDATA_RUBY_COMMIT']} execution_id=#{params['GDC_EXECUTION_ID']} exception=#{error}"
   raise
 end
@@ -29,6 +30,7 @@ def handle_error(params, log, brick_type, error, error_message)
 log = RemoteSyslogLogger.new(syslog_node, 514, :program => "ruby_#{brick_type}", :facility => 'local2')
 
 log.info "action=#{brick_type}_execution status=init"
+GoodData.logger.info("Starting the WARNING status test")
 
 begin
   commit_hash = ENV['GOODDATA_RUBY_COMMIT'] || ''

From 84f7960ad003db760806fb696d65d0198130c207 Mon Sep 17 00:00:00 2001
From: Sang Truong
Date: Fri, 4 Nov 2022 08:37:06 +0700
Subject: [PATCH 4/4] FEATURE:MSF-24365 - Modified code for test

---
 bin/run_brick.rb | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/bin/run_brick.rb b/bin/run_brick.rb
index d53f16397..c2c46364e 100755
--- a/bin/run_brick.rb
+++ b/bin/run_brick.rb
@@ -20,7 +20,7 @@ def get_brick_params(prefix)
 def handle_error(params, log, brick_type, error, error_message)
   execution_log = GoodData.logger
   execution_log.error "Execution failed. Error: #{error}" unless execution_log.nil?
-  GoodData::Bricks::ExecutionResultMiddleware.update_execution_result(GoodData::Bricks::ExecutionStatus::WARNING, error_message)
+  GoodData::Bricks::ExecutionResultMiddleware.update_execution_result(GoodData::Bricks::ExecutionStatus::ERROR, error_message)
   GoodData.logger.warn("Handling the WARNING status when an error occurs")
   log.error "action=#{brick_type}_execution status=failed commit_hash=#{params['GOODDATA_RUBY_COMMIT']} execution_id=#{params['GDC_EXECUTION_ID']} exception=#{error}"
   raise
 end
@@ -60,6 +60,12 @@ def handle_error(params, log, brick_type, error, error_message)
   log.info "action=#{brick_type}_execution status=start commit_hash=#{commit_hash} execution_id=#{execution_id}"
   brick_pipeline.call(params)
+  if params['is_brick_ok'].nil? || params['is_brick_ok'] == false
+    log.info "#Debug brick ends: WARNING"
+    GoodData::Bricks::ExecutionResultMiddleware.update_execution_result(GoodData::Bricks::ExecutionStatus::WARNING, "The testing warning message")
+  else
+    log.info "#Debug brick ends: OK"
+  end
 rescue GoodData::LcmExecutionError => lcm_error
   handle_error(execution_result_log_params, log, brick_type, lcm_error, lcm_error.summary_error)
 rescue Exception => e # rubocop:disable RescueException