diff --git a/lib/optimizely/audience.rb b/lib/optimizely/audience.rb
index 0a2a6a2e..130e5d95 100644
--- a/lib/optimizely/audience.rb
+++ b/lib/optimizely/audience.rb
@@ -16,7 +16,7 @@
 #    limitations under the License.
 #
 require 'json'
-require_relative './custom_attribute_condition_evaluator'
+require_relative './user_condition_evaluator'
 require_relative 'condition_tree_evaluator'
 require_relative 'helpers/constants'
 
@@ -24,13 +24,12 @@ module Optimizely
   module Audience
     module_function
 
-    def user_meets_audience_conditions?(config, experiment, attributes, logger, logging_hash = nil, logging_key = nil)
+    def user_meets_audience_conditions?(config, experiment, user_context, logger, logging_hash = nil, logging_key = nil)
       # Determine for given experiment/rollout rule if user satisfies the audience conditions.
       #
       # config - Representation of the Optimizely project config.
       # experiment - Experiment/Rollout rule in which user is to be bucketed.
-      # attributes - Hash representing user attributes which will be used in determining if
-      #              the audience conditions are met.
+      # user_context - Optimizely user context instance
       # logger - Provides a logger instance.
       # logging_hash - Optional string representing logs hash inside Helpers::Constants.
       #                This defaults to 'EXPERIMENT_AUDIENCE_EVALUATION_LOGS'.
@@ -57,12 +56,10 @@ def user_meets_audience_conditions?(config, experiment, attributes, logger, logg
         return true, decide_reasons
       end
 
-      attributes ||= {}
+      user_condition_evaluator = UserConditionEvaluator.new(user_context, logger)
 
-      custom_attr_condition_evaluator = CustomAttributeConditionEvaluator.new(attributes, logger)
-
-      evaluate_custom_attr = lambda do |condition|
-        return custom_attr_condition_evaluator.evaluate(condition)
+      evaluate_user_conditions = lambda do |condition|
+        return user_condition_evaluator.evaluate(condition)
       end
 
       evaluate_audience = lambda do |audience_id|
@@ -75,7 +72,7 @@ def user_meets_audience_conditions?(config, experiment, attributes, logger, logg
         decide_reasons.push(message)
 
         audience_conditions = JSON.parse(audience_conditions) if audience_conditions.is_a?(String)
-        result = ConditionTreeEvaluator.evaluate(audience_conditions, evaluate_custom_attr)
+        result = ConditionTreeEvaluator.evaluate(audience_conditions, evaluate_user_conditions)
         result_str = result.nil? ? 'UNKNOWN' : result.to_s.upcase
         message = format(logs_hash['AUDIENCE_EVALUATION_RESULT'], audience_id, result_str)
         logger.log(Logger::DEBUG, message)
@@ -93,5 +90,38 @@ def user_meets_audience_conditions?(config, experiment, attributes, logger, logg
 
       [eval_result, decide_reasons]
     end
+
+    def get_segments(conditions)
+      # Return any audience segments from provided conditions.
+      #
+      # conditions - Nested array of and/or conditions.
+      #              Example: ['and', operand_1, ['or', operand_2, operand_3]]
+      #
+      # Returns unique array of segment names.
+      conditions = JSON.parse(conditions) if conditions.is_a?(String)
+      @parse_segments.call(conditions).uniq
+    end
+
+    @parse_segments = lambda { |conditions|
+      # Return any audience segments from provided conditions.
+      # Helper function for get_segments.
+      #
+      # conditions - Nested array of and/or conditions.
+      #              Example: ['and', operand_1, ['or', operand_2, operand_3]]
+      #
+      # Returns array of segment names.
+      segments = []
+
+      conditions.each do |condition|
+        case condition
+        when Array
+          segments.concat @parse_segments.call(condition)
+        when Hash
+          segments.push(condition['value']) if condition.fetch('match', nil) == 'qualified'
+        end
+      end
+
+      segments
+    }
   end
 end
diff --git a/lib/optimizely/config/datafile_project_config.rb b/lib/optimizely/config/datafile_project_config.rb
index 321512ae..d6cb29c9 100644
--- a/lib/optimizely/config/datafile_project_config.rb
+++ b/lib/optimizely/config/datafile_project_config.rb
@@ -43,6 +43,10 @@ class DatafileProjectConfig < ProjectConfig
     attr_reader :rollouts
     attr_reader :version
     attr_reader :send_flag_decisions
+    attr_reader :integrations
+    attr_reader :public_key_for_odp
+    attr_reader :host_for_odp
+    attr_reader :all_segments
 
     attr_reader :attribute_key_map
     attr_reader :audience_id_map
@@ -61,6 +65,7 @@ class DatafileProjectConfig < ProjectConfig
     attr_reader :variation_id_map_by_experiment_id
     attr_reader :variation_key_map_by_experiment_id
     attr_reader :flag_variation_map
+    attr_reader :integration_key_map
 
     def initialize(datafile, logger, error_handler)
       # ProjectConfig init method to fetch and set project config data
@@ -92,6 +97,7 @@ def initialize(datafile, logger, error_handler)
       @environment_key = config.fetch('environmentKey', '')
       @rollouts = config.fetch('rollouts', [])
       @send_flag_decisions = config.fetch('sendFlagDecisions', false)
+      @integrations = config.fetch('integrations', [])
 
       # Json type is represented in datafile as a subtype of string for the sake of backwards compatibility.
       # Converting it to a first-class json type while creating Project Config
@@ -117,6 +123,7 @@ def initialize(datafile, logger, error_handler)
       @experiment_key_map = generate_key_map(@experiments, 'key')
       @experiment_id_map = generate_key_map(@experiments, 'id')
       @audience_id_map = generate_key_map(@audiences, 'id')
+      @integration_key_map = generate_key_map(@integrations, 'key')
       @audience_id_map = @audience_id_map.merge(generate_key_map(@typed_audiences, 'id')) unless @typed_audiences.empty?
       @variation_id_map = {}
       @variation_key_map = {}
@@ -142,6 +149,16 @@ def initialize(datafile, logger, error_handler)
         @rollout_experiment_id_map = @rollout_experiment_id_map.merge(generate_key_map(exps, 'id'))
       end
 
+      if (odp_integration = @integration_key_map&.fetch('odp', nil))
+        @public_key_for_odp = odp_integration['publicKey']
+        @host_for_odp = odp_integration['host']
+      end
+
+      @all_segments = []
+      @audience_id_map.each_value do |audience|
+        @all_segments.concat Audience.get_segments(audience['conditions'])
+      end
+
       @flag_variation_map = generate_feature_variation_map(@feature_flags)
       @all_experiments = @experiment_id_map.merge(@rollout_experiment_id_map)
       @all_experiments.each do |id, exp|
diff --git a/lib/optimizely/decision_service.rb b/lib/optimizely/decision_service.rb
index 9c04923e..3dbbf1d0 100644
--- a/lib/optimizely/decision_service.rb
+++ b/lib/optimizely/decision_service.rb
@@ -106,7 +106,7 @@ def get_variation(project_config, experiment_id, user_context, decide_options =
       end
 
       # Check audience conditions
-      user_meets_audience_conditions, reasons_received = Audience.user_meets_audience_conditions?(project_config, experiment, attributes, @logger)
+      user_meets_audience_conditions, reasons_received = Audience.user_meets_audience_conditions?(project_config, experiment, user_context, @logger)
       decide_reasons.push(*reasons_received)
       unless user_meets_audience_conditions
         message = "User '#{user_id}' does not meet the conditions to be in experiment '#{experiment_key}'."
@@ -276,27 +276,27 @@ def get_variation_from_experiment_rule(project_config, flag_key, rule, user, opt
       [variation_id, reasons]
     end
 
-    def get_variation_from_delivery_rule(project_config, flag_key, rules, rule_index, user)
+    def get_variation_from_delivery_rule(project_config, flag_key, rules, rule_index, user_context)
       # Determine which variation the user is in for a given rollout.
       # Returns the variation from delivery rules.
      #
       # project_config - project_config - Instance of ProjectConfig
       # flag_key - The feature flag the user wants to access
       # rule - An experiment rule key
-      # user - Optimizely user context instance
+      # user_context - Optimizely user context instance
       #
       # Returns variation, boolean to skip for eveyone else rule and reasons
       reasons = []
       skip_to_everyone_else = false
       rule = rules[rule_index]
 
       context = Optimizely::OptimizelyUserContext::OptimizelyDecisionContext.new(flag_key, rule['key'])
-      variation, forced_reasons = validated_forced_decision(project_config, context, user)
+      variation, forced_reasons = validated_forced_decision(project_config, context, user_context)
       reasons.push(*forced_reasons)
 
       return [variation, skip_to_everyone_else, reasons] if variation
 
-      user_id = user.user_id
-      attributes = user.user_attributes
+      user_id = user_context.user_id
+      attributes = user_context.user_attributes
       bucketing_id, bucketing_id_reasons = get_bucketing_id(user_id, attributes)
       reasons.push(*bucketing_id_reasons)
 
@@ -304,7 +304,7 @@ def get_variation_from_delivery_rule(project_config, flag_key, rules, rule_index
 
       logging_key = everyone_else ? 'Everyone Else' : (rule_index + 1).to_s
 
-      user_meets_audience_conditions, reasons_received = Audience.user_meets_audience_conditions?(project_config, rule, attributes, @logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', logging_key)
+      user_meets_audience_conditions, reasons_received = Audience.user_meets_audience_conditions?(project_config, rule, user_context, @logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', logging_key)
       reasons.push(*reasons_received)
       unless user_meets_audience_conditions
         message = "User '#{user_id}' does not meet the conditions for targeting rule '#{logging_key}'."
diff --git a/lib/optimizely/helpers/constants.rb b/lib/optimizely/helpers/constants.rb
index 45dede71..eae4906f 100644
--- a/lib/optimizely/helpers/constants.rb
+++ b/lib/optimizely/helpers/constants.rb
@@ -285,6 +285,24 @@ module Constants
         },
         'revision' => {
           'type' => 'string'
+        },
+        'integrations' => {
+          'type' => 'array',
+          'items' => {
+            'type' => 'object',
+            'properties' => {
+              'key' => {
+                'type' => 'string'
+              },
+              'host' => {
+                'type' => 'string'
+              },
+              'publicKey' => {
+                'type' => 'string'
+              }
+            },
+            'required' => %w[key]
+          }
         }
       },
       'required' => %w[
diff --git a/lib/optimizely/optimizely_user_context.rb b/lib/optimizely/optimizely_user_context.rb
index 04d663b6..f00f78c9 100644
--- a/lib/optimizely/optimizely_user_context.rb
+++ b/lib/optimizely/optimizely_user_context.rb
@@ -33,15 +33,18 @@ class OptimizelyUserContext
     def initialize(optimizely_client, user_id, user_attributes)
       @attr_mutex = Mutex.new
       @forced_decision_mutex = Mutex.new
+      @qualified_segment_mutex = Mutex.new
       @optimizely_client = optimizely_client
       @user_id = user_id
       @user_attributes = user_attributes.nil? ? {} : user_attributes.clone
       @forced_decisions = {}
+      @qualified_segments = []
     end
 
     def clone
       user_context = OptimizelyUserContext.new(@optimizely_client, @user_id, user_attributes)
       @forced_decision_mutex.synchronize { user_context.instance_variable_set('@forced_decisions', @forced_decisions.dup) unless @forced_decisions.empty? }
+      @qualified_segment_mutex.synchronize { user_context.instance_variable_set('@qualified_segments', @qualified_segments.dup) unless @qualified_segments.empty? }
       user_context
     end
 
@@ -175,5 +178,31 @@ def as_json
     def to_json(*args)
       as_json.to_json(*args)
     end
+
+    # Returns An array of qualified segments for this user
+    #
+    # @return - An array of segments names.
+
+    def qualified_segments
+      @qualified_segment_mutex.synchronize { @qualified_segments.clone }
+    end
+
+    # Replace qualified segments with provided segments
+    #
+    # @param segments - An array of segment names
+
+    def qualified_segments=(segments)
+      @qualified_segment_mutex.synchronize { @qualified_segments = segments.clone }
+    end
+
+    # Checks if user is qualified for the provided segment.
+    #
+    # @param segment - A segment name
+
+    def qualified_for?(segment)
+      return false if @qualified_segments.empty?
+
+      @qualified_segment_mutex.synchronize { @qualified_segments.include?(segment) }
+    end
   end
 end
diff --git a/lib/optimizely/project_config.rb b/lib/optimizely/project_config.rb
index 989b9b8d..b0d43aa3 100644
--- a/lib/optimizely/project_config.rb
+++ b/lib/optimizely/project_config.rb
@@ -54,6 +54,14 @@ def send_flag_decisions; end
 
     def rollouts; end
 
+    def integrations; end
+
+    def public_key_for_odp; end
+
+    def host_for_odp; end
+
+    def all_segments; end
+
     def experiment_running?(experiment); end
 
     def get_experiment_from_key(experiment_key); end
diff --git a/lib/optimizely/custom_attribute_condition_evaluator.rb b/lib/optimizely/user_condition_evaluator.rb
similarity index 90%
rename from lib/optimizely/custom_attribute_condition_evaluator.rb
rename to lib/optimizely/user_condition_evaluator.rb
index 68256804..9f4556cf 100644
--- a/lib/optimizely/custom_attribute_condition_evaluator.rb
+++ b/lib/optimizely/user_condition_evaluator.rb
@@ -21,8 +21,8 @@
 require_relative 'semantic_version'
 
 module Optimizely
-  class CustomAttributeConditionEvaluator
-    CUSTOM_ATTRIBUTE_CONDITION_TYPE = 'custom_attribute'
+  class UserConditionEvaluator
+    CONDITION_TYPES = %w[custom_attribute third_party_dimension].freeze
 
     # Conditional match types
     EXACT_MATCH_TYPE = 'exact'
@@ -37,6 +37,7 @@ class CustomAttributeConditionEvaluator
     SEMVER_GT = 'semver_gt'
     SEMVER_LE = 'semver_le'
     SEMVER_LT = 'semver_lt'
+    QUALIFIED_MATCH_TYPE = 'qualified'
 
     EVALUATORS_BY_MATCH_TYPE = {
       EXACT_MATCH_TYPE => :exact_evaluator,
@@ -50,13 +51,15 @@ class CustomAttributeConditionEvaluator
       SEMVER_GE => :semver_greater_than_or_equal_evaluator,
       SEMVER_GT => :semver_greater_than_evaluator,
       SEMVER_LE => :semver_less_than_or_equal_evaluator,
-      SEMVER_LT => :semver_less_than_evaluator
+      SEMVER_LT => :semver_less_than_evaluator,
+      QUALIFIED_MATCH_TYPE => :qualified_evaluator
     }.freeze
 
     attr_reader :user_attributes
 
-    def initialize(user_attributes, logger)
-      @user_attributes = user_attributes
+    def initialize(user_context, logger)
+      @user_context = user_context
+      @user_attributes = user_context.user_attributes
       @logger = logger
     end
 
@@ -69,7 +72,7 @@ def evaluate(leaf_condition)
       # Returns boolean if the given user attributes match/don't match the given conditions,
       #         nil if the given conditions can't be evaluated.
 
-      unless leaf_condition['type'] == CUSTOM_ATTRIBUTE_CONDITION_TYPE
+      unless CONDITION_TYPES.include? leaf_condition['type']
         @logger.log(
           Logger::WARN,
           format(Helpers::Constants::AUDIENCE_EVALUATION_LOGS['UNKNOWN_CONDITION_TYPE'], leaf_condition)
@@ -79,7 +82,7 @@ def evaluate(leaf_condition)
 
       condition_match = leaf_condition['match'] || EXACT_MATCH_TYPE
 
-      if !@user_attributes.key?(leaf_condition['name']) && condition_match != EXISTS_MATCH_TYPE
+      if !@user_attributes.key?(leaf_condition['name']) && ![EXISTS_MATCH_TYPE, QUALIFIED_MATCH_TYPE].include?(condition_match)
         @logger.log(
           Logger::DEBUG,
           format(
@@ -91,7 +94,7 @@ def evaluate(leaf_condition)
         return nil
       end
 
-      if @user_attributes[leaf_condition['name']].nil? && condition_match != EXISTS_MATCH_TYPE
+      if @user_attributes[leaf_condition['name']].nil? && ![EXISTS_MATCH_TYPE, QUALIFIED_MATCH_TYPE].include?(condition_match)
         @logger.log(
           Logger::DEBUG,
           format(
@@ -327,6 +330,25 @@ def semver_less_than_or_equal_evaluator(condition)
       SemanticVersion.compare_user_version_with_target_version(target_version, user_version) <= 0
     end
 
+    def qualified_evaluator(condition)
+      # Evaluate the given match condition for the given user qaulified segments.
+      # Returns boolean true if condition value is in the user's qualified segments,
+      #                 false if the condition value is not in the user's qualified segments,
+      #                 nil if the condition value isn't a string.
+
+      condition_value = condition['value']
+
+      unless condition_value.is_a?(String)
+        @logger.log(
+          Logger::WARN,
+          format(Helpers::Constants::AUDIENCE_EVALUATION_LOGS['UNKNOWN_CONDITION_VALUE'], condition)
+        )
+        return nil
+      end
+
+      @user_context.qualified_for?(condition_value)
+    end
+
     private
 
     def valid_numeric_values?(user_value, condition_value, condition)
diff --git a/spec/audience_spec.rb b/spec/audience_spec.rb
index 1f304a97..ddbd6101 100644
--- a/spec/audience_spec.rb
+++ b/spec/audience_spec.rb
@@ -19,13 +19,16 @@
 describe Optimizely::Audience do
   let(:config_body_JSON) { OptimizelySpec::VALID_CONFIG_BODY_JSON }
   let(:config_typed_audience_JSON) { JSON.dump(OptimizelySpec::CONFIG_DICT_WITH_TYPED_AUDIENCES) }
+  let(:config_integration_JSON) { JSON.dump(OptimizelySpec::CONFIG_DICT_WITH_INTEGRATIONS) }
   let(:error_handler) { Optimizely::NoOpErrorHandler.new }
   let(:spy_logger) { spy('logger') }
   let(:config) { Optimizely::DatafileProjectConfig.new(config_body_JSON, spy_logger, error_handler) }
   let(:typed_audience_config) { Optimizely::DatafileProjectConfig.new(config_typed_audience_JSON, spy_logger, error_handler) }
+  let(:integration_config) { Optimizely::DatafileProjectConfig.new(config_integration_JSON, spy_logger, error_handler) }
+  let(:project_instance) { Optimizely::Project.new(config_body_JSON, nil, spy_logger, error_handler) }
+  let(:user_context) { project_instance.create_user_context('some-user', {}) }
 
   it 'should return true for user_meets_audience_conditions? when experiment is using no audience' do
-    user_attributes = {}
     # Both Audience Ids and Conditions are Empty
     experiment = config.experiment_key_map['test_experiment']
     experiment['audienceIds'] = []
@@ -33,7 +36,7 @@
 
     expect(Optimizely::Audience.user_meets_audience_conditions?(config,
                                                                 experiment,
-                                                                user_attributes,
+                                                                user_context,
                                                                 spy_logger)[0]).to be true
 
     # Audience Ids exist but Audience Conditions is Empty
@@ -41,7 +44,7 @@
     experiment['audienceIds'] = ['11154']
     experiment['audienceConditions'] = []
 
-    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_attributes, spy_logger)
+    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_context, spy_logger)
     expect(user_meets_audience_conditions).to be true
     expect(reasons).to eq(["Audiences for experiment 'test_experiment' collectively evaluated to TRUE."])
 
@@ -50,13 +53,13 @@
     experiment['audienceIds'] = []
     experiment['audienceConditions'] = nil
 
-    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_attributes, spy_logger)
+    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_context, spy_logger)
     expect(user_meets_audience_conditions).to be true
     expect(reasons).to eq(["Audiences for experiment 'test_experiment' collectively evaluated to TRUE."])
   end
 
   it 'should pass conditions when audience conditions exist else audienceIds are passed' do
-    user_attributes = {'test_attribute' => 'test_value_1'}
+    user_context.instance_variable_set(:@user_attributes, 'test_attribute' => 'test_value_1')
     experiment = config.experiment_key_map['test_experiment']
     experiment['audienceIds'] = ['11154']
     allow(Optimizely::ConditionTreeEvaluator).to receive(:evaluate)
@@ -65,7 +68,7 @@
     experiment['audienceConditions'] = ['and', %w[or 3468206642 3988293898], %w[or 3988293899 3468206646 3468206647 3468206644 3468206643]]
     Optimizely::Audience.user_meets_audience_conditions?(config,
                                                          experiment,
-                                                         user_attributes,
+                                                         user_context,
                                                          spy_logger)
     expect(Optimizely::ConditionTreeEvaluator).to have_received(:evaluate).with(experiment['audienceConditions'], any_args).once
 
@@ -73,17 +76,17 @@
     experiment['audienceConditions'] = nil
     Optimizely::Audience.user_meets_audience_conditions?(config,
                                                          experiment,
-                                                         user_attributes,
+                                                         user_context,
                                                          spy_logger)
     expect(Optimizely::ConditionTreeEvaluator).to have_received(:evaluate).with(experiment['audienceIds'], any_args).once
   end
 
   it 'should return false for user_meets_audience_conditions? if there are audiences but nil or empty attributes' do
     experiment = config.experiment_key_map['test_experiment_with_audience']
-    allow(Optimizely::CustomAttributeConditionEvaluator).to receive(:new).and_call_original
+    allow(Optimizely::UserConditionEvaluator).to receive(:new).and_call_original
 
     # attributes set to empty dict
-    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, {}, spy_logger)
+    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_context, spy_logger)
     expect(user_meets_audience_conditions).to be false
     expect(reasons).to eq([
                             "Starting to evaluate audience '11154' with conditions: [\"and\", [\"or\", [\"or\", {\"name\": \"browser_type\", \"type\": \"custom_attribute\", \"value\": \"firefox\"}]]].",
@@ -92,7 +95,8 @@
                           ])
 
     # attributes set to nil
-    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, nil, spy_logger)
+    user_context = Optimizely::OptimizelyUserContext.new(project_instance, 'some-user', nil)
+    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_context, spy_logger)
     expect(user_meets_audience_conditions).to be false
     expect(reasons).to eq([
                             "Starting to evaluate audience '11154' with conditions: [\"and\", [\"or\", [\"or\", {\"name\": \"browser_type\", \"type\": \"custom_attribute\", \"value\": \"firefox\"}]]].",
@@ -101,16 +105,15 @@
                           ])
 
     # asserts nil attributes default to empty dict
-    expect(Optimizely::CustomAttributeConditionEvaluator).to have_received(:new).with({}, spy_logger).twice
+    expect(Optimizely::UserConditionEvaluator).to have_received(:new).with(user_context, spy_logger).once
   end
 
   it 'should return true for user_meets_audience_conditions? when condition tree evaluator returns true' do
     experiment = config.experiment_key_map['test_experiment']
-    user_attributes = {
-      'test_attribute' => 'test_value_1'
-    }
+    user_context.instance_variable_set(:@user_attributes, 'test_attribute' => 'test_value_1')
+
     allow(Optimizely::ConditionTreeEvaluator).to receive(:evaluate).and_return(true)
-    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_attributes, spy_logger)
+    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_context, spy_logger)
     expect(user_meets_audience_conditions).to be true
     expect(reasons).to eq([
                             "Audiences for experiment 'test_experiment' collectively evaluated to TRUE."
@@ -119,14 +122,12 @@
 
   it 'should return false for user_meets_audience_conditions? when condition tree evaluator returns false or nil' do
     experiment = config.experiment_key_map['test_experiment_with_audience']
-    user_attributes = {
-      'browser_type' => 'firefox'
-    }
+    user_context.instance_variable_set(:@user_attributes, 'browser_type' => 'firefox')
 
     # condition tree evaluator returns nil
     allow(Optimizely::ConditionTreeEvaluator).to receive(:evaluate).and_return(nil)
-    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_attributes, spy_logger)
+    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_context, spy_logger)
     expect(user_meets_audience_conditions).to be false
     expect(reasons).to eq([
                             "Audiences for experiment 'test_experiment_with_audience' collectively evaluated to FALSE."
@@ -134,7 +135,7 @@
 
     # condition tree evaluator returns false
     allow(Optimizely::ConditionTreeEvaluator).to receive(:evaluate).and_return(false)
-    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_attributes, spy_logger)
+    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_context, spy_logger)
     expect(user_meets_audience_conditions).to be false
     expect(reasons).to eq([
                             "Audiences for experiment 'test_experiment_with_audience' collectively evaluated to FALSE."
@@ -143,9 +144,7 @@
 
   it 'should correctly evaluate audience Ids and call custom attribute evaluator for leaf nodes' do
     experiment = config.experiment_key_map['test_experiment_with_audience']
-    user_attributes = {
-      'browser_type' => 'firefox'
-    }
+    user_context.instance_variable_set(:@user_attributes, 'browser_type' => 'firefox')
     experiment['audienceIds'] = %w[11154 11155]
     experiment['audienceConditions'] = nil
 
@@ -154,7 +153,7 @@
     audience_11154_condition = JSON.parse(audience_11154['conditions'])[1][1][1]
     audience_11155_condition = JSON.parse(audience_11155['conditions'])[1][1][1]
 
-    customer_attr = Optimizely::CustomAttributeConditionEvaluator.new(user_attributes, spy_logger)
+    customer_attr = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
     allow(customer_attr).to receive(:exact_evaluator)
     customer_attr.evaluate(audience_11154_condition)
     customer_attr.evaluate(audience_11155_condition)
@@ -164,10 +163,7 @@
   end
 
   it 'should correctly evaluate audienceConditions and call custom attribute evaluator for leaf nodes' do
-    user_attributes = {
-      'house' => 'Gryffindor',
-      'lasers' => 45.5
-    }
+    user_context.instance_variable_set(:@user_attributes, 'house' => 'Gryffindor', 'lasers' => 45.5)
     experiment = typed_audience_config.get_experiment_from_key('audience_combinations_experiment')
     experiment['audienceIds'] = []
     experiment['audienceConditions'] = ['or', %w[or 3468206642 3988293898], %w[or 3988293899 3468206646]]
@@ -182,7 +178,7 @@
     audience_3988293899_condition = audience_3988293899['conditions'][1][1][1]
     audience_3468206646_condition = audience_3468206646['conditions'][1][1][1]
 
-    customer_attr = Optimizely::CustomAttributeConditionEvaluator.new(user_attributes, spy_logger)
+    customer_attr = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
     allow(customer_attr).to receive(:exact_evaluator)
     allow(customer_attr).to receive(:substring_evaluator)
     allow(customer_attr).to receive(:exists_evaluator)
@@ -198,12 +194,10 @@
   end
 
   it 'should correctly evaluate leaf node in audienceConditions' do
-    user_attributes = {
-      'browser' => 'chrome'
-    }
+    user_context.instance_variable_set(:@user_attributes, 'browser' => 'chrome')
     experiment = typed_audience_config.get_experiment_from_key('audience_combinations_experiment')
     experiment['audienceConditions'] = '3468206645'
-    customer_attr = Optimizely::CustomAttributeConditionEvaluator.new(user_attributes, spy_logger)
+    customer_attr = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
 
     audience_3468206645 = typed_audience_config.get_audience_from_id('3468206645')
     audience_3468206645_condition1 = audience_3468206645['conditions'][1][1][1]
@@ -218,12 +212,10 @@
 
   it 'should return nil when audience not found' do
     experiment = config.experiment_key_map['test_experiment_with_audience']
-    user_attributes = {
-      'browser_type' => 5.5
-    }
+    user_context.instance_variable_set(:@user_attributes, 'browser_type' => 5.5)
     experiment['audienceIds'] = %w[11110]
 
-    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_attributes, spy_logger)
+    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_context, spy_logger)
     expect(user_meets_audience_conditions).to be false
     expect(reasons).to eq([
                             "Audiences for experiment 'test_experiment_with_audience' collectively evaluated to FALSE."
@@ -242,13 +234,11 @@
 
   it 'should log and return false for user_meets_audience_conditions? evaluates audienceIds' do
     experiment = config.experiment_key_map['test_experiment_with_audience']
-    user_attributes = {
-      'browser_type' => 5.5
-    }
+    user_context.instance_variable_set(:@user_attributes, 'browser_type' => 5.5)
     experiment['audienceIds'] = %w[11154 11155]
     experiment['audienceConditions'] = nil
 
-    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_attributes, spy_logger)
+    user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_context, spy_logger)
     expect(user_meets_audience_conditions).to be false
     expect(reasons).to eq([
                             "Starting to evaluate audience '11154' with conditions: [\"and\", [\"or\", [\"or\", {\"name\": \"browser_type\", \"type\": \"custom_attribute\", \"value\": \"firefox\"}]]].",
@@ -294,14 +284,12 @@
   end
 
   it 'should log and return true for user_meets_audience_conditions? evaluates audienceConditions' do
-    user_attributes = {
-      'lasers' => 45.5
-    }
+    user_context.instance_variable_set(:@user_attributes, 'lasers' => 45.5)
     experiment = typed_audience_config.get_experiment_from_key('audience_combinations_experiment')
     experiment['audienceIds'] = []
     experiment['audienceConditions'] = ['or', %w[or 3468206647 3988293898 3468206646]]
 
-    Optimizely::Audience.user_meets_audience_conditions?(typed_audience_config, experiment, user_attributes, spy_logger)
+    Optimizely::Audience.user_meets_audience_conditions?(typed_audience_config, experiment, user_context, spy_logger)
 
     expect(spy_logger).to have_received(:log).once.with(
       Logger::DEBUG,
@@ -355,14 +343,12 @@
     logging_hash = 'ROLLOUT_AUDIENCE_EVALUATION_LOGS'
     logging_key = 'some_key'
 
-    user_attributes = {
-      'lasers' => 45.5
-    }
+    user_context.instance_variable_set(:@user_attributes, 'lasers' => 45.5)
     experiment = typed_audience_config.get_experiment_from_key('audience_combinations_experiment')
     experiment['audienceIds'] = []
     experiment['audienceConditions'] = ['or', %w[or 3468206647 3988293898 3468206646]]
 
-    Optimizely::Audience.user_meets_audience_conditions?(typed_audience_config, experiment, user_attributes, spy_logger, logging_hash, logging_key)
+    Optimizely::Audience.user_meets_audience_conditions?(typed_audience_config, experiment, user_context, spy_logger, logging_hash, logging_key)
 
     expect(spy_logger).to have_received(:log).once.with(
       Logger::DEBUG,
@@ -375,4 +361,25 @@
       "Audiences for rule 'some_key' collectively evaluated to TRUE."
     )
   end
+
+  it 'should return a unique array of odp segments' do
+    seg1 = {'name' => 'odp.audiences', 'type' => 'third_party_dimension', 'match' => 'qualified', 'value' => 'seg1'}
+    seg2 = {'name' => 'odp.audiences', 'type' => 'third_party_dimension', 'match' => 'qualified', 'value' => 'seg2'}
+    seg3 = {'name' => 'odp.audiences', 'type' => 'third_party_dimension', 'match' => 'qualified', 'value' => 'seg3'}
+    other = {'name' => 'other', 'type' => 'custom_attribute', 'match' => 'eq', 'value' => 'a'}
+
+    expect(Optimizely::Audience.get_segments([seg1])).to match_array %w[seg1]
+
+    expect(Optimizely::Audience.get_segments(['or', seg1])).to match_array %w[seg1]
+
+    expect(Optimizely::Audience.get_segments(['and', ['or', seg1]])).to match_array %w[seg1]
+
+    expect(Optimizely::Audience.get_segments(['and', ['or', seg1], ['or', seg2], ['and', other]])).to match_array %w[seg1 seg2]
+
+    expect(Optimizely::Audience.get_segments(['and', ['or', seg1, other, seg2]])).to match_array %w[seg1 seg2]
+
+    segments = Optimizely::Audience.get_segments(['and', ['or', seg1, other, seg2], ['and', seg1, seg2, seg3]])
+    expect(segments.length).to be 3
+    expect(segments).to match_array %w[seg1 seg2 seg3]
+  end
 end
diff --git a/spec/config/datafile_project_config_spec.rb b/spec/config/datafile_project_config_spec.rb
index 5a5ab42d..3cf2bd31 100644
--- a/spec/config/datafile_project_config_spec.rb
+++ b/spec/config/datafile_project_config_spec.rb
@@ -24,6 +24,8 @@
   let(:config_body) { OptimizelySpec::VALID_CONFIG_BODY }
   let(:config_body_JSON) { OptimizelySpec::VALID_CONFIG_BODY_JSON }
   let(:decision_JSON) { OptimizelySpec::DECIDE_FORCED_DECISION_JSON }
+  let(:integrations_config) { OptimizelySpec::CONFIG_DICT_WITH_INTEGRATIONS }
+  let(:integrations_JSON) { OptimizelySpec::CONFIG_DICT_WITH_INTEGRATIONS_JSON }
   let(:error_handler) { Optimizely::NoOpErrorHandler.new }
   let(:logger) { Optimizely::NoOpLogger.new }
   let(:config) { Optimizely::DatafileProjectConfig.new(config_body_JSON, logger, error_handler) }
@@ -783,6 +785,33 @@
 
       expect(project_config.send_flag_decisions).to eq(false)
     end
+
+    it 'should initialize properties correctly upon creating project with integrations' do
+      project_config = Optimizely::DatafileProjectConfig.new(integrations_JSON, logger, error_handler)
+      integrations = integrations_config['integrations']
+      odp_integration = integrations[0]
+
+      expect(project_config.integrations).to eq(integrations)
+      expect(project_config.integration_key_map['odp']).to eq(odp_integration)
+
+      expect(project_config.public_key_for_odp).to eq(odp_integration['publicKey'])
+      expect(project_config.host_for_odp).to eq(odp_integration['host'])
+
+      expect(project_config.all_segments).to eq(%w[odp-segment-1 odp-segment-2 odp-segment-3])
+    end
+
+    it 'should initialize properties correctly upon creating project with empty integrations' do
+      config = integrations_config.dup
+      config['integrations'] = []
+      integrations_json = JSON.dump(config)
+
+      project_config = Optimizely::DatafileProjectConfig.new(integrations_json, logger, error_handler)
+
+      expect(project_config.integrations).to eq([])
+
+      expect(project_config.public_key_for_odp).to eq(nil)
+      expect(project_config.host_for_odp).to eq(nil)
+    end
   end
 
   describe '@logger' do
diff --git a/spec/decision_service_spec.rb b/spec/decision_service_spec.rb
index 33e1152d..3d4a687f 100644
--- a/spec/decision_service_spec.rb
+++ b/spec/decision_service_spec.rb
@@ -29,6 +29,7 @@
   let(:config) { Optimizely::DatafileProjectConfig.new(config_body_JSON, spy_logger, error_handler) }
   let(:decision_service) { Optimizely::DecisionService.new(spy_logger, spy_user_profile_service) }
   let(:project_instance) { Optimizely::Project.new(config_body_JSON, nil, spy_logger, error_handler) }
+  let(:user_context) { project_instance.create_user_context('some-user', {}) }
 
   describe '#get_variation' do
     before(:example) do
@@ -620,7 +621,6 @@
     project_instance = Optimizely::Project.new(config_body_json, nil, nil, nil)
     user_context = project_instance.create_user_context('user_1', {})
     user_id = 'user_1'
-    user_attributes = {}
 
     describe 'when the feature flag is not associated with a rollout' do
       it 'should log a message and return nil' do
@@ -701,9 +701,9 @@
 
         # make sure we only checked the audience for the first rule
         expect(Optimizely::Audience).to have_received(:user_meets_audience_conditions?).once
-          .with(config, rollout['experiments'][0], user_attributes, spy_logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', '1')
+          .with(config, rollout['experiments'][0], user_context, spy_logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', '1')
         expect(Optimizely::Audience).not_to have_received(:user_meets_audience_conditions?)
-          .with(config, rollout['experiments'][1], user_attributes, spy_logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', 2)
+          .with(config, rollout['experiments'][1], user_context, spy_logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', 2)
       end
     end
 
@@ -733,9 +733,9 @@
 
         # make sure we only checked the audience for the first rule
         expect(Optimizely::Audience).to have_received(:user_meets_audience_conditions?).once
-          .with(config, rollout['experiments'][0], user_attributes, spy_logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', '1')
+          .with(config, rollout['experiments'][0], user_context, spy_logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', '1')
        expect(Optimizely::Audience).not_to have_received(:user_meets_audience_conditions?)
-          .with(config, rollout['experiments'][1], user_attributes, spy_logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', 2)
+          .with(config, rollout['experiments'][1], user_context, spy_logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', 2)
       end
     end
   end
@@ -751,7 +751,7 @@
 
       allow(Optimizely::Audience).to receive(:user_meets_audience_conditions?).and_return(false)
       allow(Optimizely::Audience).to receive(:user_meets_audience_conditions?)
-        .with(config, everyone_else_experiment, user_attributes, spy_logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', 'Everyone Else')
+        .with(config, everyone_else_experiment, user_context, spy_logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', 'Everyone Else')
         .and_return(true)
       allow(decision_service.bucketer).to receive(:bucket)
         .with(config, everyone_else_experiment, user_id, user_id)
@@ -797,11 +797,11 @@
 
       # verify we tried to bucket in all targeting rules and the everyone else rule
       expect(Optimizely::Audience).to have_received(:user_meets_audience_conditions?).once
-        .with(config, rollout['experiments'][0], user_attributes, spy_logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', '1')
+        .with(config, rollout['experiments'][0], user_context, spy_logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', '1')
       expect(Optimizely::Audience).to have_received(:user_meets_audience_conditions?)
-        .with(config, rollout['experiments'][1], user_attributes, spy_logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', '2')
+        .with(config, rollout['experiments'][1], user_context, spy_logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', '2')
       expect(Optimizely::Audience).to have_received(:user_meets_audience_conditions?)
-        .with(config, rollout['experiments'][2], user_attributes, spy_logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', 'Everyone Else')
+        .with(config, rollout['experiments'][2], user_context, spy_logger, 'ROLLOUT_AUDIENCE_EVALUATION_LOGS', 'Everyone Else')
 
       # verify log messages
       expect(spy_logger).to have_received(:log).with(Logger::DEBUG, "User '#{user_id}' does not meet the conditions for targeting rule '1'.")
diff --git a/spec/optimizely_user_context_spec.rb b/spec/optimizely_user_context_spec.rb
index 962843ec..78011068 100644
--- a/spec/optimizely_user_context_spec.rb
+++ b/spec/optimizely_user_context_spec.rb
@@ -24,10 +24,12 @@
   let(:config_body_JSON) { OptimizelySpec::VALID_CONFIG_BODY_JSON }
   let(:config_body_invalid_JSON) { OptimizelySpec::INVALID_CONFIG_BODY_JSON }
   let(:forced_decision_JSON) { OptimizelySpec::DECIDE_FORCED_DECISION_JSON }
+  let(:integration_JSON) { OptimizelySpec::CONFIG_DICT_WITH_INTEGRATIONS_JSON }
  let(:error_handler) { Optimizely::RaiseErrorHandler.new }
   let(:spy_logger) { spy('logger') }
   let(:project_instance) { Optimizely::Project.new(config_body_JSON, nil, spy_logger, error_handler) }
   let(:forced_decision_project_instance) { Optimizely::Project.new(forced_decision_JSON, nil, spy_logger, error_handler) }
+  let(:integration_project_instance) { Optimizely::Project.new(integration_JSON, nil, spy_logger, error_handler) }
   let(:impression_log_url) { 'https://logx.optimizely.com/v1/events' }
 
   describe '#initialize' do
@@ -722,4 +724,74 @@
       expect(user_context_obj).to have_received(:remove_all_forced_decisions).once
     end
   end
+  it 'should clone qualified segments in user context' do
+    user_context_obj = Optimizely::OptimizelyUserContext.new(integration_project_instance, 'tester', {})
+    qualified_segments = %w[seg1 seg2]
+    user_context_obj.qualified_segments = qualified_segments
+    user_clone_1 = user_context_obj.clone
+
+    expect(user_clone_1.qualified_segments).not_to be_empty
+    expect(user_clone_1.qualified_segments).to eq qualified_segments
+    expect(user_clone_1.qualified_segments).not_to be user_context_obj.qualified_segments
+    expect(user_clone_1.qualified_segments).not_to be qualified_segments
+  end
+
+  it 'should hit segment in ab test' do
+    stub_request(:post, impression_log_url)
+    user_context_obj = Optimizely::OptimizelyUserContext.new(integration_project_instance, 'tester', {})
+    user_context_obj.qualified_segments = %w[odp-segment-1 odp-segment-none]
+
+    decision = user_context_obj.decide('flag-segment')
+
+    expect(decision.variation_key).to eq 'variation-a'
+  end
+
+  it 'should hit other audience with segments in ab test' do
+    stub_request(:post, impression_log_url)
+    user_context_obj = Optimizely::OptimizelyUserContext.new(integration_project_instance, 'tester', 'age' => 30)
+    user_context_obj.qualified_segments = %w[odp-segment-none]
+
+    decision = user_context_obj.decide('flag-segment', [Optimizely::Decide::OptimizelyDecideOption::IGNORE_USER_PROFILE_SERVICE])
+
+    expect(decision.variation_key).to eq 'variation-a'
+  end
+
+  it 'should hit segment in rollout' do
+    stub_request(:post, impression_log_url)
+    user_context_obj = Optimizely::OptimizelyUserContext.new(integration_project_instance, 'tester', {})
+    user_context_obj.qualified_segments = %w[odp-segment-2]
+
+    decision = user_context_obj.decide('flag-segment', [Optimizely::Decide::OptimizelyDecideOption::IGNORE_USER_PROFILE_SERVICE])
+
+    expect(decision.variation_key).to eq 'rollout-variation-on'
+  end
+
+  it 'should miss segment in rollout' do
+    stub_request(:post, impression_log_url)
+    user_context_obj = Optimizely::OptimizelyUserContext.new(integration_project_instance, 'tester', {})
+    user_context_obj.qualified_segments = %w[odp-segment-none]
+
+    decision = user_context_obj.decide('flag-segment', [Optimizely::Decide::OptimizelyDecideOption::IGNORE_USER_PROFILE_SERVICE])
+
+    expect(decision.variation_key).to eq 'rollout-variation-off'
+  end
+
+  it 'should miss segment with empty segments' do
+    stub_request(:post, impression_log_url)
+    user_context_obj = Optimizely::OptimizelyUserContext.new(integration_project_instance, 'tester', {})
+    user_context_obj.qualified_segments = []
+
+    decision = user_context_obj.decide('flag-segment', [Optimizely::Decide::OptimizelyDecideOption::IGNORE_USER_PROFILE_SERVICE])
+
+    expect(decision.variation_key).to eq 'rollout-variation-off'
+  end
+
+  it 'should not fail without any segments' do
+    stub_request(:post, impression_log_url)
+    user_context_obj = Optimizely::OptimizelyUserContext.new(integration_project_instance, 'tester', {})
+
+    decision = user_context_obj.decide('flag-segment', [Optimizely::Decide::OptimizelyDecideOption::IGNORE_USER_PROFILE_SERVICE])
+
+    expect(decision.variation_key).to eq 'rollout-variation-off'
+  end
 end
diff --git a/spec/project_spec.rb b/spec/project_spec.rb
index 452b6dc9..e30d0490 100644
--- a/spec/project_spec.rb
+++ b/spec/project_spec.rb
@@ -30,6 +30,7 @@
   let(:config_body) { OptimizelySpec::VALID_CONFIG_BODY }
   let(:config_body_JSON) { OptimizelySpec::VALID_CONFIG_BODY_JSON }
   let(:config_body_invalid_JSON) { OptimizelySpec::INVALID_CONFIG_BODY_JSON }
+  let(:config_body_integrations) { OptimizelySpec::CONFIG_DICT_WITH_INTEGRATIONS }
   let(:error_handler) { Optimizely::RaiseErrorHandler.new }
   let(:spy_logger) { spy('logger') }
   let(:version) { Optimizely::VERSION }
@@ -115,6 +116,35 @@ class InvalidErrorHandler; end
       Optimizely::Project.new(config_body_JSON, nil, nil, nil, true)
     end
 
+    it 'should be invalid when datafile contains integrations missing key' do
+      expect_any_instance_of(Optimizely::SimpleLogger).to receive(:log).once.with(Logger::ERROR, 'Provided datafile is in an invalid format.')
+      config = config_body_integrations.dup
+      config['integrations'][0].delete('key')
+      integrations_json = JSON.dump(config)
+
+      Optimizely::Project.new(integrations_json)
+    end
+
+    it 'should be valid when datafile contains integrations with only key' do
+      config = config_body_integrations.dup
+      config['integrations'].clear
+      config['integrations'].push('key' => '123')
+      integrations_json = JSON.dump(config)
+
+      project_instance = Optimizely::Project.new(integrations_json)
+      expect(project_instance.is_valid).to be true
+    end
+
+    it 'should be valid when datafile contains integrations with arbitrary fields' do
+      config = config_body_integrations.dup
+      config['integrations'].clear
+      config['integrations'].push('key' => 'future', 'any-key-1' => 1, 'any-key-2' => 'any-value-2')
+      integrations_json = JSON.dump(config)
+
+      project_instance = Optimizely::Project.new(integrations_json)
+      expect(project_instance.is_valid).to be true
+    end
+
     it 'should log and raise an error when provided a datafile that is not JSON and skip_json_validation is true' do
       expect_any_instance_of(Optimizely::SimpleLogger).to receive(:log).once.with(Logger::ERROR, 'Provided datafile is in an invalid format.')
       expect_any_instance_of(Optimizely::RaiseErrorHandler).to receive(:handle_error).once.with(Optimizely::InvalidInputError)
diff --git a/spec/spec_params.rb b/spec/spec_params.rb
index 588cde57..62c585a9 100644
--- a/spec/spec_params.rb
+++ b/spec/spec_params.rb
@@ -1132,6 +1132,199 @@ module OptimizelySpec
     'sendFlagDecisions' => true
   }.freeze
 
+  CONFIG_DICT_WITH_INTEGRATIONS = {
+    'version' => '4',
+    'sendFlagDecisions' => true,
+    'rollouts' => [
+      {
+        'experiments' => [
+          {
+            'audienceIds' => ['13389130056'],
+            'forcedVariations' => {},
+            'id' => '3332020515',
+            'key' => 'rollout-rule-1',
+            'layerId' => '3319450668',
+            'status' => 'Running',
+            'trafficAllocation' => [
+              {
+                'endOfRange' => 10_000,
+                'entityId' => '3324490633'
+              }
+            ],
+            'variations' => [
+              {
+                'featureEnabled' => true,
+                'id' => '3324490633',
+                'key' => 'rollout-variation-on',
+                'variables' => []
+              }
+            ]
+          },
+          {
+            'audienceIds' => [],
+            'forcedVariations' => {},
+            'id' => '3332020556',
+            'key' => 'rollout-rule-2',
+            'layerId' => '3319450668',
+            'status' => 'Running',
+            'trafficAllocation' => [
+              {
+                'endOfRange' => 10_000,
+                'entityId' => '3324490644'
+              }
+            ],
+            'variations' => [
+              {
+                'featureEnabled' => false,
+                'id' => '3324490644',
+                'key' => 'rollout-variation-off',
+                'variables' => []
+              }
+            ]
+          }
+        ],
+        'id' => '3319450668'
+      }
+    ],
+    'anonymizeIP' => true,
+    'botFiltering' => true,
+    'projectId': '10431130345',
+    'variables': [],
+    'featureFlags': [
+      {
+        'experimentIds' => ['10390977673'],
+        'id' => '4482920077',
+        'key' => 'flag-segment',
+        'rolloutId' => '3319450668',
+        'variables' => [
+          {
+            'defaultValue' => '42',
+            'id' => '2687470095',
+            'key' => 'i_42',
+            'type' => 'integer'
+          }
+        ]
+      }
+    ],
+    'experiments' => [
+      {
+        'status' => 'Running',
+        'key' => 'experiment-segment',
+        'layerId' => '10420273888',
+        'trafficAllocation' => [
+          {
+            'entityId' => '10389729780',
+            'endOfRange' => 10_000
+          }
+        ],
+        'audienceIds' => ['$opt_dummy_audience'],
+        'audienceConditions' => %w[or 13389142234 13389141123],
+        'variations' => [
+          {
+            'variables' => [],
+            'featureEnabled' => true,
+            'id' => '10389729780',
+            'key' => 'variation-a'
+          },
+          {
+            'variables' => [],
+            'id' => '10416523121',
+            'key' => 'variation-b'
+          }
+        ],
+        'forcedVariations' => {},
+        'id' => '10390977673'
+      }
+    ],
+    'groups' => [],
+    'integrations' => [
+      {
+        'key' => 'odp',
+        'host' => 'https =>//api.zaius.com',
+        'publicKey' => 'W4WzcEs-ABgXorzY7h1LCQ'
+      }
+    ],
+    'typedAudiences' => [
+      {
+        'id' => '13389142234',
+        'conditions' => [
+          'and',
+          [
+            'or',
+            [
+              'or',
+              {
+                'value' => 'odp-segment-1',
+                'type' => 'third_party_dimension',
+                'name' => 'odp.audiences',
+                'match' => 'qualified'
+              }
+            ]
+          ]
+        ],
+        'name' => 'odp-segment-1'
+      },
+      {
+        'id' => '13389130056',
+        'conditions' => [
+          'and',
+          [
+            'or',
+            [
+              'or',
+              {
+                'value' => 'odp-segment-2',
+                'type' => 'third_party_dimension',
+                'name' => 'odp.audiences',
+                'match' => 'qualified'
+              },
+              {
+                'value' => 'us',
+                'type' => 'custom_attribute',
+                'name' => 'country',
+                'match' => 'exact'
+              }
+            ],
+            [
+              'or',
+              {
+                'value' => 'odp-segment-3',
+                'type' => 'third_party_dimension',
+                'name' => 'odp.audiences',
+                'match' => 'qualified'
+              }
+            ]
+          ]
+        ],
+        'name' => 'odp-segment-2'
+      }
+    ],
+    'audiences' => [
+      {
+        'id' => '13389141123',
+        'conditions' => '["and", ["or", ["or", {"match": "gt", "name": "age", "type": "custom_attribute", "value": 20}]]]',
+        'name' => 'adult'
+      }
+    ],
+    'attributes' => [
+      {
+        'id' => '10401066117',
+        'key' => 'gender'
+      },
+      {
+        'id' => '10401066170',
+        'key' => 'testvar'
+      },
+      {
+        'id' => '10401066171',
+        'key' => 'age'
+      }
+    ],
+    'accountId' => '10367498574',
+    'events' => [],
+    'revision' => '101'
+  }.freeze
+
   SIMILAR_EXP_KEYS = {
     'version' => '4',
     'rollouts' => [],
@@ -1735,4 +1928,6 @@ module OptimizelySpec
   DECIDE_FORCED_DECISION_JSON = JSON.dump(DECIDE_FORCED_DECISION)
   # SEND_FLAG_DECISIONS_DISABLED_CONFIG = VALID_CONFIG_BODY.dup
   # SEND_FLAG_DECISIONS_DISABLED_CONFIG['sendFlagDecisions'] = false
+
+  CONFIG_DICT_WITH_INTEGRATIONS_JSON = JSON.dump(CONFIG_DICT_WITH_INTEGRATIONS)
 end
diff --git a/spec/custom_attribute_condition_evaluator_spec.rb b/spec/user_condition_evaluator_spec.rb
similarity index 63%
rename from spec/custom_attribute_condition_evaluator_spec.rb
rename to spec/user_condition_evaluator_spec.rb
index d64db611..d928cce3 100644
--- a/spec/custom_attribute_condition_evaluator_spec.rb
+++ b/spec/user_condition_evaluator_spec.rb
@@ -20,16 +20,23 @@
 require 'optimizely/helpers/validator'
 require 'optimizely/logger'
 
-describe Optimizely::CustomAttributeConditionEvaluator do
+describe Optimizely::UserConditionEvaluator do
   let(:spy_logger) { spy('logger') }
+  let(:config_body_JSON) { OptimizelySpec::VALID_CONFIG_BODY_JSON }
+  let(:error_handler) { Optimizely::NoOpErrorHandler.new }
+  let(:spy_logger) { spy('logger') }
+  let(:project_instance) { Optimizely::Project.new(config_body_JSON, nil, spy_logger, error_handler) }
+  let(:user_context) { project_instance.create_user_context('some-user', {}) }
 
   it 'should return true when the attributes pass the audience conditions and no match type is provided' do
-    condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'browser_type' => 'safari'}, spy_logger)
+    user_context.instance_variable_set(:@user_attributes, 'browser_type' => 'safari')
+    condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
     expect(condition_evaluator.evaluate('name' => 'browser_type', 'type' => 'custom_attribute', 'value' => 'safari')).to be true
   end
 
   it 'should return false when the attributes pass the audience conditions and no match type is provided' do
-    condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'browser_type' => 'firefox'}, spy_logger)
+    user_context.instance_variable_set(:@user_attributes, 'browser_type' => 'firefox')
+    condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
     expect(condition_evaluator.evaluate('name' => 'browser_type', 'type' => 'custom_attribute', 'value' => 'safari')).to be false
   end
 
@@ -40,7 +47,8 @@
       'num_users' => 10,
       'pi_value' => 3.14
     }
-    condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new(user_attributes, spy_logger)
+    user_context.instance_variable_set(:@user_attributes, user_attributes)
+    condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
 
     expect(condition_evaluator.evaluate('name' => 'browser_type', 'type' => 'custom_attribute', 'value' => 'safari')).to be true
     expect(condition_evaluator.evaluate('name' => 'is_firefox', 'type' => 'custom_attribute', 'value' => true)).to be true
@@ -50,7 +58,8 @@
 
   it 'should log and return nil when condition has an invalid type property' do
     condition = {'match' => 'exact', 'name' => 'weird_condition', 'type' => 'weird', 'value' => 'hi'}
-    condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'weird_condition' => 'bye'}, spy_logger)
+    user_context.instance_variable_set(:@user_attributes, 'weird_condition' => 'bye')
+    condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
     expect(condition_evaluator.evaluate(condition)).to eq(nil)
     expect(spy_logger).to have_received(:log).exactly(1).times
     expect(spy_logger).to have_received(:log).once.with(
@@ -62,7 +71,8 @@
 
   it 'should log and return nil when condition has no type property' do
     condition = {'match' => 'exact', 'name' => 'weird_condition', 'value' => 'hi'}
-    condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'weird_condition' => 'bye'}, spy_logger)
+    user_context.instance_variable_set(:@user_attributes, 'weird_condition' => 'bye')
+    condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
     expect(condition_evaluator.evaluate(condition)).to eq(nil)
     expect(spy_logger).to have_received(:log).exactly(1).times
     expect(spy_logger).to have_received(:log).once.with(
@@ -74,7 +84,8 @@
 
   it 'should log and return nil when condition has an invalid match property' do
     condition = {'match' => 'invalid', 'name' => 'browser_type', 'type' => 'custom_attribute', 'value' => 'chrome'}
-    condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'browser_type' => 'chrome'}, spy_logger)
+    user_context.instance_variable_set(:@user_attributes, 'browser_type' => 'chrome')
+    condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
     expect(condition_evaluator.evaluate(condition)).to eq(nil)
     expect(spy_logger).to have_received(:log).once.with(
       Logger::WARN,
@@ -89,31 +100,36 @@
     end
 
     it 'should return false if there is no user-provided value' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({}, spy_logger)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exists_conditions)).to be false
       expect(spy_logger).not_to have_received(:log)
     end
 
     it 'should return false if the user-provided value is nil' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => nil}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'input_value' => nil)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
      expect(condition_evaluator.evaluate(@exists_conditions)).to be false
     end
 
     it 'should return true if the user-provided value is a string' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 'test'}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'input_value' => 'test')
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exists_conditions)).to be true
     end
 
     it 'should return true if the user-provided value is a number' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 10}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'input_value' => 10)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exists_conditions)).to be true
 
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 10.0}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'input_value' => 10.0)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exists_conditions)).to be true
     end
 
     it 'should return true if the user-provided value is a boolean' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => false}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'input_value' => false)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exists_conditions)).to be true
     end
   end
@@ -125,17 +141,20 @@
     end
 
     it 'should return true if the user-provided value is equal to the condition value' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'location' => 'san francisco'}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'location' => 'san francisco')
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
      expect(condition_evaluator.evaluate(@exact_string_conditions)).to be true
     end
 
     it 'should return false if the user-provided value is not equal to the condition value' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'location' => 'new york'}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'location' => 'new york')
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_string_conditions)).to be false
     end
 
     it 'should log and return nil if the user-provided value is of a different type than the condition value' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'location' => false}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'location' => false)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_string_conditions)).to eq(nil)
       expect(spy_logger).to have_received(:log).once.with(
         Logger::WARN,
@@ -144,7 +163,7 @@
     end
 
     it 'should log and return nil if there is no user-provided value' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({}, spy_logger)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_string_conditions)).to eq(nil)
       expect(spy_logger).to have_received(:log).once.with(
        Logger::DEBUG,
@@ -154,7 +173,8 @@
 
     it 'should log and return nil if the user-provided value is of a unexpected type' do
       # attribute value: nil
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'location' => []}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'location' => [])
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_string_conditions)).to eq(nil)
       expect(spy_logger).to have_received(:log).once.with(
         Logger::WARN,
@@ -163,7 +183,8 @@
       )
 
       # attribute value: empty hash
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'location' => {}}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'location' => {})
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_string_conditions)).to eq(nil)
       expect(spy_logger).to have_received(:log).once.with(
         Logger::WARN,
@@ -181,41 +202,47 @@
 
     it 'should return true if the user-provided value is equal to the condition value' do
       # user-provided integer value
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'sum' => 100}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'sum' => 100)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_integer_conditions)).to be true
       expect(condition_evaluator.evaluate(@exact_float_conditions)).to be true
 
       # user-provided float value
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'sum' => 100.0}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'sum' => 100.0)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_integer_conditions)).to be true
       expect(condition_evaluator.evaluate(@exact_float_conditions)).to be true
     end
 
     it 'should return false if the user-provided value is not equal to the condition value' do
       # user-provided integer value
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'sum' => 101}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'sum' => 101)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_integer_conditions)).to be false
 
       # user-provided float value
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'sum' => 100.1}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'sum' => 100.1)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_float_conditions)).to be false
     end
 
     it 'should return nil if the user-provided value is of a different type than the condition value' do
       # user-provided boolean value
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'sum' => false}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'sum' => false)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_integer_conditions)).to eq(nil)
       expect(condition_evaluator.evaluate(@exact_float_conditions)).to eq(nil)
     end
 
     it 'should return nil if there is no user-provided value' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({}, spy_logger)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_integer_conditions)).to eq(nil)
       expect(condition_evaluator.evaluate(@exact_float_conditions)).to eq(nil)
     end
 
     it 'should return nil when user-provided value is infinite' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'sum' => 1 / 0.0}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'sum' => 1 / 0.0)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_float_conditions)).to be nil
 
       expect(spy_logger).to have_received(:log).once.with(
@@ -228,7 +255,8 @@
     it 'should not return nil when finite_number? returns true for provided arguments' do
       @exact_integer_conditions['value'] = 10
       allow(Optimizely::Helpers::Validator).to receive(:finite_number?).twice.and_return(true, true)
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'sum' => 10}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'sum' => 10)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_integer_conditions)).not_to be_nil
     end
   end
@@ -239,27 +267,31 @@
     end
 
     it 'should return true if the user-provided value is equal to the condition value' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'boolean' => false}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'boolean' => false)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_boolean_conditions)).to be true
     end
 
     it 'should return false if the user-provided value is not equal to the condition value' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'boolean' => true}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'boolean' => true)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_boolean_conditions)).to be false
     end
 
     it 'should return nil if the user-provided value is of a different type than the condition value' do
       # user-provided integer value
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'boolean' => 10}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'boolean' => 10)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_boolean_conditions)).to eq(nil)
 
       # user-provided float value
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'boolean' => 10.0}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'boolean' => 10.0)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_boolean_conditions)).to eq(nil)
     end
 
     it 'should return nil if there is no user-provided value' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({}, spy_logger)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@exact_boolean_conditions)).to eq(nil)
     end
   end
@@ -271,27 +303,31 @@
     end
 
     it 'should return true if the condition value is a substring of the user-provided value' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'text' => 'This is a test message!'}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'text' => 'This is a test message!')
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@substring_conditions)).to be true
     end
 
     it 'should return false if the user-provided value is not a substring of the condition value' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'text' => 'Not found!'}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'text' => 'Not found!')
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@substring_conditions)).to be false
     end
 
     it 'should return nil if the user-provided value is not a string' do
       # user-provided integer value
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'text' => 10}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'text' => 10)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
      expect(condition_evaluator.evaluate(@substring_conditions)).to eq(nil)
 
       # user-provided float value
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'text' => 10.0}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'text' => 10.0)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@substring_conditions)).to eq(nil)
     end
 
     it 'should log and return nil if there is no user-provided value' do
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({}, spy_logger)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@substring_conditions)).to eq(nil)
       expect(spy_logger).to have_received(:log).once.with(
         Logger::DEBUG,
@@ -301,7 +337,8 @@
 
     it 'should log and return nil if there user-provided value is of a unexpected type' do
       # attribute value: nil
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'text' => nil}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'text' => nil)
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@substring_conditions)).to eq(nil)
       expect(spy_logger).to have_received(:log).once.with(
         Logger::DEBUG,
@@ -309,7 +346,8 @@
       )
 
       # attribute value: empty hash
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'text' => {}}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'text' => {})
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@substring_conditions)).to eq(nil)
       expect(spy_logger).to have_received(:log).once.with(
         Logger::WARN,
@@ -320,7 +358,8 @@
 
     it 'should log and return nil when condition value is invalid' do
       @substring_conditions['value'] = 5
-      condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'text' => 'This is a test message!'}, spy_logger)
+      user_context.instance_variable_set(:@user_attributes, 'text' => 'This is a test message!')
+      condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger)
       expect(condition_evaluator.evaluate(@substring_conditions)).to be_nil
       expect(spy_logger).to have_received(:log).once.with(
         Logger::WARN,
@@
-338,48 +377,55 @@ it 'should return true if the user-provided value is greater than the condition value' do # user-provided integer value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 12}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 12) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to be true expect(condition_evaluator.evaluate(@gt_float_conditions)).to be true # user-provided float value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 12.0}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 12.0) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to be true expect(condition_evaluator.evaluate(@gt_float_conditions)).to be true end it 'should return false if the user-provided value is equal to condition value' do # user-provided integer value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 10}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 10) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to be false expect(condition_evaluator.evaluate(@gt_float_conditions)).to be false # user-provided float value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 10.0}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 10.0) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to be false expect(condition_evaluator.evaluate(@gt_float_conditions)).to be false end it 'should return true if the user-provided value is less than the condition value' do # user-provided integer value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 8}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 8) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to be false expect(condition_evaluator.evaluate(@gt_float_conditions)).to be false # user-provided float value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 8.0}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 8.0) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to be false expect(condition_evaluator.evaluate(@gt_float_conditions)).to be false end it 'should return nil if the user-provided value is not a number' do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 'test'}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 'test') + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to eq(nil) expect(condition_evaluator.evaluate(@gt_float_conditions)).to eq(nil) end it 'should log and return nil if there is no user-provided value' do - 
condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({}, spy_logger) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to eq(nil) expect(condition_evaluator.evaluate(@gt_float_conditions)).to eq(nil) expect(spy_logger).to have_received(:log).once.with( @@ -394,7 +440,8 @@ it 'should log and return nil if there user-provided value is of a unexpected type' do # attribute value: nil - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => nil}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => nil) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to eq(nil) expect(spy_logger).to have_received(:log).once.with( Logger::DEBUG, @@ -403,7 +450,8 @@ ) # attribute value: empty hash - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => {}}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => {}) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to eq(nil) expect(spy_logger).to have_received(:log).once.with( Logger::WARN, @@ -413,7 +461,8 @@ end it 'should return nil when user-provided value is infinite' do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 1 / 0.0}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 1 / 0.0) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to be nil expect(spy_logger).to have_received(:log).once.with( @@ -426,13 +475,15 @@ it 'should not return nil when finite_number? 
returns true for provided arguments' do @gt_integer_conditions['value'] = 81 allow(Optimizely::Helpers::Validator).to receive(:finite_number?).twice.and_return(true, true) - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 51}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 51) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).not_to be_nil end it 'should log and return nil when condition value is infinite' do @gt_integer_conditions['value'] = 1 / 0.0 - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 51}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 51) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to be_nil expect(spy_logger).to have_received(:log).once.with( Logger::WARN, @@ -450,36 +501,42 @@ it 'should return true if the user-provided value is greater than the condition value' do # user-provided integer value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 12}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 12) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to be true expect(condition_evaluator.evaluate(@gt_float_conditions)).to be true # user-provided float value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 12.0}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 12.0) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to be true expect(condition_evaluator.evaluate(@gt_float_conditions)).to be true end it 'should return true if the user-provided value is equal to condition value' do # user-provided integer value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 10}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 10) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to be true expect(condition_evaluator.evaluate(@gt_float_conditions)).to be true # user-provided float value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 10.0}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 10.0) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to be true expect(condition_evaluator.evaluate(@gt_float_conditions)).to be true end it 'should return false if the user-provided value is less than the condition value' do # user-provided integer value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 8}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 8) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to be false 
expect(condition_evaluator.evaluate(@gt_float_conditions)).to be false # user-provided float value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 8.0}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 8.0) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@gt_integer_conditions)).to be false expect(condition_evaluator.evaluate(@gt_float_conditions)).to be false end @@ -493,48 +550,55 @@ it 'should return true if the user-provided value is less than the condition value' do # user-provided integer value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 8}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 8) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to be true expect(condition_evaluator.evaluate(@lt_float_conditions)).to be true # user-provided float value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 8.0}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 8.0) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to be true expect(condition_evaluator.evaluate(@lt_float_conditions)).to be true end it 'should return false if the user-provided value is equal to condition value' do # user-provided integer value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 10}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 10) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to be false expect(condition_evaluator.evaluate(@lt_float_conditions)).to be false # user-provided float value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 10.0}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 10.0) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to be false expect(condition_evaluator.evaluate(@lt_float_conditions)).to be false end it 'should return false if the user-provided value is greater than the condition value' do # user-provided integer value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 12}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 12) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to be false expect(condition_evaluator.evaluate(@lt_float_conditions)).to be false # user-provided float value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 12.0}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 12.0) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to be false expect(condition_evaluator.evaluate(@lt_float_conditions)).to be false end it 'should return nil if the user-provided 
value is not a number' do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 'test'}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 'test') + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to eq(nil) expect(condition_evaluator.evaluate(@lt_float_conditions)).to eq(nil) end it 'should log and return nil if there is no user-provided value' do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({}, spy_logger) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to eq(nil) expect(condition_evaluator.evaluate(@lt_float_conditions)).to eq(nil) expect(spy_logger).to have_received(:log).once.with( @@ -549,7 +613,8 @@ it 'should log and return nil if there user-provided value is of a unexpected type' do # attribute value: nil - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => nil}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => nil) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to eq(nil) expect(spy_logger).to have_received(:log).once.with( Logger::DEBUG, @@ -558,7 +623,8 @@ ) # attribute value: empty hash - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => {}}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => {}) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to eq(nil) expect(spy_logger).to have_received(:log).once.with( Logger::WARN, @@ -568,7 +634,8 @@ end it 'should return nil when user-provided value is infinite' do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 1 / 0.0}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 1 / 0.0) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to be nil expect(spy_logger).to have_received(:log).once.with( @@ -581,13 +648,15 @@ it 'should not return nil when finite_number? 
returns true for provided arguments' do @lt_integer_conditions['value'] = 65 allow(Optimizely::Helpers::Validator).to receive(:finite_number?).twice.and_return(true, true) - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 75}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 75) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).not_to be_nil end it 'should log and return nil when condition value is infinite' do @lt_integer_conditions['value'] = 1 / 0.0 - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 51}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 51) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to be_nil expect(spy_logger).to have_received(:log).once.with( Logger::WARN, @@ -605,36 +674,42 @@ it 'should return false if the user-provided value is greater than the condition value' do # user-provided integer value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 12}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 12) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to be false expect(condition_evaluator.evaluate(@lt_float_conditions)).to be false # user-provided float value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 12.0}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 12.0) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to be false expect(condition_evaluator.evaluate(@lt_float_conditions)).to be false end it 'should return true if the user-provided value is equal to condition value' do # user-provided integer value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 10}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 10) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to be true expect(condition_evaluator.evaluate(@lt_float_conditions)).to be true # user-provided float value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 10.0}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 10.0) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to be true expect(condition_evaluator.evaluate(@lt_float_conditions)).to be true end it 'should return true if the user-provided value is less than the condition value' do # user-provided integer value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 8}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 8) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to be true 
expect(condition_evaluator.evaluate(@lt_float_conditions)).to be true # user-provided float value - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'input_value' => 8.0}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'input_value' => 8.0) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@lt_integer_conditions)).to be true expect(condition_evaluator.evaluate(@lt_float_conditions)).to be true end @@ -647,14 +722,16 @@ ['2.0.0', '2.0'].each do |version| it "should return true for user version #{version}" do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'version' => version}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'version' => version) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@semver_condition)).to be true end end ['2.9', '1.9'].each do |version| it "should return false for user version #{version}" do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'version' => version}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'version' => version) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@semver_condition)).to be false end end @@ -667,14 +744,16 @@ ['2.0.0', '1.9'].each do |version| it "should return true for user version #{version}" do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'version' => version}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'version' => version) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@semver_condition)).to be true end end ['2.5.1'].each do |version| it "should return false for user version #{version}" do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'version' => version}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'version' => version) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@semver_condition)).to be false end end @@ -687,14 +766,16 @@ ['2.0.0', '2.9'].each do |version| it "should return true for user version #{version}" do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'version' => version}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'version' => version) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@semver_condition)).to be true end end ['1.9'].each do |version| it "should return false for user version #{version}" do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'version' => version}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'version' => version) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@semver_condition)).to be false end end @@ -707,14 +788,16 @@ ['1.9'].each do |version| it "should return true for user version #{version}" do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'version' => version}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'version' => version) + condition_evaluator = 
Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@semver_condition)).to be true end end ['2.0.0', '2.5.1'].each do |version| it "should return false for user version #{version}" do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'version' => version}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'version' => version) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@semver_condition)).to be false end end @@ -727,14 +810,16 @@ ['2.9'].each do |version| it "should return true for user version #{version}" do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'version' => version}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'version' => version) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@semver_condition)).to be true end end ['2.0.0', '1.9'].each do |version| it "should return false for user version #{version}" do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'version' => version}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'version' => version) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@semver_condition)).to be false end end @@ -748,7 +833,8 @@ # version not string [true, 37].each do |version| it "should return nil for user version #{version}" do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'version' => version}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'version' => version) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@semver_condition)).to be nil expect(spy_logger).to have_received(:log).once.with( Logger::WARN, @@ -760,7 +846,8 @@ # invalid semantic version ['3.7.2.2', '+'].each do |version| it "should return nil for user version #{version}" do - condition_evaluator = Optimizely::CustomAttributeConditionEvaluator.new({'version' => version}, spy_logger) + user_context.instance_variable_set(:@user_attributes, 'version' => version) + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) expect(condition_evaluator.evaluate(@semver_condition)).to be nil expect(spy_logger).to have_received(:log).once.with( Logger::WARN, @@ -769,4 +856,45 @@ end end end + describe 'qualified match type' do + before(:context) do + @qualified_conditions = {'match' => 'qualified', 'name' => 'odp.audiences', 'type' => 'third_party_dimension', 'value' => 'odp-segment-2'} + end + + it 'should return true when user is qualified' do + user_context.qualified_segments = ['odp-segment-2'] + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) + expect(condition_evaluator.evaluate(@qualified_conditions)).to be true + end + + it 'should return false when user is not qualified' do + user_context.qualified_segments = ['odp-segment-1'] + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) + expect(condition_evaluator.evaluate(@qualified_conditions)).to be false + end + + it 'should return false with no qualified segments' do + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) + 
expect(condition_evaluator.evaluate(@qualified_conditions)).to be false + end + + it 'should return true when name is different' do + @qualified_conditions['name'] = 'other-name' + user_context.qualified_segments = ['odp-segment-2'] + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) + expect(condition_evaluator.evaluate(@qualified_conditions)).to be true + end + + it 'should log and return nil when condition value is invalid' do + @qualified_conditions['value'] = 5 + user_context.instance_variable_set(:@user_attributes, 'text' => 'This is a test message!') + condition_evaluator = Optimizely::UserConditionEvaluator.new(user_context, spy_logger) + expect(condition_evaluator.evaluate(@qualified_conditions)).to be_nil + expect(spy_logger).to have_received(:log).once.with( + Logger::WARN, + "Audience condition #{@qualified_conditions} has an unsupported condition value. You may need to upgrade "\ + 'to a newer release of the Optimizely SDK.' + ) + end + end end
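
Note (illustrative, not part of the patch): a minimal sketch of how the new 'qualified' audience condition is expected to be exercised end to end, assuming the qualified_segments accessor used in the spec above; the SDK key, flag key, and the presence of an ODP 'qualified' audience in the datafile are placeholders/assumptions, not taken from this change.

    # Illustrative sketch only -- 'MY_SDK_KEY' and 'flag_1' are hypothetical, and the datafile is
    # assumed to define an audience with a {'match' => 'qualified', 'name' => 'odp.audiences', ...} condition.
    require 'optimizely'
    require 'optimizely/optimizely_factory'

    optimizely_client = Optimizely::OptimizelyFactory.default_instance('MY_SDK_KEY')
    user_context = optimizely_client.create_user_context('user_1', 'location' => 'san francisco')

    # Segments would normally be fetched from ODP; they are assigned directly here for illustration.
    user_context.qualified_segments = ['odp-segment-2']

    # Audience evaluation for this decision now consults qualified_segments via UserConditionEvaluator.
    decision = user_context.decide('flag_1')
    puts decision.variation_key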