diff --git a/demo/skewed_nback.py b/demo/skewed_nback.py
index 61e32ba..44dcfa1 100644
--- a/demo/skewed_nback.py
+++ b/demo/skewed_nback.py
@@ -1,8 +1,8 @@
 import expyriment
 from expyriment import design, control, stimuli, misc
-from generators import skewed_random
+from generators import nb_gm_002
 
-nback_sequence = skewed_random.SequenceGenerator().generate()
+nback_sequence = nb_gm_002.SequenceGenerator().generate()
 
 exp = design.Experiment("Skewed N-Back Task")
 control.initialize(exp)
diff --git a/generators/nb_gm_001.py b/generators/nb_gm_001.py
new file mode 100644
index 0000000..3487fd0
--- /dev/null
+++ b/generators/nb_gm_001.py
@@ -0,0 +1,64 @@
+import random
+import csv
+
+import heapq
+
+
+class SequenceGenerator:
+    """Generate N-Back sequences with random sampling, but increase the matching probability over time."""
+
+    def __init__(
+            self,
+            choices: list,
+            n=2,
+            trials=24,
+            target_probability_start=0.33,
+            target_probability_end=0.50
+    ):
+        self.n, self.choices, self.trials = n, choices, trials
+        # per-trial increment, so the matching probability reaches target_probability_end on the last trial
+        self.target_probability_step = (target_probability_end - target_probability_start) / trials
+        self.target_probability = target_probability_start
+        self.seq = []
+
+    def generate(self) -> list:
+        self.seq = []
+        for t in range(self.trials):
+            self.seq += self.random_sample()
+        return self.seq
+
+    def random_sample(self):
+        is_target = (random.random() < self.target_probability)
+        self.target_probability += self.target_probability_step
+        if is_target and len(self.seq) >= self.n:
+            # target trial: repeat the stimulus shown n trials ago
+            return [self.seq[-self.n]]
+        # non-target trial: sample any stimulus that would not accidentally create a match
+        choices = [item for item in self.choices if len(self.seq) < self.n or item != self.seq[-self.n]]
+        return [random.choice(choices)]
+
+
+def __test_generate_stat_csv(filename):
+    alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']
+    trials = 24
+    n = 2
+    with open(filename, mode='w') as stat_dist_file:
+        writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
+        writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed'])
+        for i in range(1000):
+            generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials)
+            seq = generator.generate()
+            dist = [float(seq.count(c)) for c in alphabetic_choices]
+            ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3)
+            writer.writerow([str(i)] + dist + [str(ralph_skewed)])
+
+
+if __name__ == '__main__':
+    __test_generate_stat_csv('../stat/nb_gm_001_2back_24trials.csv')
diff --git a/generators/nb_gm_002.py b/generators/nb_gm_002.py
new file mode 100644
index 0000000..26fb115
--- /dev/null
+++ b/generators/nb_gm_002.py
@@ -0,0 +1,81 @@
+import logging
+import random
+import csv
+
+import heapq
+
+class SequenceGenerator:
+    """nb_gm_002 generator
+    Generates a skewed random sequence of stimuli for the n-back task, based on Ralph (2014).
+    Each sequence contains a specific fraction of matched trials (targets).
+    """
+
+    def __init__(
+            self,
+            choices: list,
+            n=2,
+            trials=24,   # Number of total trials
+            targets=8,   # Number of targets
+            lures1=2,    # Number of lures (foils) similar to the (N+1)-back
+            lures2=2     # Number of lures (foils) similar to the (N-1)-back
+    ):
+        self.n, self.choices, self.trials, self.targets, self.lures1, self.lures2 = n, choices, trials, targets, lures1, lures2
+        self.distractors = trials - targets - lures1 - lures2
+        self.seq = []
+
+    def generate(self) -> list:
+        trial = 1
+        self.seq = []
+        while trial <= self.trials:
+            self.seq += self.random_stimulus(trial)
+            trial += 1
+        return self.seq
+
+    def random_stimulus(self, trial):
+        rnd = random.randint(1, self.trials - trial + 1)
+        targets, lures1, lures2 = self.targets, self.lures1, self.lures2
+        if rnd <= targets and len(self.seq) >= self.n:
+            self.targets -= 1
+            return self.seq[-self.n]
+        elif targets < rnd <= targets + lures1 and len(self.seq) >= self.n + 1:
+            self.lures1 -= 1
+            return self.seq[-(self.n+1)]
+        elif targets + lures1 < rnd <= targets + lures1 + lures2 and len(self.seq) >= self.n - 1:
+            self.lures2 -= 1
+            return self.seq[-(self.n-1)]
+
+        # distractor: any choice that does not accidentally create a target or lure
+        self.distractors -= 1
+        choices = [item for item in self.choices if item not in self.seq[-self.n - 1:-self.n + 1]]
+        return random.choice(choices)
+
+    def count_targets_and_lures(self):
+        n = self.n
+        seq = self.seq
+        targets = 0.0
+        lures = 0.0
+        for index in range(n, len(seq)):
+            if seq[index] == seq[index - n]:
+                targets += 1.0
+            elif seq[index] == seq[index - (n-1)] or seq[index] == seq[index - (n+1)]:
+                lures += 1.0
+        return targets, lures
+
+
+def __test_generate_stat_csv(filename):
+    alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']
+    trials = 24
+    n = 2
+    with open(filename, mode='w') as stat_dist_file:
+        writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
+        writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed'])
+        for i in range(1000):
+            generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials)
+            seq = generator.generate()
+            dist = [float(seq.count(c)) for c in alphabetic_choices]
+            ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3)
+            writer.writerow([str(i)] + dist + [str(ralph_skewed)])
+
+
+if __name__ == '__main__':
+    __test_generate_stat_csv('../stat/nb_gm_002_2back_24trials.csv')
diff --git a/generators/nb_gm_003.py b/generators/nb_gm_003.py
new file mode 100644
index 0000000..fce1793
--- /dev/null
+++ b/generators/nb_gm_003.py
@@ -0,0 +1,89 @@
+import random
+import scipy.stats
+
+
+class SequenceGenerator:
+    """nb_gm_003
+    Generates a sequence of trials with an even distribution of stimuli.
+    """
+
+    def __init__(self, choices, trials, n=3, targets_ratio=0.33):
+        self.trials, self.choices, self.n, self.targets_ratio = trials, choices, n, targets_ratio
+        self.seq = []
+        self.norm_even_dist = scipy.stats.norm(0, trials/2)
+        self.norm_targets_ratio_dist = scipy.stats.norm(targets_ratio, 0.1)
+
+    def generate(self):
+        while not self.seq or len(self.seq) < self.trials:
+            self.seq = self.__find_best_next_sequence(self.seq, self.choices)
+        return self.seq
+
+    def __find_best_next_sequence(self, seq: list, choices: list) -> list:
+        import sys
+        min_cost = sys.float_info.max
+        best_seq = seq
+        random.shuffle(choices)  # to avoid ordering effects
+        for choice in choices:
+            tmp_seq = seq + list(choice)
+            cost = self.__cost(tmp_seq)
+            if cost < min_cost:
+                min_cost = cost
+                best_seq = tmp_seq
+        return best_seq
+
+    def __cost(self, seq):
+        return self.__even_dist_cost(seq) + self.__target_ratio_cost(seq)
+
+    def __even_dist_cost(self, seq):
+        counts = {c: 0.0 for c in self.choices}
+        for c in seq:
+            counts[c] += 1.0
+        even_ratio = self.trials / len(self.choices)
+        costs = {k: abs(v - even_ratio)/self.trials for k, v in counts.items()}
+        max_cost = max(costs.values())
+        return 1.0 - self.norm_even_dist.pdf(max_cost)
+
+    def __target_ratio_cost(self, seq):
+        targets, _ = self.count_targets_and_lures(seq)
+        return 1.0 - self.norm_targets_ratio_dist.pdf(targets/self.trials)
+
+    def count_targets_and_lures(self, seq):
+        n = self.n
+        targets = 0.0
+        lures = 0.0
+        for index in range(n, len(seq)):
+            if seq[index] == seq[index - n]:
+                targets += 1.0
+            elif seq[index] == seq[index - (n-1)] or seq[index] == seq[index - (n+1)]:
+                lures += 1.0
+        return targets, lures
+
+    def calc_tl_ratio(self, seq):
+        """Calculates the target/lure (T/L) ratio in a block of trials."""
+        targets, lures = self.count_targets_and_lures(seq)
+        if lures < 0.01:  # avoid division by zero
+            lures = 0.01
+        return targets/lures
+
+
+def __generate_stat_csv(filename):
+    alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']
+    trials = 24
+    n = 2
+    import csv
+    import heapq
+    with open(filename, mode='w') as stat_dist_file:
+        writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
+        writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed'])
+        for i in range(100):
+            generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials)
+            seq = generator.generate()
+            dist = [float(seq.count(c)) for c in alphabetic_choices]
+            ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3)
+            writer.writerow([str(i)] + dist + [str(ralph_skewed)])
+
+
+if __name__ == '__main__':
+    __generate_stat_csv('../stat/nb_gm_003_2back_24trials.csv')
+ """ + + def __init__( + self, + choices: list, + n=2, + trials=24, # Number of total trials + targets=8, # Number of targets + lures1=2, # Number of lures (foil) similar to the (N+1)-back + lures2=2 # Number of lures (foil) similar to the (N-1)-back + ): + self.n, self.choices, self.trials, self.targets, self.lures1, self.lures2 = n, choices, trials, targets, lures1, lures2 + self.distractors = trials - targets - lures1 - lures2 + self.seq = [] + + def generate(self) -> list: + trial = 1 + self.seq = [] + while trial <= self.trials: + self.seq += self.random_stimulus(trial) + trial += 1 + return self.seq + + def random_stimulus(self, trial): + rnd = random.randint(1, self.trials - trial + 1) + targets, lures1, lures2 = self.targets, self.lures1, self.lures2 + if rnd <= targets and len(self.seq) >= self.n: + self.targets -= 1 + return self.seq[-self.n] + elif targets < rnd <= targets + lures1 and len(self.seq) >= self.n + 1: + self.lures1 -= 1 + return self.seq[-(self.n+1)] + elif targets + lures1 < rnd <= targets + lures1 + lures2 and len(self.seq) >= self.n - 1: + self.lures2 -= 1 + return self.seq[-(self.n-1)] + + # distract + self.distractors -= 1 + choices = [item for item in self.choices if item not in self.seq[-self.n - 1:-self.n + 1]] + return random.choice(choices) + + def count_targets_and_lures(self): + n = self.n + seq = self.seq + targets = 0.0 + lures = 0.0 + for index in range(n, len(seq)): + if seq[index] == seq[index - n]: + targets += 1.0 + elif seq[index] == seq[index - (n-1)] or seq[index] == seq[index - (n+1)]: + lures += 1.0 + return targets, lures + + +def __test_generate_stat_csv(filename): + alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] + trials = 24 + n = 2 + with open(filename, mode='w') as stat_dist_file: + writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) + writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed']) + for i in range(1000): + generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials) + seq = generator.generate() + dist = [float(seq.count(c)) for c in alphabetic_choices] + ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3) + writer.writerow([str(i)] + dist + [str(ralph_skewed)]) + + +if __name__ == '__main__': + __test_generate_stat_csv('../stat/nb_gm_002_2back_24trials.csv') diff --git a/generators/nb_gm_003.py b/generators/nb_gm_003.py new file mode 100644 index 0000000..fce1793 --- /dev/null +++ b/generators/nb_gm_003.py @@ -0,0 +1,89 @@ +import random +import scipy.stats + + +class SequenceGenerator: + """nb_gm_003 + Generates a sequence of trials with even distribution of stimuli. 
+ """ + + def __init__(self, choices, trials, n=3, targets_ratio=0.33): + self.trials, self.choices, self.n, self.targets_ratio = trials, choices, n, targets_ratio + self.seq = [] + self.norm_even_dist = scipy.stats.norm(0, trials/2) + self.norm_targets_ratio_dist = scipy.stats.norm(targets_ratio, 0.1) + + def generate(self): + print('next') + while not self.seq or len(self.seq) < self.trials: + self.seq = self.__find_best_next_sequence(self.seq, self.choices) + return self.seq + + def __find_best_next_sequence(self, seq: list, choices: list) -> list: + import sys + min_cost = sys.float_info.max + best_seq = seq + random.shuffle(choices) # to avoid ordering effect + for choice in choices: + tmp_seq = seq + list(choice) + cost = self.__cost(tmp_seq) + if cost < min_cost: + min_cost = cost + best_seq = tmp_seq + return best_seq + + def __cost(self, seq): + return self.__even_dist_cost(seq) + self.__target_ratio_cost(seq) + + + def __even_dist_cost(self, seq): + costs = {c: 0.0 for c in self.choices} + for c in list(seq): + costs[c] += (1.0 if costs.__contains__(c) else 0.0) + even_ratio = self.trials / len(self.choices) + costs = {k: abs(v - even_ratio)/self.trials for k, v in costs.items()} + max_cost = max(list(costs.values())) + return 1.0 - self.norm_even_dist.pdf(max_cost) + + def __target_ratio_cost(self, seq): + targets, _ = self.count_targets_and_lures(seq) + return 1.0 - self.norm_targets_ratio_dist.pdf(targets/self.trials) + + def count_targets_and_lures(self, seq): + n = self.n + targets = 0.0 + lures = 0.0 + for index in range(n, len(seq)): + if seq[index] == seq[index - n]: + targets += 1.0 + elif seq[index] == seq[index - (n-1)] or seq[index] == seq[index - (n+1)]: + lures += 1.0 + return targets, lures + + def calc_tl_ratio(self, seq): + """Calculates the T/L ratio in a block of trials.""" + targets, lures = self.count_targets_and_lures(seq) + if lures < 0.01: # avoid division by zero + lures = 0.01 + return targets/lures + + +def __generate_stat_csv(filename): + alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] + trials = 24 + n = 2 + import csv + import heapq + with open(filename, mode='w') as stat_dist_file: + writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) + writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed']) + for i in range(100): + generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials) + seq = generator.generate() + dist = [float(seq.count(c)) for c in alphabetic_choices] + ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3) + writer.writerow([str(i)] + dist + [str(ralph_skewed)]) + + +if __name__ == '__main__': + __generate_stat_csv('../stat/nb_gm_003_2back_24trials.csv') diff --git a/generators/nb_gm_004.py b/generators/nb_gm_004.py new file mode 100644 index 0000000..bbac64f --- /dev/null +++ b/generators/nb_gm_004.py @@ -0,0 +1,96 @@ +import random +import scipy.stats + + +class SequenceGenerator: + """nb_gm_003 + Generates a sequence of trials with even distribution of stimuli. 
+ """ + + def __init__(self, choices, trials, n=3, targets_ratio=0.33): + self.trials, self.choices, self.n, self.targets_ratio = trials, choices, n, targets_ratio + self.seq = [] + self.norm_even_dist = scipy.stats.norm(0, trials/2) + self.norm_targets_ratio_dist = scipy.stats.norm(targets_ratio, 0.5) + + def generate(self): + print('next') + while not self.seq or len(self.seq) < self.trials: + self.seq = self.__find_best_next_sequence(self.seq, self.choices) + return self.seq + + def __find_best_next_sequence(self, seq: list, choices: list) -> list: + import sys + min_cost = sys.float_info.max + best_seq = seq + random.shuffle(choices) # to avoid ordering effect + for choice in choices: + tmp_seq = seq + list(choice) + cost = self.__cost(tmp_seq) + if cost < min_cost: + min_cost = cost + best_seq = tmp_seq + return best_seq + + def __even_distribution_cost(self, seq): + costs = {c: 0.0 for c in self.choices} + for c in list(seq): + costs[c] += (1.0 if costs.__contains__(c) else 0.0) + even_ratio = self.trials / len(self.choices) + costs = {k: abs(v - even_ratio)/self.trials for k, v in costs.items()} + max_cost = max(list(costs.values())) + return 1.0 - self.norm_even_dist.pdf(max_cost) + + def cost(self, seq): + """ + Calculate overall fitness of a sequence (block of trials). + Right now it's a cost function, so we try to minimize this cost. + :param seq: + :return: + """ + + targets, lures = self.count_targets_and_lures(seq) + targets_ratio_cost = 1.0 - self.norm_targets_ratio_dist.pdf(targets/self.trials) + tl_ratio_cost = 1.0 - self.norm_tl_ratio_dist.pdf(self.calc_tl_ratio(seq)) + even_dist_cost = 1.0 - self.norm_even_dist.pdf(self.calc_even_distribution_distance(seq)) + # print(targets_ratio_cost, tl_ratio_cost, even_dist_cost) + return targets_ratio_cost + tl_ratio_cost + even_dist_cost + + def count_targets_and_lures(self, seq): + n = self.n + targets = 0.0 + lures = 0.0 + for index in range(n, len(seq)): + if seq[index] == seq[index - n]: + targets += 1.0 + elif seq[index] == seq[index - (n-1)] or seq[index] == seq[index - (n+1)]: + lures += 1.0 + return targets, lures + + def calc_tl_ratio(self, seq): + """Calculates the T/L ratio in a block of trials.""" + targets, lures = self.count_targets_and_lures(seq) + if lures < 0.01: # avoid division by zero + lures = 0.01 + return targets/lures + + +def __generate_stat_csv(filename): + alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] + trials = 24 + n = 2 + import csv + import heapq + with open(filename, mode='w') as stat_dist_file: + writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) + writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed']) + for i in range(100): + generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials) + seq = generator.generate() + dist = [float(seq.count(c)) for c in alphabetic_choices] + ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3) + writer.writerow([str(i)] + dist + [str(ralph_skewed)]) + + +if __name__ == '__main__': + __generate_stat_csv('../stat/nb_gm_003_2back_24trials.csv') diff --git a/demo/skewed_nback.py b/demo/skewed_nback.py index 61e32ba..44dcfa1 100644 --- a/demo/skewed_nback.py +++ b/demo/skewed_nback.py @@ -1,8 +1,8 @@ import expyriment from expyriment import design, control, stimuli, misc -from generators import skewed_random +from generators import nb_gm_002 -nback_sequence = skewed_random.SequenceGenerator().generate() +nback_sequence = 
diff --git a/generators/nb_gm_005.py b/generators/nb_gm_005.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/generators/nb_gm_005.py
+ """ + + def __init__(self, choices, trials, n=3, targets_ratio=0.33): + self.trials, self.choices, self.n, self.targets_ratio = trials, choices, n, targets_ratio + self.seq = [] + self.norm_even_dist = scipy.stats.norm(0, trials/2) + self.norm_targets_ratio_dist = scipy.stats.norm(targets_ratio, 0.5) + + def generate(self): + print('next') + while not self.seq or len(self.seq) < self.trials: + self.seq = self.__find_best_next_sequence(self.seq, self.choices) + return self.seq + + def __find_best_next_sequence(self, seq: list, choices: list) -> list: + import sys + min_cost = sys.float_info.max + best_seq = seq + random.shuffle(choices) # to avoid ordering effect + for choice in choices: + tmp_seq = seq + list(choice) + cost = self.__cost(tmp_seq) + if cost < min_cost: + min_cost = cost + best_seq = tmp_seq + return best_seq + + def __even_distribution_cost(self, seq): + costs = {c: 0.0 for c in self.choices} + for c in list(seq): + costs[c] += (1.0 if costs.__contains__(c) else 0.0) + even_ratio = self.trials / len(self.choices) + costs = {k: abs(v - even_ratio)/self.trials for k, v in costs.items()} + max_cost = max(list(costs.values())) + return 1.0 - self.norm_even_dist.pdf(max_cost) + + def cost(self, seq): + """ + Calculate overall fitness of a sequence (block of trials). + Right now it's a cost function, so we try to minimize this cost. + :param seq: + :return: + """ + + targets, lures = self.count_targets_and_lures(seq) + targets_ratio_cost = 1.0 - self.norm_targets_ratio_dist.pdf(targets/self.trials) + tl_ratio_cost = 1.0 - self.norm_tl_ratio_dist.pdf(self.calc_tl_ratio(seq)) + even_dist_cost = 1.0 - self.norm_even_dist.pdf(self.calc_even_distribution_distance(seq)) + # print(targets_ratio_cost, tl_ratio_cost, even_dist_cost) + return targets_ratio_cost + tl_ratio_cost + even_dist_cost + + def count_targets_and_lures(self, seq): + n = self.n + targets = 0.0 + lures = 0.0 + for index in range(n, len(seq)): + if seq[index] == seq[index - n]: + targets += 1.0 + elif seq[index] == seq[index - (n-1)] or seq[index] == seq[index - (n+1)]: + lures += 1.0 + return targets, lures + + def calc_tl_ratio(self, seq): + """Calculates the T/L ratio in a block of trials.""" + targets, lures = self.count_targets_and_lures(seq) + if lures < 0.01: # avoid division by zero + lures = 0.01 + return targets/lures + + +def __generate_stat_csv(filename): + alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] + trials = 24 + n = 2 + import csv + import heapq + with open(filename, mode='w') as stat_dist_file: + writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) + writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed']) + for i in range(100): + generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials) + seq = generator.generate() + dist = [float(seq.count(c)) for c in alphabetic_choices] + ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3) + writer.writerow([str(i)] + dist + [str(ralph_skewed)]) + + +if __name__ == '__main__': + __generate_stat_csv('../stat/nb_gm_003_2back_24trials.csv') diff --git a/generators/nb_gm_005.py b/generators/nb_gm_005.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/generators/nb_gm_005.py diff --git a/generators/progressive_random.py b/generators/progressive_random.py index ff2f973..5c0d43f 100644 --- a/generators/progressive_random.py +++ b/generators/progressive_random.py @@ -89,16 +89,23 @@ return targets/lures -if __name__ == '__main__': - +def 
__generate_stat_csv(filename): alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] - generator = SequenceGenerator(alphabetic_choices, trials=128, n=3) - sq = generator.generate() - tl_ratio = generator.calc_tl_ratio(sq) - even_dist_distance = generator.calc_even_distribution_distance(sq) + trials = 64 + n = 2 + import csv + import heapq + with open(filename, mode='w') as stat_dist_file: + writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) + writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed']) + for i in range(10): + print(f'generating sequence {i}...') + generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials) + seq = generator.generate() + dist = [float(seq.count(c)) for c in alphabetic_choices] + ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3) + writer.writerow([str(i)] + dist + [str(ralph_skewed)]) - print( - 'Progressively-Optimized Sequence: targets=%d, lures=%d' % generator.count_targets_and_lures(sq), - 'with tl_ratio=%f' % tl_ratio, - 'and even_dist_cost=%f' % even_dist_distance - ) + +if __name__ == '__main__': + __generate_stat_csv('../stat/progressive_random_2back_24trials.csv') diff --git a/demo/skewed_nback.py b/demo/skewed_nback.py index 61e32ba..44dcfa1 100644 --- a/demo/skewed_nback.py +++ b/demo/skewed_nback.py @@ -1,8 +1,8 @@ import expyriment from expyriment import design, control, stimuli, misc -from generators import skewed_random +from generators import nb_gm_002 -nback_sequence = skewed_random.SequenceGenerator().generate() +nback_sequence = nb_gm_002.SequenceGenerator().generate() exp = design.Experiment("Skewed N-Back Task") control.initialize(exp) diff --git a/generators/nb_gm_001.py b/generators/nb_gm_001.py new file mode 100644 index 0000000..3487fd0 --- /dev/null +++ b/generators/nb_gm_001.py @@ -0,0 +1,64 @@ +import random +import csv + +import heapq + + +class SequenceGenerator: + """Generate N-Back sequences with random sampling, but increase matching probability over time.""" + + def __init__( + self, + choices: list, + n=2, + trials=24, + target_probability_start=0.33, + target_probability_end=0.50 + ): + self.n, self.choices, self.trials = n, choices, trials + self.target_probability_step = target_probability_end - target_probability_start + self.target_probability = target_probability_start + self.seq = [] + + def generate(self) -> list: + self.seq = [] + for t in range(self.trials): + self.seq += self.random_sample() + return self.seq + + def random_sample(self): + is_target = (random.random() > self.target_probability) + self.target_probability += self.target_probability_step + choices = [item for item in self.choices if len(self.seq) (trials*2/3) + writer.writerow([str(i)] + dist + [str(ralph_skewed)]) + + +if __name__ == '__main__': + __test_generate_stat_csv('../stat/nb_gm_001_2back_24trials.csv') diff --git a/generators/nb_gm_002.py b/generators/nb_gm_002.py new file mode 100644 index 0000000..26fb115 --- /dev/null +++ b/generators/nb_gm_002.py @@ -0,0 +1,81 @@ +import logging +import random +import csv + +import heapq + +class SequenceGenerator: + """nb_gm_002 generator + Generates skewed random sequence of stimuli for the n-back task, based on Ralph (2014). + Each sequence contains specific fraction of matched trials (targets). 
+ """ + + def __init__( + self, + choices: list, + n=2, + trials=24, # Number of total trials + targets=8, # Number of targets + lures1=2, # Number of lures (foil) similar to the (N+1)-back + lures2=2 # Number of lures (foil) similar to the (N-1)-back + ): + self.n, self.choices, self.trials, self.targets, self.lures1, self.lures2 = n, choices, trials, targets, lures1, lures2 + self.distractors = trials - targets - lures1 - lures2 + self.seq = [] + + def generate(self) -> list: + trial = 1 + self.seq = [] + while trial <= self.trials: + self.seq += self.random_stimulus(trial) + trial += 1 + return self.seq + + def random_stimulus(self, trial): + rnd = random.randint(1, self.trials - trial + 1) + targets, lures1, lures2 = self.targets, self.lures1, self.lures2 + if rnd <= targets and len(self.seq) >= self.n: + self.targets -= 1 + return self.seq[-self.n] + elif targets < rnd <= targets + lures1 and len(self.seq) >= self.n + 1: + self.lures1 -= 1 + return self.seq[-(self.n+1)] + elif targets + lures1 < rnd <= targets + lures1 + lures2 and len(self.seq) >= self.n - 1: + self.lures2 -= 1 + return self.seq[-(self.n-1)] + + # distract + self.distractors -= 1 + choices = [item for item in self.choices if item not in self.seq[-self.n - 1:-self.n + 1]] + return random.choice(choices) + + def count_targets_and_lures(self): + n = self.n + seq = self.seq + targets = 0.0 + lures = 0.0 + for index in range(n, len(seq)): + if seq[index] == seq[index - n]: + targets += 1.0 + elif seq[index] == seq[index - (n-1)] or seq[index] == seq[index - (n+1)]: + lures += 1.0 + return targets, lures + + +def __test_generate_stat_csv(filename): + alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] + trials = 24 + n = 2 + with open(filename, mode='w') as stat_dist_file: + writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) + writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed']) + for i in range(1000): + generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials) + seq = generator.generate() + dist = [float(seq.count(c)) for c in alphabetic_choices] + ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3) + writer.writerow([str(i)] + dist + [str(ralph_skewed)]) + + +if __name__ == '__main__': + __test_generate_stat_csv('../stat/nb_gm_002_2back_24trials.csv') diff --git a/generators/nb_gm_003.py b/generators/nb_gm_003.py new file mode 100644 index 0000000..fce1793 --- /dev/null +++ b/generators/nb_gm_003.py @@ -0,0 +1,89 @@ +import random +import scipy.stats + + +class SequenceGenerator: + """nb_gm_003 + Generates a sequence of trials with even distribution of stimuli. 
+ """ + + def __init__(self, choices, trials, n=3, targets_ratio=0.33): + self.trials, self.choices, self.n, self.targets_ratio = trials, choices, n, targets_ratio + self.seq = [] + self.norm_even_dist = scipy.stats.norm(0, trials/2) + self.norm_targets_ratio_dist = scipy.stats.norm(targets_ratio, 0.1) + + def generate(self): + print('next') + while not self.seq or len(self.seq) < self.trials: + self.seq = self.__find_best_next_sequence(self.seq, self.choices) + return self.seq + + def __find_best_next_sequence(self, seq: list, choices: list) -> list: + import sys + min_cost = sys.float_info.max + best_seq = seq + random.shuffle(choices) # to avoid ordering effect + for choice in choices: + tmp_seq = seq + list(choice) + cost = self.__cost(tmp_seq) + if cost < min_cost: + min_cost = cost + best_seq = tmp_seq + return best_seq + + def __cost(self, seq): + return self.__even_dist_cost(seq) + self.__target_ratio_cost(seq) + + + def __even_dist_cost(self, seq): + costs = {c: 0.0 for c in self.choices} + for c in list(seq): + costs[c] += (1.0 if costs.__contains__(c) else 0.0) + even_ratio = self.trials / len(self.choices) + costs = {k: abs(v - even_ratio)/self.trials for k, v in costs.items()} + max_cost = max(list(costs.values())) + return 1.0 - self.norm_even_dist.pdf(max_cost) + + def __target_ratio_cost(self, seq): + targets, _ = self.count_targets_and_lures(seq) + return 1.0 - self.norm_targets_ratio_dist.pdf(targets/self.trials) + + def count_targets_and_lures(self, seq): + n = self.n + targets = 0.0 + lures = 0.0 + for index in range(n, len(seq)): + if seq[index] == seq[index - n]: + targets += 1.0 + elif seq[index] == seq[index - (n-1)] or seq[index] == seq[index - (n+1)]: + lures += 1.0 + return targets, lures + + def calc_tl_ratio(self, seq): + """Calculates the T/L ratio in a block of trials.""" + targets, lures = self.count_targets_and_lures(seq) + if lures < 0.01: # avoid division by zero + lures = 0.01 + return targets/lures + + +def __generate_stat_csv(filename): + alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] + trials = 24 + n = 2 + import csv + import heapq + with open(filename, mode='w') as stat_dist_file: + writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) + writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed']) + for i in range(100): + generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials) + seq = generator.generate() + dist = [float(seq.count(c)) for c in alphabetic_choices] + ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3) + writer.writerow([str(i)] + dist + [str(ralph_skewed)]) + + +if __name__ == '__main__': + __generate_stat_csv('../stat/nb_gm_003_2back_24trials.csv') diff --git a/generators/nb_gm_004.py b/generators/nb_gm_004.py new file mode 100644 index 0000000..bbac64f --- /dev/null +++ b/generators/nb_gm_004.py @@ -0,0 +1,96 @@ +import random +import scipy.stats + + +class SequenceGenerator: + """nb_gm_003 + Generates a sequence of trials with even distribution of stimuli. 
+ """ + + def __init__(self, choices, trials, n=3, targets_ratio=0.33): + self.trials, self.choices, self.n, self.targets_ratio = trials, choices, n, targets_ratio + self.seq = [] + self.norm_even_dist = scipy.stats.norm(0, trials/2) + self.norm_targets_ratio_dist = scipy.stats.norm(targets_ratio, 0.5) + + def generate(self): + print('next') + while not self.seq or len(self.seq) < self.trials: + self.seq = self.__find_best_next_sequence(self.seq, self.choices) + return self.seq + + def __find_best_next_sequence(self, seq: list, choices: list) -> list: + import sys + min_cost = sys.float_info.max + best_seq = seq + random.shuffle(choices) # to avoid ordering effect + for choice in choices: + tmp_seq = seq + list(choice) + cost = self.__cost(tmp_seq) + if cost < min_cost: + min_cost = cost + best_seq = tmp_seq + return best_seq + + def __even_distribution_cost(self, seq): + costs = {c: 0.0 for c in self.choices} + for c in list(seq): + costs[c] += (1.0 if costs.__contains__(c) else 0.0) + even_ratio = self.trials / len(self.choices) + costs = {k: abs(v - even_ratio)/self.trials for k, v in costs.items()} + max_cost = max(list(costs.values())) + return 1.0 - self.norm_even_dist.pdf(max_cost) + + def cost(self, seq): + """ + Calculate overall fitness of a sequence (block of trials). + Right now it's a cost function, so we try to minimize this cost. + :param seq: + :return: + """ + + targets, lures = self.count_targets_and_lures(seq) + targets_ratio_cost = 1.0 - self.norm_targets_ratio_dist.pdf(targets/self.trials) + tl_ratio_cost = 1.0 - self.norm_tl_ratio_dist.pdf(self.calc_tl_ratio(seq)) + even_dist_cost = 1.0 - self.norm_even_dist.pdf(self.calc_even_distribution_distance(seq)) + # print(targets_ratio_cost, tl_ratio_cost, even_dist_cost) + return targets_ratio_cost + tl_ratio_cost + even_dist_cost + + def count_targets_and_lures(self, seq): + n = self.n + targets = 0.0 + lures = 0.0 + for index in range(n, len(seq)): + if seq[index] == seq[index - n]: + targets += 1.0 + elif seq[index] == seq[index - (n-1)] or seq[index] == seq[index - (n+1)]: + lures += 1.0 + return targets, lures + + def calc_tl_ratio(self, seq): + """Calculates the T/L ratio in a block of trials.""" + targets, lures = self.count_targets_and_lures(seq) + if lures < 0.01: # avoid division by zero + lures = 0.01 + return targets/lures + + +def __generate_stat_csv(filename): + alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] + trials = 24 + n = 2 + import csv + import heapq + with open(filename, mode='w') as stat_dist_file: + writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) + writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed']) + for i in range(100): + generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials) + seq = generator.generate() + dist = [float(seq.count(c)) for c in alphabetic_choices] + ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3) + writer.writerow([str(i)] + dist + [str(ralph_skewed)]) + + +if __name__ == '__main__': + __generate_stat_csv('../stat/nb_gm_003_2back_24trials.csv') diff --git a/generators/nb_gm_005.py b/generators/nb_gm_005.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/generators/nb_gm_005.py diff --git a/generators/progressive_random.py b/generators/progressive_random.py index ff2f973..5c0d43f 100644 --- a/generators/progressive_random.py +++ b/generators/progressive_random.py @@ -89,16 +89,23 @@ return targets/lures -if __name__ == '__main__': - +def 
__generate_stat_csv(filename): alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] - generator = SequenceGenerator(alphabetic_choices, trials=128, n=3) - sq = generator.generate() - tl_ratio = generator.calc_tl_ratio(sq) - even_dist_distance = generator.calc_even_distribution_distance(sq) + trials = 64 + n = 2 + import csv + import heapq + with open(filename, mode='w') as stat_dist_file: + writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) + writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed']) + for i in range(10): + print(f'generating sequence {i}...') + generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials) + seq = generator.generate() + dist = [float(seq.count(c)) for c in alphabetic_choices] + ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3) + writer.writerow([str(i)] + dist + [str(ralph_skewed)]) - print( - 'Progressively-Optimized Sequence: targets=%d, lures=%d' % generator.count_targets_and_lures(sq), - 'with tl_ratio=%f' % tl_ratio, - 'and even_dist_cost=%f' % even_dist_distance - ) + +if __name__ == '__main__': + __generate_stat_csv('../stat/progressive_random_2back_24trials.csv') diff --git a/generators/skewed_random.py b/generators/skewed_random.py deleted file mode 100644 index f07243b..0000000 --- a/generators/skewed_random.py +++ /dev/null @@ -1,78 +0,0 @@ -import logging -import random -import csv - -import heapq - -class SequenceGenerator: - """Generates random sequence of stimuli for the n-back task. Implementation is based on Ralph (2014).""" - - def __init__( - self, - choices: list, - n=2, - trials=24, # Number of total trials - targets=8, # Number of targets - lures1=2, # Number of lures (foil) similar to the (N+1)-back - lures2=2 # Number of lures (foil) similar to the (N-1)-back - ): - self.n, self.choices, self.trials, self.targets, self.lures1, self.lures2 = n, choices, trials, targets, lures1, lures2 - self.distractors = trials - targets - lures1 - lures2 - self.seq = [] - - def generate(self) -> list: - trial = 1 - self.seq = [] - while trial <= self.trials: - self.seq += self.random_stimulus(trial) - trial += 1 - return self.seq - - def random_stimulus(self, trial): - rnd = random.randint(1, self.trials - trial + 1) - targets, lures1, lures2 = self.targets, self.lures1, self.lures2 - if rnd <= targets and len(self.seq) >= self.n: - self.targets -= 1 - return self.seq[-self.n] - elif targets < rnd <= targets + lures1 and len(self.seq) >= self.n + 1: - self.lures1 -= 1 - return self.seq[-(self.n+1)] - elif targets + lures1 < rnd <= targets + lures1 + lures2 and len(self.seq) >= self.n - 1: - self.lures2 -= 1 - return self.seq[-(self.n-1)] - - # distract - self.distractors -= 1 - choices = [item for item in self.choices if item not in self.seq[-self.n - 1:-self.n + 1]] - return random.choice(choices) - - def count_targets_and_lures(self): - n = self.n - seq = self.seq - targets = 0.0 - lures = 0.0 - for index in range(n, len(seq)): - if seq[index] == seq[index - n]: - targets += 1.0 - elif seq[index] == seq[index - (n-1)] or seq[index] == seq[index - (n+1)]: - lures += 1.0 - return targets, lures - - -def __test_generate_stat_csv(filename): - alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] - trials = 240 - n = 2 - with open(filename, mode='w') as stat_dist_file: - writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) - writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed']) - for i in 
range(1000): - generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials) - seq = generator.generate() - dist = [float(seq.count(c)) for c in alphabetic_choices] - ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3) - writer.writerow([str(i)] + dist + [str(ralph_skewed)]) - - -if __name__ == '__main__': - __test_generate_stat_csv('../stat/skewed_random_statistical_distributions_240trials_1000runs.csv')
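Note on the `ralph_skewed` statistic: every `__test_generate_stat_csv` helper above derives the same per-sequence flag. It counts how often each candidate stimulus occurs in the generated block, sums the counts of the most frequent half of the choices (via `heapq.nlargest`), and marks the block as skewed when that half accounts for more than two thirds of the trials. A minimal standalone version of the check, with a deliberately lopsided 24-trial block as a hypothetical input (the helper name `is_ralph_skewed` is ours, not part of the diff):

import heapq

def is_ralph_skewed(seq, choices, trials):
    # Per-letter occurrence counts for the block.
    dist = [seq.count(c) for c in choices]
    # Counts of the most frequent half of the candidate stimuli.
    top_half = heapq.nlargest(len(choices) // 2, dist)
    # Skewed when that half covers more than two thirds of all trials.
    return sum(top_half) > trials * 2 / 3

choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']
seq = ['A', 'B', 'A', 'C', 'A', 'B'] * 4               # 24 trials dominated by A and B
print(is_ralph_skewed(seq, choices, trials=len(seq)))  # True: the top four letters cover all 24 trials

With the defaults used above (8 choices, 24 trials per block) a perfectly even block puts 3 occurrences on each letter, so the top four letters cover only 12 of 24 trials and the flag stays False. The regenerated stat CSV below applies the same check to 240-trial blocks.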
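The two optimization-based generators above (nb_gm_003 and nb_gm_004) grow a block greedily: at each step they try every candidate stimulus, score the extended sequence with a cost built from normal-density terms (distance of the letter distribution from uniform, distance of the target ratio from the requested value, and, per nb_gm_004's docstring, a targets-to-lures term), and keep the cheapest extension. The sketch below shows that scheme end to end under stated assumptions: the preferred T/L ratio of 2.0 with spread 0.5 is invented for illustration (the diff does not define `norm_tl_ratio_dist`), and the helper names `block_cost` and `greedy_generate` are ours.

import random
import scipy.stats

def count_targets_and_lures(seq, n):
    # Targets repeat the item seen n steps back; lures repeat the item n-1 or n+1 steps back.
    targets = lures = 0
    for i in range(n, len(seq)):
        if seq[i] == seq[i - n]:
            targets += 1
        elif seq[i] == seq[i - (n - 1)] or (i >= n + 1 and seq[i] == seq[i - (n + 1)]):
            lures += 1
    return targets, lures

def block_cost(seq, choices, trials, n=2, targets_ratio=0.33):
    # Even-distribution term: worst per-letter deviation from a uniform count,
    # pushed through a normal density as in nb_gm_003/nb_gm_004.
    even_ratio = trials / len(choices)
    max_dev = max(abs(seq.count(c) - even_ratio) / trials for c in choices)
    even_cost = 1.0 - scipy.stats.norm(0, trials / 2).pdf(max_dev)

    # Target-ratio term: distance of the realized target ratio from the requested one.
    targets, lures = count_targets_and_lures(seq, n)
    ratio_cost = 1.0 - scipy.stats.norm(targets_ratio, 0.1).pdf(targets / trials)

    # Targets-to-lures term. The preferred T/L ratio of 2.0 +/- 0.5 is an assumption
    # made for this sketch; it is not a value given anywhere in the diff.
    tl_cost = 1.0 - scipy.stats.norm(2.0, 0.5).pdf(targets / max(lures, 0.01))

    return even_cost + ratio_cost + tl_cost

def greedy_generate(choices, trials, n=2):
    # Extend the block one stimulus at a time, keeping whichever candidate
    # minimizes the running cost of the whole block so far.
    seq = []
    while len(seq) < trials:
        candidates = list(choices)
        random.shuffle(candidates)  # shuffle a copy so ties are broken randomly
        seq = min((seq + [c] for c in candidates),
                  key=lambda s: block_cost(s, choices, trials, n))
    return seq

if __name__ == '__main__':
    block = greedy_generate(['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'], trials=24, n=2)
    print(block, count_targets_and_lures(block, n=2))

Compared with the quota-based nb_gm_002 sampler, this family trades a slower per-trial search for direct control over how even the letter distribution ends up and how close the target ratio lands to the requested value.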
+ """ + + def __init__( + self, + choices: list, + n=2, + trials=24, # Number of total trials + targets=8, # Number of targets + lures1=2, # Number of lures (foil) similar to the (N+1)-back + lures2=2 # Number of lures (foil) similar to the (N-1)-back + ): + self.n, self.choices, self.trials, self.targets, self.lures1, self.lures2 = n, choices, trials, targets, lures1, lures2 + self.distractors = trials - targets - lures1 - lures2 + self.seq = [] + + def generate(self) -> list: + trial = 1 + self.seq = [] + while trial <= self.trials: + self.seq += self.random_stimulus(trial) + trial += 1 + return self.seq + + def random_stimulus(self, trial): + rnd = random.randint(1, self.trials - trial + 1) + targets, lures1, lures2 = self.targets, self.lures1, self.lures2 + if rnd <= targets and len(self.seq) >= self.n: + self.targets -= 1 + return self.seq[-self.n] + elif targets < rnd <= targets + lures1 and len(self.seq) >= self.n + 1: + self.lures1 -= 1 + return self.seq[-(self.n+1)] + elif targets + lures1 < rnd <= targets + lures1 + lures2 and len(self.seq) >= self.n - 1: + self.lures2 -= 1 + return self.seq[-(self.n-1)] + + # distract + self.distractors -= 1 + choices = [item for item in self.choices if item not in self.seq[-self.n - 1:-self.n + 1]] + return random.choice(choices) + + def count_targets_and_lures(self): + n = self.n + seq = self.seq + targets = 0.0 + lures = 0.0 + for index in range(n, len(seq)): + if seq[index] == seq[index - n]: + targets += 1.0 + elif seq[index] == seq[index - (n-1)] or seq[index] == seq[index - (n+1)]: + lures += 1.0 + return targets, lures + + +def __test_generate_stat_csv(filename): + alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] + trials = 24 + n = 2 + with open(filename, mode='w') as stat_dist_file: + writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) + writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed']) + for i in range(1000): + generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials) + seq = generator.generate() + dist = [float(seq.count(c)) for c in alphabetic_choices] + ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3) + writer.writerow([str(i)] + dist + [str(ralph_skewed)]) + + +if __name__ == '__main__': + __test_generate_stat_csv('../stat/nb_gm_002_2back_24trials.csv') diff --git a/generators/nb_gm_003.py b/generators/nb_gm_003.py new file mode 100644 index 0000000..fce1793 --- /dev/null +++ b/generators/nb_gm_003.py @@ -0,0 +1,89 @@ +import random +import scipy.stats + + +class SequenceGenerator: + """nb_gm_003 + Generates a sequence of trials with even distribution of stimuli. 
+ """ + + def __init__(self, choices, trials, n=3, targets_ratio=0.33): + self.trials, self.choices, self.n, self.targets_ratio = trials, choices, n, targets_ratio + self.seq = [] + self.norm_even_dist = scipy.stats.norm(0, trials/2) + self.norm_targets_ratio_dist = scipy.stats.norm(targets_ratio, 0.1) + + def generate(self): + print('next') + while not self.seq or len(self.seq) < self.trials: + self.seq = self.__find_best_next_sequence(self.seq, self.choices) + return self.seq + + def __find_best_next_sequence(self, seq: list, choices: list) -> list: + import sys + min_cost = sys.float_info.max + best_seq = seq + random.shuffle(choices) # to avoid ordering effect + for choice in choices: + tmp_seq = seq + list(choice) + cost = self.__cost(tmp_seq) + if cost < min_cost: + min_cost = cost + best_seq = tmp_seq + return best_seq + + def __cost(self, seq): + return self.__even_dist_cost(seq) + self.__target_ratio_cost(seq) + + + def __even_dist_cost(self, seq): + costs = {c: 0.0 for c in self.choices} + for c in list(seq): + costs[c] += (1.0 if costs.__contains__(c) else 0.0) + even_ratio = self.trials / len(self.choices) + costs = {k: abs(v - even_ratio)/self.trials for k, v in costs.items()} + max_cost = max(list(costs.values())) + return 1.0 - self.norm_even_dist.pdf(max_cost) + + def __target_ratio_cost(self, seq): + targets, _ = self.count_targets_and_lures(seq) + return 1.0 - self.norm_targets_ratio_dist.pdf(targets/self.trials) + + def count_targets_and_lures(self, seq): + n = self.n + targets = 0.0 + lures = 0.0 + for index in range(n, len(seq)): + if seq[index] == seq[index - n]: + targets += 1.0 + elif seq[index] == seq[index - (n-1)] or seq[index] == seq[index - (n+1)]: + lures += 1.0 + return targets, lures + + def calc_tl_ratio(self, seq): + """Calculates the T/L ratio in a block of trials.""" + targets, lures = self.count_targets_and_lures(seq) + if lures < 0.01: # avoid division by zero + lures = 0.01 + return targets/lures + + +def __generate_stat_csv(filename): + alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] + trials = 24 + n = 2 + import csv + import heapq + with open(filename, mode='w') as stat_dist_file: + writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) + writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed']) + for i in range(100): + generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials) + seq = generator.generate() + dist = [float(seq.count(c)) for c in alphabetic_choices] + ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3) + writer.writerow([str(i)] + dist + [str(ralph_skewed)]) + + +if __name__ == '__main__': + __generate_stat_csv('../stat/nb_gm_003_2back_24trials.csv') diff --git a/generators/nb_gm_004.py b/generators/nb_gm_004.py new file mode 100644 index 0000000..bbac64f --- /dev/null +++ b/generators/nb_gm_004.py @@ -0,0 +1,96 @@ +import random +import scipy.stats + + +class SequenceGenerator: + """nb_gm_003 + Generates a sequence of trials with even distribution of stimuli. 
+ """ + + def __init__(self, choices, trials, n=3, targets_ratio=0.33): + self.trials, self.choices, self.n, self.targets_ratio = trials, choices, n, targets_ratio + self.seq = [] + self.norm_even_dist = scipy.stats.norm(0, trials/2) + self.norm_targets_ratio_dist = scipy.stats.norm(targets_ratio, 0.5) + + def generate(self): + print('next') + while not self.seq or len(self.seq) < self.trials: + self.seq = self.__find_best_next_sequence(self.seq, self.choices) + return self.seq + + def __find_best_next_sequence(self, seq: list, choices: list) -> list: + import sys + min_cost = sys.float_info.max + best_seq = seq + random.shuffle(choices) # to avoid ordering effect + for choice in choices: + tmp_seq = seq + list(choice) + cost = self.__cost(tmp_seq) + if cost < min_cost: + min_cost = cost + best_seq = tmp_seq + return best_seq + + def __even_distribution_cost(self, seq): + costs = {c: 0.0 for c in self.choices} + for c in list(seq): + costs[c] += (1.0 if costs.__contains__(c) else 0.0) + even_ratio = self.trials / len(self.choices) + costs = {k: abs(v - even_ratio)/self.trials for k, v in costs.items()} + max_cost = max(list(costs.values())) + return 1.0 - self.norm_even_dist.pdf(max_cost) + + def cost(self, seq): + """ + Calculate overall fitness of a sequence (block of trials). + Right now it's a cost function, so we try to minimize this cost. + :param seq: + :return: + """ + + targets, lures = self.count_targets_and_lures(seq) + targets_ratio_cost = 1.0 - self.norm_targets_ratio_dist.pdf(targets/self.trials) + tl_ratio_cost = 1.0 - self.norm_tl_ratio_dist.pdf(self.calc_tl_ratio(seq)) + even_dist_cost = 1.0 - self.norm_even_dist.pdf(self.calc_even_distribution_distance(seq)) + # print(targets_ratio_cost, tl_ratio_cost, even_dist_cost) + return targets_ratio_cost + tl_ratio_cost + even_dist_cost + + def count_targets_and_lures(self, seq): + n = self.n + targets = 0.0 + lures = 0.0 + for index in range(n, len(seq)): + if seq[index] == seq[index - n]: + targets += 1.0 + elif seq[index] == seq[index - (n-1)] or seq[index] == seq[index - (n+1)]: + lures += 1.0 + return targets, lures + + def calc_tl_ratio(self, seq): + """Calculates the T/L ratio in a block of trials.""" + targets, lures = self.count_targets_and_lures(seq) + if lures < 0.01: # avoid division by zero + lures = 0.01 + return targets/lures + + +def __generate_stat_csv(filename): + alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] + trials = 24 + n = 2 + import csv + import heapq + with open(filename, mode='w') as stat_dist_file: + writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) + writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed']) + for i in range(100): + generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials) + seq = generator.generate() + dist = [float(seq.count(c)) for c in alphabetic_choices] + ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3) + writer.writerow([str(i)] + dist + [str(ralph_skewed)]) + + +if __name__ == '__main__': + __generate_stat_csv('../stat/nb_gm_003_2back_24trials.csv') diff --git a/generators/nb_gm_005.py b/generators/nb_gm_005.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/generators/nb_gm_005.py diff --git a/generators/progressive_random.py b/generators/progressive_random.py index ff2f973..5c0d43f 100644 --- a/generators/progressive_random.py +++ b/generators/progressive_random.py @@ -89,16 +89,23 @@ return targets/lures -if __name__ == '__main__': - +def 
__generate_stat_csv(filename): alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] - generator = SequenceGenerator(alphabetic_choices, trials=128, n=3) - sq = generator.generate() - tl_ratio = generator.calc_tl_ratio(sq) - even_dist_distance = generator.calc_even_distribution_distance(sq) + trials = 64 + n = 2 + import csv + import heapq + with open(filename, mode='w') as stat_dist_file: + writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) + writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed']) + for i in range(10): + print(f'generating sequence {i}...') + generator = SequenceGenerator(alphabetic_choices, n=n, trials=trials) + seq = generator.generate() + dist = [float(seq.count(c)) for c in alphabetic_choices] + ralph_skewed = sum(heapq.nlargest(int(len(alphabetic_choices)/2), dist)) > (trials*2/3) + writer.writerow([str(i)] + dist + [str(ralph_skewed)]) - print( - 'Progressively-Optimized Sequence: targets=%d, lures=%d' % generator.count_targets_and_lures(sq), - 'with tl_ratio=%f' % tl_ratio, - 'and even_dist_cost=%f' % even_dist_distance - ) + +if __name__ == '__main__': + __generate_stat_csv('../stat/progressive_random_2back_24trials.csv') diff --git a/generators/skewed_random.py b/generators/skewed_random.py deleted file mode 100644 index f07243b..0000000 --- a/generators/skewed_random.py +++ /dev/null @@ -1,78 +0,0 @@ -import logging -import random -import csv - -import heapq - -class SequenceGenerator: - """Generates random sequence of stimuli for the n-back task. Implementation is based on Ralph (2014).""" - - def __init__( - self, - choices: list, - n=2, - trials=24, # Number of total trials - targets=8, # Number of targets - lures1=2, # Number of lures (foil) similar to the (N+1)-back - lures2=2 # Number of lures (foil) similar to the (N-1)-back - ): - self.n, self.choices, self.trials, self.targets, self.lures1, self.lures2 = n, choices, trials, targets, lures1, lures2 - self.distractors = trials - targets - lures1 - lures2 - self.seq = [] - - def generate(self) -> list: - trial = 1 - self.seq = [] - while trial <= self.trials: - self.seq += self.random_stimulus(trial) - trial += 1 - return self.seq - - def random_stimulus(self, trial): - rnd = random.randint(1, self.trials - trial + 1) - targets, lures1, lures2 = self.targets, self.lures1, self.lures2 - if rnd <= targets and len(self.seq) >= self.n: - self.targets -= 1 - return self.seq[-self.n] - elif targets < rnd <= targets + lures1 and len(self.seq) >= self.n + 1: - self.lures1 -= 1 - return self.seq[-(self.n+1)] - elif targets + lures1 < rnd <= targets + lures1 + lures2 and len(self.seq) >= self.n - 1: - self.lures2 -= 1 - return self.seq[-(self.n-1)] - - # distract - self.distractors -= 1 - choices = [item for item in self.choices if item not in self.seq[-self.n - 1:-self.n + 1]] - return random.choice(choices) - - def count_targets_and_lures(self): - n = self.n - seq = self.seq - targets = 0.0 - lures = 0.0 - for index in range(n, len(seq)): - if seq[index] == seq[index - n]: - targets += 1.0 - elif seq[index] == seq[index - (n-1)] or seq[index] == seq[index - (n+1)]: - lures += 1.0 - return targets, lures - - -def __test_generate_stat_csv(filename): - alphabetic_choices = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] - trials = 240 - n = 2 - with open(filename, mode='w') as stat_dist_file: - writer = csv.writer(stat_dist_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) - writer.writerow(['index'] + alphabetic_choices + ['ralph_skewed']) - for i in 
diff --git a/stat/skewed_random_statistical_distributions_240trials_1000runs.csv b/stat/skewed_random_statistical_distributions_240trials_1000runs.csv index 430afac..df077c1 100644 --- a/stat/skewed_random_statistical_distributions_240trials_1000runs.csv +++ b/stat/skewed_random_statistical_distributions_240trials_1000runs.csv @@ -1,1001 +1,1001 @@ index,A,B,C,D,E,F,G,H,ralph_skewed
[old data rows 0 through 813 shown here omitted: each lists the per-letter counts of A-H in a 240-trial block plus the ralph_skewed flag, which is False in every row]
-814,29.0,30.0,24.0,31.0,23.0,31.0,39.0,33.0,False -815,25.0,31.0,38.0,28.0,27.0,29.0,37.0,25.0,False -816,27.0,30.0,38.0,32.0,24.0,31.0,32.0,26.0,False -817,34.0,35.0,31.0,31.0,27.0,24.0,26.0,32.0,False -818,27.0,31.0,31.0,36.0,29.0,22.0,27.0,37.0,False -819,31.0,32.0,34.0,23.0,38.0,26.0,33.0,23.0,False -820,28.0,27.0,28.0,35.0,29.0,37.0,27.0,29.0,False -821,32.0,30.0,35.0,27.0,27.0,28.0,30.0,31.0,False -822,30.0,30.0,31.0,33.0,29.0,31.0,26.0,30.0,False -823,25.0,35.0,39.0,32.0,24.0,30.0,30.0,25.0,False -824,30.0,35.0,33.0,33.0,30.0,24.0,25.0,30.0,False -825,29.0,34.0,30.0,30.0,24.0,35.0,29.0,29.0,False -826,31.0,25.0,38.0,36.0,25.0,27.0,26.0,32.0,False -827,36.0,21.0,30.0,31.0,32.0,23.0,34.0,33.0,False -828,32.0,29.0,32.0,28.0,29.0,31.0,32.0,27.0,False -829,33.0,35.0,24.0,33.0,31.0,27.0,27.0,30.0,False -830,36.0,30.0,31.0,23.0,32.0,31.0,25.0,32.0,False -831,33.0,30.0,31.0,33.0,33.0,25.0,28.0,27.0,False -832,23.0,21.0,32.0,32.0,29.0,32.0,37.0,34.0,False -833,26.0,36.0,29.0,31.0,30.0,28.0,32.0,28.0,False -834,30.0,29.0,31.0,25.0,37.0,24.0,30.0,34.0,False -835,34.0,31.0,29.0,28.0,32.0,26.0,28.0,32.0,False -836,30.0,34.0,22.0,38.0,26.0,30.0,24.0,36.0,False -837,31.0,27.0,27.0,30.0,26.0,34.0,26.0,39.0,False -838,36.0,25.0,27.0,29.0,32.0,23.0,38.0,30.0,False -839,29.0,33.0,28.0,33.0,28.0,32.0,35.0,22.0,False -840,30.0,31.0,27.0,28.0,30.0,33.0,36.0,25.0,False -841,27.0,28.0,37.0,38.0,23.0,31.0,26.0,30.0,False -842,40.0,34.0,34.0,25.0,29.0,28.0,29.0,21.0,False -843,37.0,31.0,27.0,34.0,26.0,23.0,32.0,30.0,False -844,31.0,29.0,33.0,30.0,29.0,26.0,26.0,36.0,False -845,24.0,39.0,32.0,25.0,33.0,33.0,30.0,24.0,False -846,31.0,27.0,31.0,33.0,32.0,31.0,24.0,31.0,False -847,35.0,33.0,34.0,29.0,29.0,24.0,29.0,27.0,False -848,32.0,31.0,32.0,33.0,31.0,26.0,28.0,27.0,False -849,34.0,31.0,26.0,27.0,25.0,39.0,25.0,33.0,False -850,32.0,29.0,28.0,26.0,28.0,33.0,26.0,38.0,False -851,30.0,30.0,24.0,28.0,37.0,32.0,28.0,31.0,False -852,34.0,35.0,32.0,23.0,32.0,28.0,33.0,23.0,False -853,33.0,31.0,29.0,30.0,36.0,30.0,24.0,27.0,False -854,34.0,30.0,28.0,33.0,26.0,28.0,30.0,31.0,False -855,27.0,24.0,25.0,30.0,37.0,34.0,35.0,28.0,False -856,39.0,29.0,33.0,31.0,27.0,28.0,28.0,25.0,False -857,29.0,38.0,32.0,32.0,28.0,30.0,33.0,18.0,False -858,32.0,32.0,34.0,28.0,29.0,33.0,27.0,25.0,False -859,26.0,26.0,29.0,36.0,30.0,32.0,30.0,31.0,False -860,28.0,27.0,30.0,32.0,35.0,31.0,26.0,31.0,False -861,34.0,31.0,34.0,29.0,23.0,23.0,24.0,42.0,False -862,36.0,31.0,26.0,30.0,30.0,26.0,31.0,30.0,False -863,29.0,36.0,31.0,21.0,33.0,29.0,36.0,25.0,False -864,28.0,34.0,29.0,30.0,30.0,26.0,32.0,31.0,False -865,35.0,29.0,25.0,28.0,27.0,35.0,27.0,34.0,False -866,25.0,29.0,23.0,39.0,25.0,27.0,31.0,41.0,False -867,30.0,33.0,29.0,25.0,30.0,28.0,34.0,31.0,False -868,24.0,27.0,32.0,27.0,30.0,30.0,34.0,36.0,False -869,25.0,29.0,29.0,33.0,30.0,35.0,30.0,29.0,False -870,26.0,26.0,29.0,27.0,34.0,35.0,29.0,34.0,False -871,34.0,29.0,31.0,28.0,32.0,28.0,30.0,28.0,False -872,34.0,32.0,28.0,19.0,28.0,32.0,35.0,32.0,False -873,32.0,29.0,36.0,24.0,32.0,26.0,29.0,32.0,False -874,31.0,24.0,23.0,30.0,37.0,38.0,27.0,30.0,False -875,27.0,25.0,32.0,32.0,28.0,34.0,35.0,27.0,False -876,25.0,25.0,25.0,34.0,34.0,36.0,27.0,34.0,False -877,38.0,32.0,30.0,32.0,26.0,18.0,39.0,25.0,False -878,28.0,30.0,32.0,30.0,35.0,27.0,31.0,27.0,False -879,30.0,37.0,34.0,26.0,27.0,28.0,30.0,28.0,False -880,30.0,34.0,27.0,36.0,33.0,31.0,25.0,24.0,False -881,33.0,36.0,32.0,32.0,26.0,32.0,24.0,25.0,False -882,33.0,40.0,28.0,27.0,36.0,27.0,25.0,24.0,False 
-883,36.0,22.0,38.0,31.0,20.0,33.0,26.0,34.0,False -884,33.0,32.0,27.0,35.0,25.0,34.0,24.0,30.0,False -885,31.0,36.0,30.0,27.0,22.0,37.0,29.0,28.0,False -886,29.0,27.0,25.0,40.0,29.0,32.0,27.0,31.0,False -887,26.0,27.0,36.0,26.0,30.0,37.0,29.0,29.0,False -888,27.0,37.0,36.0,29.0,25.0,25.0,27.0,34.0,False -889,23.0,32.0,27.0,37.0,31.0,36.0,28.0,26.0,False -890,24.0,18.0,24.0,32.0,39.0,33.0,30.0,40.0,False -891,31.0,27.0,31.0,28.0,30.0,31.0,30.0,32.0,False -892,26.0,31.0,33.0,26.0,34.0,33.0,30.0,27.0,False -893,31.0,36.0,34.0,23.0,31.0,31.0,25.0,29.0,False -894,38.0,35.0,31.0,23.0,29.0,34.0,24.0,26.0,False -895,35.0,28.0,42.0,18.0,24.0,31.0,27.0,35.0,False -896,41.0,26.0,30.0,29.0,28.0,27.0,29.0,30.0,False -897,33.0,30.0,28.0,31.0,34.0,32.0,34.0,18.0,False -898,36.0,28.0,30.0,27.0,29.0,30.0,29.0,31.0,False -899,35.0,35.0,26.0,27.0,28.0,29.0,29.0,31.0,False -900,32.0,30.0,27.0,31.0,33.0,23.0,29.0,35.0,False -901,32.0,34.0,22.0,28.0,31.0,30.0,26.0,37.0,False -902,30.0,28.0,32.0,27.0,27.0,35.0,33.0,28.0,False -903,33.0,31.0,28.0,31.0,24.0,32.0,30.0,31.0,False -904,25.0,28.0,37.0,28.0,38.0,25.0,36.0,23.0,False -905,34.0,28.0,30.0,27.0,27.0,28.0,34.0,32.0,False -906,25.0,33.0,27.0,29.0,33.0,31.0,34.0,28.0,False -907,42.0,33.0,21.0,31.0,25.0,28.0,32.0,28.0,False -908,27.0,22.0,34.0,34.0,35.0,29.0,30.0,29.0,False -909,29.0,29.0,30.0,32.0,27.0,32.0,35.0,26.0,False -910,26.0,27.0,31.0,30.0,36.0,28.0,33.0,29.0,False -911,26.0,33.0,29.0,31.0,31.0,23.0,31.0,36.0,False -912,35.0,29.0,29.0,26.0,31.0,32.0,31.0,27.0,False -913,30.0,25.0,26.0,25.0,33.0,30.0,31.0,40.0,False -914,28.0,29.0,29.0,29.0,26.0,35.0,35.0,29.0,False -915,29.0,30.0,29.0,18.0,34.0,46.0,23.0,31.0,False -916,33.0,31.0,31.0,27.0,34.0,31.0,23.0,30.0,False -917,29.0,31.0,24.0,36.0,26.0,32.0,31.0,31.0,False -918,17.0,32.0,31.0,33.0,27.0,37.0,30.0,33.0,False -919,28.0,22.0,32.0,35.0,35.0,24.0,29.0,35.0,False -920,26.0,27.0,23.0,30.0,38.0,29.0,32.0,35.0,False -921,32.0,25.0,29.0,34.0,27.0,33.0,30.0,30.0,False -922,35.0,30.0,20.0,29.0,33.0,25.0,33.0,35.0,False -923,24.0,30.0,29.0,21.0,37.0,34.0,31.0,34.0,False -924,32.0,26.0,37.0,30.0,29.0,30.0,24.0,32.0,False -925,28.0,28.0,24.0,37.0,33.0,39.0,27.0,24.0,False -926,22.0,30.0,31.0,32.0,30.0,39.0,31.0,25.0,False -927,28.0,24.0,24.0,29.0,31.0,29.0,37.0,38.0,False -928,30.0,29.0,26.0,27.0,27.0,35.0,34.0,32.0,False -929,32.0,27.0,24.0,33.0,27.0,32.0,39.0,26.0,False -930,30.0,29.0,24.0,28.0,36.0,28.0,33.0,32.0,False -931,30.0,37.0,29.0,30.0,34.0,34.0,26.0,20.0,False -932,22.0,34.0,30.0,31.0,29.0,30.0,34.0,30.0,False -933,31.0,29.0,23.0,29.0,29.0,40.0,30.0,29.0,False -934,26.0,29.0,23.0,35.0,30.0,36.0,33.0,28.0,False -935,32.0,26.0,35.0,26.0,28.0,30.0,28.0,35.0,False -936,31.0,39.0,20.0,25.0,30.0,33.0,40.0,22.0,False -937,23.0,23.0,38.0,32.0,32.0,32.0,29.0,31.0,False -938,34.0,31.0,35.0,32.0,27.0,27.0,30.0,24.0,False -939,30.0,28.0,21.0,39.0,24.0,31.0,33.0,34.0,False -940,30.0,29.0,38.0,27.0,31.0,30.0,27.0,28.0,False -941,28.0,29.0,28.0,26.0,34.0,30.0,35.0,30.0,False -942,39.0,25.0,25.0,30.0,30.0,38.0,33.0,20.0,False -943,34.0,37.0,34.0,26.0,19.0,29.0,28.0,33.0,False -944,27.0,28.0,31.0,32.0,33.0,28.0,26.0,35.0,False -945,34.0,26.0,33.0,26.0,29.0,28.0,26.0,38.0,False -946,30.0,38.0,31.0,26.0,31.0,27.0,24.0,33.0,False -947,26.0,28.0,35.0,26.0,30.0,30.0,33.0,32.0,False -948,29.0,34.0,28.0,26.0,19.0,36.0,34.0,34.0,False -949,28.0,30.0,31.0,35.0,32.0,25.0,34.0,25.0,False -950,34.0,25.0,38.0,25.0,28.0,36.0,28.0,26.0,False -951,28.0,31.0,26.0,32.0,32.0,32.0,27.0,32.0,False 
-952,22.0,32.0,40.0,27.0,30.0,31.0,28.0,30.0,False -953,29.0,27.0,29.0,36.0,32.0,27.0,23.0,37.0,False -954,29.0,26.0,24.0,25.0,30.0,34.0,38.0,34.0,False -955,32.0,34.0,33.0,33.0,23.0,29.0,25.0,31.0,False -956,30.0,28.0,32.0,37.0,27.0,33.0,32.0,21.0,False -957,34.0,34.0,36.0,19.0,29.0,26.0,31.0,31.0,False -958,27.0,32.0,31.0,32.0,34.0,26.0,28.0,30.0,False -959,29.0,33.0,28.0,35.0,24.0,32.0,28.0,31.0,False -960,33.0,32.0,37.0,28.0,26.0,27.0,32.0,25.0,False -961,27.0,31.0,28.0,26.0,38.0,28.0,28.0,34.0,False -962,25.0,36.0,38.0,28.0,20.0,21.0,40.0,32.0,False -963,32.0,22.0,26.0,32.0,27.0,29.0,37.0,35.0,False -964,30.0,27.0,26.0,33.0,26.0,36.0,27.0,35.0,False -965,24.0,32.0,34.0,29.0,27.0,34.0,23.0,37.0,False -966,29.0,32.0,27.0,30.0,34.0,27.0,32.0,29.0,False -967,29.0,25.0,31.0,30.0,30.0,34.0,31.0,30.0,False -968,31.0,31.0,33.0,32.0,27.0,31.0,27.0,28.0,False -969,30.0,33.0,25.0,27.0,37.0,34.0,27.0,27.0,False -970,31.0,33.0,31.0,31.0,25.0,31.0,27.0,31.0,False -971,26.0,28.0,34.0,29.0,26.0,39.0,28.0,30.0,False -972,34.0,25.0,30.0,26.0,23.0,36.0,39.0,27.0,False -973,29.0,34.0,24.0,26.0,37.0,27.0,34.0,29.0,False -974,27.0,36.0,32.0,29.0,36.0,26.0,30.0,24.0,False -975,25.0,32.0,31.0,30.0,33.0,31.0,26.0,32.0,False -976,24.0,29.0,30.0,37.0,32.0,28.0,30.0,30.0,False -977,30.0,38.0,33.0,29.0,28.0,28.0,29.0,25.0,False -978,35.0,32.0,27.0,38.0,32.0,26.0,30.0,20.0,False -979,32.0,29.0,36.0,31.0,22.0,29.0,25.0,36.0,False -980,32.0,33.0,32.0,34.0,28.0,31.0,22.0,28.0,False -981,33.0,34.0,24.0,23.0,35.0,34.0,27.0,30.0,False -982,29.0,29.0,31.0,32.0,28.0,24.0,32.0,35.0,False -983,29.0,28.0,39.0,34.0,31.0,27.0,24.0,28.0,False -984,24.0,36.0,30.0,24.0,30.0,34.0,34.0,28.0,False -985,27.0,29.0,30.0,31.0,37.0,34.0,23.0,29.0,False -986,34.0,24.0,29.0,38.0,28.0,19.0,32.0,36.0,False -987,28.0,31.0,27.0,36.0,25.0,26.0,37.0,30.0,False -988,35.0,28.0,18.0,34.0,32.0,30.0,31.0,32.0,False -989,23.0,32.0,28.0,29.0,30.0,26.0,40.0,32.0,False -990,41.0,33.0,37.0,24.0,28.0,26.0,27.0,24.0,False -991,35.0,33.0,26.0,32.0,27.0,31.0,28.0,28.0,False -992,25.0,37.0,25.0,35.0,31.0,25.0,29.0,33.0,False -993,36.0,26.0,35.0,32.0,30.0,27.0,31.0,23.0,False -994,27.0,24.0,32.0,32.0,28.0,31.0,39.0,27.0,False -995,28.0,43.0,22.0,26.0,28.0,39.0,29.0,25.0,False -996,25.0,31.0,33.0,30.0,28.0,30.0,33.0,30.0,False -997,32.0,32.0,25.0,36.0,33.0,28.0,28.0,26.0,False -998,23.0,36.0,27.0,22.0,32.0,29.0,38.0,33.0,False -999,31.0,34.0,27.0,32.0,29.0,36.0,32.0,19.0,False +0,30.0,34.0,28.0,24.0,30.0,29.0,36.0,29.0,False +1,25.0,37.0,32.0,30.0,28.0,27.0,31.0,30.0,False +2,37.0,32.0,28.0,29.0,30.0,30.0,34.0,20.0,False +3,32.0,31.0,25.0,32.0,31.0,29.0,26.0,34.0,False +4,35.0,26.0,24.0,32.0,33.0,23.0,30.0,37.0,False +5,28.0,25.0,37.0,30.0,32.0,30.0,27.0,31.0,False +6,29.0,31.0,25.0,23.0,27.0,28.0,37.0,40.0,False +7,36.0,32.0,28.0,29.0,33.0,23.0,29.0,30.0,False +8,24.0,35.0,23.0,27.0,36.0,31.0,30.0,34.0,False +9,27.0,34.0,30.0,29.0,26.0,34.0,31.0,29.0,False +10,25.0,33.0,34.0,29.0,29.0,29.0,28.0,33.0,False +11,34.0,22.0,27.0,33.0,23.0,36.0,35.0,30.0,False +12,29.0,33.0,25.0,31.0,36.0,26.0,36.0,24.0,False +13,31.0,30.0,27.0,25.0,38.0,30.0,31.0,28.0,False +14,33.0,33.0,24.0,34.0,30.0,30.0,26.0,30.0,False +15,30.0,34.0,31.0,28.0,27.0,35.0,25.0,30.0,False +16,29.0,30.0,25.0,26.0,25.0,38.0,36.0,31.0,False +17,27.0,35.0,32.0,39.0,25.0,18.0,30.0,34.0,False +18,30.0,35.0,20.0,32.0,28.0,31.0,30.0,34.0,False +19,22.0,31.0,28.0,33.0,31.0,34.0,32.0,29.0,False +20,26.0,32.0,28.0,30.0,35.0,36.0,22.0,31.0,False +21,29.0,26.0,31.0,31.0,34.0,33.0,30.0,26.0,False 
+22,40.0,25.0,25.0,34.0,32.0,25.0,33.0,26.0,False +23,22.0,25.0,37.0,27.0,34.0,34.0,32.0,29.0,False +24,34.0,27.0,28.0,24.0,34.0,33.0,31.0,29.0,False +25,43.0,27.0,31.0,28.0,21.0,28.0,33.0,29.0,False +26,33.0,31.0,31.0,31.0,28.0,24.0,32.0,30.0,False +27,29.0,24.0,27.0,32.0,30.0,32.0,31.0,35.0,False +28,26.0,28.0,32.0,37.0,32.0,34.0,25.0,26.0,False +29,33.0,24.0,29.0,26.0,32.0,36.0,30.0,30.0,False +30,27.0,31.0,36.0,28.0,33.0,28.0,36.0,21.0,False +31,32.0,31.0,21.0,35.0,25.0,30.0,29.0,37.0,False +32,36.0,33.0,32.0,32.0,28.0,27.0,27.0,25.0,False +33,33.0,27.0,26.0,32.0,28.0,33.0,28.0,33.0,False +34,34.0,27.0,35.0,41.0,24.0,31.0,22.0,26.0,False +35,31.0,30.0,36.0,24.0,26.0,38.0,29.0,26.0,False +36,33.0,24.0,32.0,32.0,31.0,35.0,24.0,29.0,False +37,26.0,33.0,31.0,38.0,28.0,29.0,31.0,24.0,False +38,34.0,33.0,29.0,31.0,27.0,23.0,31.0,32.0,False +39,30.0,32.0,27.0,21.0,39.0,28.0,33.0,30.0,False +40,25.0,27.0,22.0,29.0,31.0,34.0,39.0,33.0,False +41,30.0,35.0,23.0,35.0,28.0,30.0,30.0,29.0,False +42,27.0,29.0,27.0,36.0,26.0,35.0,31.0,29.0,False +43,29.0,22.0,41.0,32.0,33.0,29.0,33.0,21.0,False +44,29.0,29.0,33.0,37.0,25.0,30.0,29.0,28.0,False +45,31.0,36.0,34.0,30.0,26.0,28.0,24.0,31.0,False +46,33.0,27.0,25.0,23.0,33.0,34.0,30.0,35.0,False +47,27.0,21.0,36.0,30.0,37.0,32.0,24.0,33.0,False +48,29.0,28.0,27.0,31.0,33.0,33.0,29.0,30.0,False +49,29.0,31.0,29.0,26.0,28.0,38.0,22.0,37.0,False +50,37.0,26.0,30.0,29.0,28.0,27.0,31.0,32.0,False +51,35.0,29.0,29.0,29.0,29.0,33.0,28.0,28.0,False +52,32.0,28.0,29.0,30.0,21.0,38.0,33.0,29.0,False +53,30.0,35.0,34.0,24.0,25.0,33.0,34.0,25.0,False +54,29.0,26.0,33.0,35.0,24.0,29.0,40.0,24.0,False +55,28.0,29.0,35.0,32.0,28.0,30.0,33.0,25.0,False +56,30.0,24.0,26.0,35.0,33.0,28.0,28.0,36.0,False +57,24.0,29.0,28.0,26.0,37.0,31.0,36.0,29.0,False +58,33.0,26.0,35.0,30.0,26.0,28.0,28.0,34.0,False +59,28.0,28.0,31.0,34.0,32.0,35.0,23.0,29.0,False +60,31.0,29.0,37.0,27.0,27.0,30.0,30.0,29.0,False +61,27.0,32.0,37.0,28.0,31.0,35.0,26.0,24.0,False +62,29.0,27.0,26.0,30.0,29.0,30.0,35.0,34.0,False +63,36.0,29.0,28.0,30.0,26.0,29.0,35.0,27.0,False +64,30.0,29.0,33.0,23.0,31.0,29.0,36.0,29.0,False +65,35.0,29.0,27.0,32.0,37.0,26.0,33.0,21.0,False +66,34.0,33.0,32.0,22.0,32.0,31.0,26.0,30.0,False +67,20.0,30.0,32.0,44.0,31.0,26.0,28.0,29.0,False +68,30.0,32.0,36.0,26.0,30.0,35.0,24.0,27.0,False +69,28.0,34.0,34.0,27.0,30.0,25.0,29.0,33.0,False +70,40.0,33.0,29.0,29.0,27.0,27.0,29.0,26.0,False +71,22.0,28.0,34.0,26.0,27.0,33.0,34.0,36.0,False +72,29.0,27.0,28.0,28.0,31.0,35.0,34.0,28.0,False +73,28.0,29.0,28.0,33.0,31.0,33.0,27.0,31.0,False +74,29.0,28.0,32.0,31.0,33.0,26.0,23.0,38.0,False +75,31.0,23.0,31.0,34.0,33.0,29.0,31.0,28.0,False +76,26.0,33.0,33.0,29.0,26.0,32.0,31.0,30.0,False +77,29.0,24.0,31.0,35.0,34.0,31.0,29.0,27.0,False +78,31.0,25.0,33.0,22.0,29.0,29.0,38.0,33.0,False +79,37.0,28.0,25.0,28.0,29.0,27.0,33.0,33.0,False +80,37.0,33.0,35.0,27.0,21.0,25.0,30.0,32.0,False +81,30.0,34.0,28.0,36.0,24.0,27.0,31.0,30.0,False +82,26.0,30.0,22.0,27.0,38.0,29.0,37.0,31.0,False +83,31.0,31.0,33.0,27.0,32.0,25.0,30.0,31.0,False +84,30.0,29.0,27.0,28.0,34.0,32.0,24.0,36.0,False +85,32.0,36.0,26.0,34.0,31.0,29.0,32.0,20.0,False +86,34.0,27.0,23.0,32.0,32.0,30.0,24.0,38.0,False +87,33.0,25.0,35.0,29.0,36.0,25.0,28.0,29.0,False +88,26.0,28.0,30.0,32.0,29.0,31.0,33.0,31.0,False +89,29.0,32.0,29.0,32.0,31.0,33.0,34.0,20.0,False +90,28.0,33.0,33.0,29.0,40.0,25.0,23.0,29.0,False +91,25.0,33.0,30.0,32.0,32.0,25.0,31.0,32.0,False +92,23.0,37.0,29.0,32.0,30.0,33.0,24.0,32.0,False 
+93,29.0,31.0,28.0,32.0,27.0,35.0,29.0,29.0,False +94,26.0,30.0,34.0,29.0,32.0,21.0,29.0,39.0,False +95,19.0,29.0,27.0,29.0,45.0,29.0,31.0,31.0,False +96,32.0,24.0,28.0,35.0,22.0,40.0,27.0,32.0,False +97,29.0,38.0,31.0,26.0,28.0,26.0,28.0,34.0,False +98,35.0,30.0,29.0,37.0,24.0,26.0,28.0,31.0,False +99,35.0,31.0,30.0,33.0,33.0,25.0,24.0,29.0,False +100,28.0,34.0,25.0,26.0,25.0,35.0,36.0,31.0,False +101,26.0,35.0,30.0,29.0,32.0,31.0,31.0,26.0,False +102,25.0,28.0,23.0,34.0,28.0,38.0,35.0,29.0,False +103,38.0,24.0,23.0,35.0,33.0,33.0,26.0,28.0,False +104,26.0,39.0,30.0,34.0,24.0,32.0,25.0,30.0,False +105,28.0,24.0,32.0,26.0,32.0,37.0,29.0,32.0,False +106,30.0,29.0,26.0,33.0,32.0,28.0,29.0,33.0,False +107,30.0,26.0,34.0,33.0,24.0,28.0,32.0,33.0,False +108,35.0,31.0,29.0,26.0,32.0,26.0,36.0,25.0,False +109,24.0,38.0,31.0,26.0,33.0,25.0,31.0,32.0,False +110,32.0,24.0,37.0,28.0,31.0,32.0,33.0,23.0,False +111,31.0,33.0,31.0,31.0,28.0,31.0,24.0,31.0,False +112,25.0,41.0,35.0,28.0,32.0,20.0,28.0,31.0,False +113,40.0,25.0,33.0,32.0,32.0,27.0,29.0,22.0,False +114,30.0,31.0,26.0,33.0,33.0,28.0,24.0,35.0,False +115,31.0,31.0,30.0,27.0,32.0,36.0,28.0,25.0,False +116,28.0,30.0,32.0,32.0,29.0,28.0,31.0,30.0,False +117,30.0,24.0,24.0,29.0,31.0,33.0,34.0,35.0,False +118,33.0,22.0,25.0,36.0,37.0,25.0,30.0,32.0,False +119,31.0,29.0,32.0,30.0,34.0,18.0,35.0,31.0,False +120,31.0,34.0,25.0,29.0,30.0,27.0,31.0,33.0,False +121,29.0,36.0,25.0,26.0,28.0,36.0,28.0,32.0,False +122,34.0,22.0,27.0,29.0,31.0,28.0,34.0,35.0,False +123,29.0,32.0,37.0,34.0,31.0,20.0,30.0,27.0,False +124,30.0,33.0,26.0,29.0,28.0,28.0,36.0,30.0,False +125,33.0,31.0,31.0,31.0,22.0,37.0,27.0,28.0,False +126,33.0,24.0,31.0,31.0,26.0,34.0,31.0,30.0,False +127,25.0,27.0,29.0,27.0,31.0,35.0,32.0,34.0,False +128,22.0,32.0,28.0,30.0,33.0,28.0,29.0,38.0,False +129,33.0,31.0,25.0,25.0,37.0,30.0,35.0,24.0,False +130,30.0,33.0,32.0,29.0,30.0,28.0,27.0,31.0,False +131,24.0,29.0,27.0,30.0,21.0,32.0,33.0,44.0,False +132,26.0,26.0,33.0,33.0,33.0,33.0,27.0,29.0,False +133,30.0,31.0,29.0,33.0,22.0,26.0,36.0,33.0,False +134,28.0,30.0,22.0,29.0,32.0,39.0,30.0,30.0,False +135,33.0,27.0,30.0,29.0,29.0,31.0,31.0,30.0,False +136,35.0,23.0,31.0,34.0,29.0,26.0,35.0,27.0,False +137,28.0,21.0,32.0,32.0,36.0,32.0,29.0,30.0,False +138,32.0,24.0,28.0,37.0,26.0,25.0,30.0,38.0,False +139,35.0,39.0,32.0,27.0,26.0,27.0,30.0,24.0,False +140,26.0,28.0,35.0,23.0,35.0,36.0,26.0,31.0,False +141,30.0,27.0,27.0,34.0,24.0,29.0,32.0,37.0,False +142,29.0,27.0,28.0,30.0,33.0,31.0,27.0,35.0,False +143,40.0,32.0,30.0,27.0,29.0,26.0,26.0,30.0,False +144,30.0,30.0,36.0,30.0,32.0,24.0,30.0,28.0,False +145,21.0,37.0,27.0,31.0,29.0,33.0,25.0,37.0,False +146,28.0,35.0,24.0,30.0,34.0,35.0,29.0,25.0,False +147,28.0,30.0,29.0,30.0,21.0,32.0,38.0,32.0,False +148,24.0,33.0,29.0,31.0,32.0,30.0,29.0,32.0,False +149,30.0,27.0,29.0,31.0,27.0,28.0,31.0,37.0,False +150,32.0,36.0,30.0,32.0,31.0,26.0,27.0,26.0,False +151,34.0,32.0,26.0,27.0,33.0,28.0,35.0,25.0,False +152,31.0,36.0,23.0,28.0,33.0,29.0,27.0,33.0,False +153,25.0,37.0,31.0,36.0,24.0,29.0,31.0,27.0,False +154,32.0,33.0,31.0,25.0,26.0,33.0,29.0,31.0,False +155,22.0,25.0,29.0,32.0,37.0,34.0,34.0,27.0,False +156,32.0,35.0,29.0,25.0,31.0,31.0,29.0,28.0,False +157,35.0,26.0,34.0,28.0,32.0,24.0,33.0,28.0,False +158,38.0,27.0,38.0,26.0,24.0,28.0,29.0,30.0,False +159,21.0,30.0,29.0,33.0,28.0,31.0,33.0,35.0,False +160,33.0,29.0,24.0,29.0,33.0,33.0,30.0,29.0,False +161,33.0,28.0,33.0,25.0,30.0,37.0,24.0,30.0,False 
+162,25.0,25.0,38.0,32.0,28.0,29.0,27.0,36.0,False +163,29.0,26.0,34.0,27.0,31.0,28.0,37.0,28.0,False +164,30.0,33.0,27.0,38.0,34.0,27.0,25.0,26.0,False +165,22.0,29.0,31.0,36.0,32.0,26.0,27.0,37.0,False +166,29.0,31.0,31.0,39.0,31.0,28.0,24.0,27.0,False +167,29.0,30.0,32.0,26.0,35.0,28.0,34.0,26.0,False +168,36.0,23.0,29.0,26.0,36.0,31.0,27.0,32.0,False +169,28.0,28.0,35.0,25.0,31.0,30.0,31.0,32.0,False +170,27.0,32.0,26.0,35.0,33.0,36.0,29.0,22.0,False +171,30.0,38.0,30.0,26.0,28.0,28.0,27.0,33.0,False +172,27.0,25.0,25.0,28.0,27.0,33.0,37.0,38.0,False +173,33.0,24.0,38.0,27.0,28.0,34.0,24.0,32.0,False +174,27.0,30.0,23.0,25.0,38.0,30.0,39.0,28.0,False +175,28.0,27.0,40.0,32.0,31.0,31.0,26.0,25.0,False +176,27.0,32.0,24.0,29.0,31.0,37.0,27.0,33.0,False +177,35.0,29.0,24.0,27.0,26.0,34.0,40.0,25.0,False +178,31.0,33.0,33.0,21.0,22.0,28.0,33.0,39.0,False +179,28.0,28.0,27.0,32.0,35.0,35.0,27.0,28.0,False +180,31.0,29.0,31.0,28.0,41.0,30.0,26.0,24.0,False +181,30.0,29.0,34.0,35.0,32.0,25.0,22.0,33.0,False +182,23.0,25.0,36.0,35.0,33.0,29.0,34.0,25.0,False +183,27.0,33.0,35.0,31.0,37.0,22.0,27.0,28.0,False +184,36.0,26.0,31.0,27.0,29.0,28.0,32.0,31.0,False +185,31.0,33.0,31.0,34.0,33.0,27.0,28.0,23.0,False +186,33.0,38.0,26.0,23.0,29.0,29.0,30.0,32.0,False +187,32.0,29.0,33.0,31.0,31.0,27.0,28.0,29.0,False +188,26.0,32.0,23.0,29.0,30.0,34.0,33.0,33.0,False +189,30.0,27.0,31.0,25.0,28.0,31.0,37.0,31.0,False +190,32.0,36.0,31.0,28.0,25.0,22.0,34.0,32.0,False +191,35.0,25.0,33.0,28.0,31.0,28.0,33.0,27.0,False +192,36.0,32.0,27.0,26.0,35.0,24.0,30.0,30.0,False +193,34.0,24.0,36.0,27.0,30.0,30.0,26.0,33.0,False +194,22.0,28.0,31.0,34.0,29.0,34.0,31.0,31.0,False +195,34.0,31.0,30.0,27.0,25.0,36.0,28.0,29.0,False +196,23.0,25.0,36.0,33.0,32.0,27.0,38.0,26.0,False +197,29.0,24.0,29.0,22.0,35.0,34.0,32.0,35.0,False +198,36.0,27.0,31.0,27.0,26.0,36.0,26.0,31.0,False +199,31.0,29.0,27.0,30.0,25.0,31.0,33.0,34.0,False +200,29.0,32.0,28.0,28.0,31.0,30.0,29.0,33.0,False +201,29.0,28.0,29.0,33.0,28.0,27.0,31.0,35.0,False +202,30.0,29.0,25.0,31.0,34.0,35.0,31.0,25.0,False +203,26.0,29.0,30.0,41.0,24.0,26.0,36.0,28.0,False +204,29.0,31.0,19.0,33.0,41.0,25.0,34.0,28.0,False +205,34.0,33.0,28.0,25.0,29.0,33.0,30.0,28.0,False +206,30.0,34.0,31.0,34.0,26.0,29.0,22.0,34.0,False +207,29.0,33.0,32.0,32.0,29.0,25.0,29.0,31.0,False +208,31.0,25.0,32.0,32.0,29.0,35.0,24.0,32.0,False +209,22.0,27.0,27.0,30.0,30.0,28.0,37.0,39.0,False +210,26.0,34.0,28.0,37.0,27.0,36.0,19.0,33.0,False +211,27.0,30.0,32.0,30.0,31.0,26.0,41.0,23.0,False +212,29.0,35.0,27.0,30.0,28.0,29.0,31.0,31.0,False +213,31.0,30.0,27.0,31.0,30.0,33.0,24.0,34.0,False +214,34.0,34.0,33.0,29.0,34.0,32.0,24.0,20.0,False +215,31.0,26.0,32.0,28.0,27.0,35.0,31.0,30.0,False +216,27.0,23.0,30.0,40.0,28.0,27.0,30.0,35.0,False +217,35.0,20.0,31.0,32.0,28.0,28.0,31.0,35.0,False +218,28.0,29.0,20.0,30.0,33.0,35.0,39.0,26.0,False +219,27.0,27.0,36.0,29.0,28.0,29.0,41.0,23.0,False +220,27.0,29.0,29.0,25.0,26.0,31.0,42.0,31.0,False +221,33.0,27.0,22.0,26.0,36.0,31.0,36.0,29.0,False +222,31.0,34.0,31.0,28.0,32.0,29.0,32.0,23.0,False +223,27.0,28.0,29.0,37.0,29.0,29.0,33.0,28.0,False +224,27.0,20.0,30.0,32.0,30.0,36.0,25.0,40.0,False +225,24.0,36.0,27.0,36.0,31.0,33.0,29.0,24.0,False +226,28.0,30.0,32.0,33.0,28.0,27.0,31.0,31.0,False +227,27.0,32.0,26.0,25.0,32.0,36.0,33.0,29.0,False +228,34.0,33.0,31.0,24.0,35.0,30.0,26.0,27.0,False +229,30.0,36.0,32.0,29.0,34.0,30.0,23.0,26.0,False +230,30.0,29.0,29.0,27.0,32.0,38.0,27.0,28.0,False 
+231,35.0,27.0,30.0,20.0,27.0,34.0,35.0,32.0,False +232,35.0,25.0,39.0,33.0,28.0,28.0,28.0,24.0,False +233,24.0,27.0,36.0,34.0,32.0,29.0,23.0,35.0,False +234,34.0,28.0,25.0,18.0,38.0,36.0,31.0,30.0,False +235,32.0,21.0,37.0,24.0,25.0,32.0,36.0,33.0,False +236,33.0,30.0,36.0,24.0,29.0,26.0,30.0,32.0,False +237,31.0,31.0,32.0,29.0,34.0,26.0,25.0,32.0,False +238,25.0,25.0,35.0,22.0,27.0,35.0,32.0,39.0,False +239,29.0,31.0,25.0,27.0,28.0,37.0,35.0,28.0,False +240,26.0,24.0,38.0,26.0,28.0,30.0,33.0,35.0,False +241,32.0,32.0,40.0,25.0,29.0,31.0,27.0,24.0,False +242,36.0,31.0,33.0,24.0,28.0,29.0,30.0,29.0,False +243,33.0,29.0,34.0,31.0,27.0,28.0,31.0,27.0,False +244,26.0,29.0,31.0,26.0,34.0,33.0,32.0,29.0,False +245,23.0,30.0,30.0,33.0,29.0,29.0,30.0,36.0,False +246,29.0,29.0,30.0,32.0,34.0,32.0,21.0,33.0,False +247,31.0,30.0,28.0,37.0,26.0,34.0,24.0,30.0,False +248,32.0,28.0,32.0,28.0,33.0,27.0,28.0,32.0,False +249,34.0,28.0,30.0,31.0,35.0,29.0,27.0,26.0,False +250,33.0,24.0,25.0,30.0,33.0,26.0,39.0,30.0,False +251,35.0,28.0,36.0,28.0,26.0,29.0,34.0,24.0,False +252,26.0,28.0,32.0,36.0,31.0,26.0,32.0,29.0,False +253,30.0,32.0,29.0,33.0,30.0,26.0,24.0,36.0,False +254,30.0,37.0,29.0,29.0,29.0,26.0,29.0,31.0,False +255,27.0,31.0,40.0,25.0,25.0,26.0,30.0,36.0,False +256,23.0,29.0,34.0,37.0,31.0,30.0,29.0,27.0,False +257,33.0,31.0,34.0,27.0,28.0,36.0,27.0,24.0,False +258,33.0,35.0,41.0,22.0,30.0,24.0,33.0,22.0,False +259,27.0,29.0,30.0,32.0,29.0,33.0,23.0,37.0,False +260,35.0,29.0,29.0,35.0,32.0,30.0,23.0,27.0,False +261,33.0,23.0,32.0,35.0,28.0,31.0,30.0,28.0,False +262,30.0,29.0,28.0,32.0,29.0,33.0,28.0,31.0,False +263,24.0,27.0,35.0,37.0,31.0,23.0,28.0,35.0,False +264,26.0,28.0,31.0,40.0,30.0,26.0,29.0,30.0,False +265,33.0,32.0,29.0,33.0,29.0,25.0,33.0,26.0,False +266,32.0,26.0,31.0,24.0,31.0,35.0,35.0,26.0,False +267,33.0,32.0,30.0,32.0,27.0,29.0,27.0,30.0,False +268,31.0,34.0,33.0,32.0,28.0,23.0,28.0,31.0,False +269,30.0,27.0,28.0,22.0,25.0,38.0,42.0,28.0,False +270,25.0,40.0,22.0,32.0,26.0,38.0,28.0,29.0,False +271,27.0,26.0,28.0,28.0,35.0,37.0,28.0,31.0,False +272,33.0,26.0,40.0,30.0,25.0,27.0,32.0,27.0,False +273,31.0,32.0,31.0,31.0,28.0,33.0,29.0,25.0,False +274,27.0,28.0,28.0,31.0,31.0,35.0,28.0,32.0,False +275,30.0,21.0,36.0,31.0,29.0,34.0,23.0,36.0,False +276,30.0,31.0,32.0,32.0,29.0,38.0,22.0,26.0,False +277,28.0,30.0,36.0,34.0,29.0,30.0,27.0,26.0,False +278,26.0,37.0,24.0,32.0,31.0,25.0,41.0,24.0,False +279,29.0,27.0,25.0,29.0,29.0,30.0,39.0,32.0,False +280,35.0,34.0,25.0,39.0,32.0,24.0,28.0,23.0,False +281,28.0,27.0,27.0,34.0,33.0,32.0,29.0,30.0,False +282,31.0,24.0,32.0,28.0,32.0,34.0,36.0,23.0,False +283,27.0,34.0,35.0,29.0,29.0,28.0,31.0,27.0,False +284,32.0,27.0,30.0,32.0,36.0,25.0,30.0,28.0,False +285,27.0,28.0,30.0,37.0,31.0,28.0,29.0,30.0,False +286,31.0,32.0,35.0,30.0,33.0,27.0,27.0,25.0,False +287,34.0,29.0,31.0,32.0,27.0,30.0,33.0,24.0,False +288,29.0,33.0,29.0,26.0,33.0,24.0,35.0,31.0,False +289,29.0,30.0,27.0,29.0,25.0,33.0,30.0,37.0,False +290,34.0,28.0,26.0,38.0,28.0,28.0,27.0,31.0,False +291,32.0,32.0,31.0,32.0,26.0,31.0,27.0,29.0,False +292,27.0,33.0,35.0,28.0,31.0,22.0,27.0,37.0,False +293,37.0,22.0,32.0,31.0,28.0,33.0,24.0,33.0,False +294,29.0,31.0,25.0,29.0,27.0,33.0,28.0,38.0,False +295,35.0,26.0,26.0,32.0,26.0,26.0,33.0,36.0,False +296,24.0,31.0,24.0,31.0,38.0,31.0,26.0,35.0,False +297,32.0,36.0,27.0,31.0,34.0,25.0,31.0,24.0,False +298,27.0,31.0,34.0,27.0,28.0,34.0,26.0,33.0,False +299,25.0,28.0,24.0,33.0,39.0,30.0,25.0,36.0,False 
+300,27.0,25.0,36.0,30.0,32.0,33.0,27.0,30.0,False +301,32.0,28.0,37.0,28.0,28.0,32.0,27.0,28.0,False +302,32.0,35.0,30.0,31.0,23.0,31.0,28.0,30.0,False +303,24.0,32.0,33.0,27.0,30.0,37.0,30.0,27.0,False +304,23.0,34.0,31.0,29.0,35.0,27.0,33.0,28.0,False +305,29.0,23.0,31.0,26.0,34.0,32.0,32.0,33.0,False +306,31.0,32.0,39.0,20.0,24.0,27.0,31.0,36.0,False +307,33.0,30.0,30.0,29.0,38.0,26.0,25.0,29.0,False +308,28.0,24.0,34.0,29.0,32.0,35.0,25.0,33.0,False +309,34.0,34.0,34.0,26.0,23.0,30.0,27.0,32.0,False +310,31.0,27.0,28.0,30.0,34.0,30.0,28.0,32.0,False +311,32.0,33.0,27.0,27.0,23.0,32.0,38.0,28.0,False +312,24.0,31.0,30.0,35.0,30.0,32.0,24.0,34.0,False +313,33.0,27.0,31.0,25.0,28.0,31.0,28.0,37.0,False +314,24.0,32.0,32.0,27.0,24.0,26.0,36.0,39.0,False +315,33.0,26.0,27.0,25.0,34.0,34.0,31.0,30.0,False +316,23.0,29.0,30.0,31.0,30.0,28.0,37.0,32.0,False +317,30.0,21.0,28.0,33.0,29.0,29.0,42.0,28.0,False +318,29.0,31.0,32.0,37.0,33.0,29.0,24.0,25.0,False +319,26.0,24.0,40.0,28.0,32.0,26.0,35.0,29.0,False +320,34.0,27.0,25.0,29.0,30.0,31.0,29.0,35.0,False +321,35.0,34.0,25.0,33.0,24.0,24.0,35.0,30.0,False +322,31.0,30.0,30.0,27.0,43.0,22.0,28.0,29.0,False +323,27.0,35.0,32.0,36.0,24.0,34.0,27.0,25.0,False +324,32.0,36.0,27.0,27.0,31.0,32.0,25.0,30.0,False +325,31.0,30.0,27.0,27.0,33.0,33.0,30.0,29.0,False +326,33.0,31.0,21.0,29.0,33.0,30.0,36.0,27.0,False +327,29.0,27.0,27.0,34.0,34.0,27.0,34.0,28.0,False +328,22.0,28.0,30.0,41.0,23.0,30.0,30.0,36.0,False +329,36.0,29.0,32.0,27.0,32.0,27.0,27.0,30.0,False +330,25.0,31.0,31.0,31.0,28.0,33.0,26.0,35.0,False +331,24.0,32.0,30.0,32.0,31.0,28.0,34.0,29.0,False +332,27.0,22.0,28.0,35.0,37.0,30.0,32.0,29.0,False +333,28.0,23.0,35.0,23.0,31.0,33.0,38.0,29.0,False +334,32.0,31.0,25.0,30.0,33.0,31.0,33.0,25.0,False +335,20.0,31.0,38.0,29.0,24.0,29.0,34.0,35.0,False +336,40.0,31.0,31.0,33.0,35.0,23.0,20.0,27.0,False +337,31.0,25.0,31.0,30.0,32.0,31.0,32.0,28.0,False +338,27.0,28.0,28.0,32.0,27.0,30.0,33.0,35.0,False +339,30.0,28.0,28.0,32.0,28.0,32.0,33.0,29.0,False +340,38.0,24.0,27.0,34.0,29.0,30.0,27.0,31.0,False +341,33.0,28.0,31.0,32.0,36.0,22.0,29.0,29.0,False +342,21.0,30.0,27.0,34.0,35.0,26.0,32.0,35.0,False +343,30.0,30.0,39.0,36.0,26.0,29.0,25.0,25.0,False +344,23.0,25.0,36.0,24.0,29.0,30.0,34.0,39.0,False +345,30.0,26.0,32.0,38.0,28.0,28.0,28.0,30.0,False +346,31.0,31.0,30.0,28.0,36.0,27.0,30.0,27.0,False +347,32.0,32.0,35.0,23.0,26.0,31.0,27.0,34.0,False +348,27.0,38.0,27.0,35.0,37.0,20.0,32.0,24.0,False +349,28.0,23.0,31.0,34.0,31.0,29.0,30.0,34.0,False +350,28.0,35.0,27.0,30.0,27.0,26.0,33.0,34.0,False +351,20.0,33.0,30.0,30.0,32.0,30.0,31.0,34.0,False +352,30.0,28.0,29.0,29.0,31.0,34.0,33.0,26.0,False +353,24.0,25.0,37.0,33.0,32.0,27.0,35.0,27.0,False +354,29.0,26.0,32.0,27.0,31.0,31.0,34.0,30.0,False +355,29.0,29.0,24.0,31.0,36.0,31.0,27.0,33.0,False +356,31.0,26.0,25.0,34.0,32.0,32.0,30.0,30.0,False +357,33.0,31.0,35.0,27.0,35.0,28.0,27.0,24.0,False +358,25.0,31.0,26.0,31.0,31.0,30.0,34.0,32.0,False +359,27.0,28.0,30.0,25.0,29.0,35.0,29.0,37.0,False +360,29.0,24.0,26.0,30.0,36.0,37.0,26.0,32.0,False +361,26.0,34.0,24.0,25.0,37.0,27.0,37.0,30.0,False +362,30.0,31.0,28.0,29.0,22.0,35.0,33.0,32.0,False +363,26.0,30.0,34.0,34.0,29.0,25.0,32.0,30.0,False +364,33.0,30.0,29.0,28.0,35.0,21.0,28.0,36.0,False +365,25.0,26.0,36.0,25.0,26.0,29.0,43.0,30.0,False +366,32.0,29.0,36.0,26.0,33.0,23.0,30.0,31.0,False +367,37.0,35.0,29.0,31.0,30.0,24.0,25.0,29.0,False +368,31.0,30.0,27.0,30.0,29.0,40.0,27.0,26.0,False 
+369,30.0,27.0,33.0,25.0,30.0,29.0,32.0,34.0,False +370,31.0,30.0,27.0,36.0,31.0,23.0,34.0,28.0,False +371,38.0,24.0,30.0,30.0,25.0,36.0,31.0,26.0,False +372,33.0,32.0,24.0,26.0,37.0,25.0,36.0,27.0,False +373,30.0,29.0,31.0,31.0,28.0,31.0,37.0,23.0,False +374,34.0,31.0,33.0,30.0,26.0,30.0,29.0,27.0,False +375,22.0,25.0,31.0,35.0,31.0,39.0,33.0,24.0,False +376,27.0,32.0,30.0,26.0,31.0,30.0,34.0,30.0,False +377,27.0,29.0,25.0,35.0,32.0,31.0,32.0,29.0,False +378,24.0,36.0,29.0,33.0,25.0,34.0,30.0,29.0,False +379,38.0,30.0,30.0,29.0,31.0,25.0,25.0,32.0,False +380,27.0,27.0,26.0,29.0,40.0,32.0,31.0,28.0,False +381,40.0,24.0,25.0,29.0,26.0,29.0,37.0,30.0,False +382,38.0,28.0,34.0,26.0,34.0,30.0,28.0,22.0,False +383,25.0,36.0,26.0,36.0,26.0,28.0,32.0,31.0,False +384,35.0,28.0,31.0,32.0,29.0,26.0,27.0,32.0,False +385,34.0,23.0,31.0,32.0,31.0,26.0,30.0,33.0,False +386,36.0,26.0,28.0,36.0,28.0,28.0,30.0,28.0,False +387,30.0,32.0,26.0,33.0,24.0,30.0,34.0,31.0,False +388,29.0,34.0,31.0,32.0,33.0,26.0,26.0,29.0,False +389,23.0,35.0,29.0,29.0,32.0,22.0,34.0,36.0,False +390,29.0,26.0,31.0,22.0,37.0,36.0,25.0,34.0,False +391,24.0,28.0,28.0,29.0,36.0,36.0,32.0,27.0,False +392,29.0,28.0,37.0,30.0,29.0,35.0,21.0,31.0,False +393,23.0,33.0,33.0,40.0,35.0,20.0,32.0,24.0,False +394,32.0,34.0,32.0,33.0,22.0,30.0,31.0,26.0,False +395,28.0,31.0,20.0,32.0,34.0,30.0,37.0,28.0,False +396,28.0,26.0,34.0,33.0,34.0,31.0,29.0,25.0,False +397,37.0,36.0,28.0,22.0,26.0,31.0,28.0,32.0,False +398,30.0,36.0,28.0,29.0,28.0,27.0,41.0,21.0,False +399,28.0,24.0,26.0,32.0,34.0,34.0,35.0,27.0,False +400,32.0,24.0,34.0,26.0,34.0,28.0,32.0,30.0,False +401,25.0,28.0,32.0,31.0,27.0,26.0,39.0,32.0,False +402,26.0,33.0,30.0,25.0,37.0,20.0,34.0,35.0,False +403,33.0,28.0,27.0,29.0,27.0,37.0,30.0,29.0,False +404,24.0,29.0,26.0,28.0,34.0,33.0,31.0,35.0,False +405,28.0,30.0,23.0,31.0,39.0,28.0,29.0,32.0,False +406,30.0,18.0,41.0,30.0,25.0,33.0,31.0,32.0,False +407,27.0,36.0,23.0,35.0,24.0,36.0,28.0,31.0,False +408,24.0,30.0,25.0,29.0,29.0,37.0,35.0,31.0,False +409,24.0,31.0,36.0,33.0,27.0,29.0,29.0,31.0,False +410,28.0,31.0,27.0,29.0,23.0,35.0,36.0,31.0,False +411,34.0,30.0,30.0,28.0,31.0,27.0,30.0,30.0,False +412,28.0,32.0,35.0,26.0,35.0,29.0,27.0,28.0,False +413,30.0,28.0,34.0,34.0,26.0,27.0,32.0,29.0,False +414,25.0,27.0,31.0,32.0,31.0,31.0,31.0,32.0,False +415,46.0,26.0,22.0,28.0,26.0,32.0,29.0,31.0,False +416,32.0,34.0,29.0,28.0,28.0,34.0,27.0,28.0,False +417,29.0,31.0,29.0,25.0,32.0,30.0,39.0,25.0,False +418,33.0,27.0,26.0,36.0,30.0,29.0,33.0,26.0,False +419,30.0,29.0,29.0,24.0,28.0,34.0,27.0,39.0,False +420,24.0,34.0,40.0,33.0,26.0,26.0,32.0,25.0,False +421,29.0,33.0,29.0,34.0,27.0,32.0,29.0,27.0,False +422,38.0,29.0,22.0,37.0,25.0,31.0,29.0,29.0,False +423,27.0,21.0,28.0,29.0,41.0,36.0,29.0,29.0,False +424,34.0,30.0,27.0,32.0,31.0,29.0,28.0,29.0,False +425,33.0,25.0,37.0,28.0,28.0,28.0,32.0,29.0,False +426,34.0,34.0,28.0,35.0,36.0,21.0,28.0,24.0,False +427,30.0,26.0,30.0,33.0,31.0,29.0,31.0,30.0,False +428,36.0,29.0,30.0,33.0,25.0,28.0,28.0,31.0,False +429,31.0,38.0,27.0,24.0,30.0,29.0,32.0,29.0,False +430,38.0,27.0,27.0,22.0,36.0,29.0,33.0,28.0,False +431,23.0,35.0,29.0,36.0,26.0,34.0,26.0,31.0,False +432,32.0,29.0,25.0,33.0,25.0,28.0,34.0,34.0,False +433,34.0,41.0,37.0,24.0,23.0,23.0,26.0,32.0,False +434,25.0,30.0,31.0,26.0,32.0,34.0,25.0,37.0,False +435,26.0,30.0,23.0,35.0,31.0,34.0,32.0,29.0,False +436,25.0,32.0,27.0,39.0,30.0,23.0,29.0,35.0,False +437,32.0,29.0,29.0,23.0,28.0,29.0,44.0,26.0,False 
+438,32.0,30.0,30.0,28.0,31.0,27.0,33.0,29.0,False +439,25.0,31.0,32.0,34.0,32.0,35.0,32.0,19.0,False +440,24.0,29.0,34.0,29.0,29.0,28.0,37.0,30.0,False +441,22.0,38.0,34.0,29.0,29.0,26.0,35.0,27.0,False +442,29.0,37.0,24.0,33.0,25.0,28.0,39.0,25.0,False +443,32.0,28.0,31.0,31.0,21.0,30.0,33.0,34.0,False +444,29.0,30.0,25.0,31.0,30.0,23.0,34.0,38.0,False +445,30.0,33.0,34.0,28.0,35.0,25.0,30.0,25.0,False +446,29.0,35.0,26.0,23.0,27.0,34.0,36.0,30.0,False +447,32.0,29.0,36.0,31.0,26.0,26.0,32.0,28.0,False +448,34.0,30.0,30.0,28.0,28.0,25.0,32.0,33.0,False +449,28.0,28.0,28.0,26.0,30.0,32.0,33.0,35.0,False +450,34.0,29.0,27.0,35.0,28.0,24.0,36.0,27.0,False +451,32.0,30.0,27.0,28.0,27.0,25.0,38.0,33.0,False +452,33.0,23.0,29.0,36.0,31.0,32.0,32.0,24.0,False +453,37.0,38.0,19.0,29.0,23.0,34.0,34.0,26.0,False +454,26.0,22.0,31.0,36.0,31.0,25.0,34.0,35.0,False +455,29.0,27.0,36.0,33.0,24.0,31.0,30.0,30.0,False +456,28.0,31.0,32.0,30.0,28.0,25.0,33.0,33.0,False +457,27.0,33.0,28.0,32.0,29.0,32.0,36.0,23.0,False +458,28.0,26.0,36.0,29.0,33.0,24.0,29.0,35.0,False +459,33.0,32.0,23.0,31.0,30.0,22.0,32.0,37.0,False +460,38.0,34.0,27.0,32.0,26.0,34.0,31.0,18.0,False +461,34.0,28.0,26.0,33.0,29.0,31.0,27.0,32.0,False +462,32.0,28.0,24.0,32.0,27.0,28.0,39.0,30.0,False +463,27.0,32.0,27.0,26.0,21.0,38.0,35.0,34.0,False +464,28.0,35.0,34.0,30.0,23.0,33.0,28.0,29.0,False +465,32.0,31.0,33.0,29.0,24.0,33.0,27.0,31.0,False +466,36.0,24.0,31.0,31.0,24.0,35.0,31.0,28.0,False +467,27.0,28.0,33.0,18.0,31.0,34.0,34.0,35.0,False +468,26.0,31.0,30.0,34.0,30.0,34.0,31.0,24.0,False +469,33.0,29.0,30.0,40.0,25.0,28.0,22.0,33.0,False +470,33.0,28.0,28.0,32.0,30.0,32.0,32.0,25.0,False +471,25.0,31.0,32.0,28.0,26.0,25.0,35.0,38.0,False +472,28.0,27.0,35.0,34.0,29.0,25.0,34.0,28.0,False +473,36.0,28.0,30.0,25.0,29.0,35.0,25.0,32.0,False +474,27.0,31.0,31.0,22.0,28.0,37.0,32.0,32.0,False +475,20.0,34.0,34.0,27.0,33.0,33.0,33.0,26.0,False +476,26.0,30.0,22.0,34.0,31.0,28.0,40.0,29.0,False +477,28.0,29.0,27.0,33.0,31.0,34.0,25.0,33.0,False +478,28.0,33.0,32.0,26.0,27.0,25.0,37.0,32.0,False +479,22.0,29.0,32.0,32.0,36.0,29.0,32.0,28.0,False +480,29.0,26.0,29.0,29.0,35.0,25.0,33.0,34.0,False +481,24.0,32.0,34.0,32.0,30.0,32.0,29.0,27.0,False +482,28.0,28.0,27.0,30.0,32.0,34.0,26.0,35.0,False +483,29.0,28.0,31.0,33.0,28.0,35.0,27.0,29.0,False +484,39.0,27.0,29.0,28.0,24.0,30.0,35.0,28.0,False +485,31.0,26.0,25.0,32.0,26.0,35.0,39.0,26.0,False +486,36.0,34.0,32.0,28.0,25.0,28.0,25.0,32.0,False +487,39.0,30.0,30.0,22.0,32.0,28.0,35.0,24.0,False +488,22.0,30.0,35.0,21.0,31.0,30.0,38.0,33.0,False +489,27.0,26.0,32.0,38.0,30.0,36.0,28.0,23.0,False +490,27.0,29.0,25.0,38.0,37.0,35.0,25.0,24.0,False +491,22.0,31.0,31.0,30.0,36.0,32.0,28.0,30.0,False +492,39.0,31.0,22.0,34.0,30.0,27.0,26.0,31.0,False +493,29.0,31.0,28.0,28.0,30.0,38.0,31.0,25.0,False +494,30.0,35.0,33.0,30.0,24.0,30.0,29.0,29.0,False +495,32.0,30.0,25.0,32.0,30.0,32.0,26.0,33.0,False +496,35.0,33.0,22.0,30.0,35.0,22.0,32.0,31.0,False +497,29.0,31.0,27.0,29.0,27.0,37.0,33.0,27.0,False +498,31.0,33.0,27.0,33.0,25.0,30.0,32.0,29.0,False +499,27.0,26.0,32.0,31.0,26.0,32.0,37.0,29.0,False +500,31.0,29.0,37.0,25.0,29.0,31.0,32.0,26.0,False +501,24.0,38.0,29.0,28.0,37.0,31.0,25.0,28.0,False +502,34.0,28.0,31.0,30.0,30.0,35.0,26.0,26.0,False +503,32.0,32.0,24.0,24.0,38.0,23.0,37.0,30.0,False +504,25.0,33.0,34.0,26.0,33.0,26.0,34.0,29.0,False +505,30.0,27.0,35.0,37.0,27.0,27.0,31.0,26.0,False +506,32.0,30.0,32.0,30.0,27.0,30.0,31.0,28.0,False 
+507,31.0,28.0,30.0,33.0,33.0,26.0,25.0,34.0,False +508,34.0,33.0,37.0,22.0,29.0,30.0,28.0,27.0,False +509,34.0,25.0,18.0,28.0,36.0,31.0,37.0,31.0,False +510,26.0,35.0,26.0,38.0,23.0,34.0,31.0,27.0,False +511,32.0,25.0,33.0,39.0,31.0,20.0,28.0,32.0,False +512,28.0,36.0,36.0,32.0,26.0,24.0,31.0,27.0,False +513,30.0,26.0,35.0,30.0,33.0,34.0,26.0,26.0,False +514,34.0,30.0,36.0,28.0,32.0,28.0,32.0,20.0,False +515,30.0,31.0,28.0,27.0,29.0,40.0,27.0,28.0,False +516,26.0,27.0,33.0,30.0,24.0,33.0,31.0,36.0,False +517,31.0,27.0,31.0,36.0,26.0,33.0,30.0,26.0,False +518,36.0,26.0,28.0,31.0,30.0,31.0,23.0,35.0,False +519,25.0,30.0,30.0,29.0,26.0,29.0,29.0,42.0,False +520,30.0,22.0,30.0,33.0,25.0,32.0,42.0,26.0,False +521,22.0,24.0,37.0,31.0,32.0,33.0,31.0,30.0,False +522,30.0,42.0,23.0,33.0,24.0,37.0,26.0,25.0,False +523,23.0,35.0,33.0,34.0,26.0,29.0,32.0,28.0,False +524,26.0,25.0,31.0,28.0,31.0,35.0,35.0,29.0,False +525,24.0,28.0,35.0,31.0,27.0,30.0,34.0,31.0,False +526,32.0,24.0,37.0,31.0,30.0,28.0,25.0,33.0,False +527,34.0,24.0,32.0,37.0,22.0,34.0,28.0,29.0,False +528,28.0,33.0,30.0,31.0,33.0,33.0,25.0,27.0,False +529,24.0,29.0,36.0,30.0,32.0,27.0,31.0,31.0,False +530,32.0,27.0,25.0,33.0,37.0,27.0,27.0,32.0,False +531,28.0,22.0,27.0,38.0,27.0,28.0,37.0,33.0,False +532,36.0,28.0,29.0,28.0,23.0,26.0,38.0,32.0,False +533,26.0,22.0,38.0,27.0,33.0,28.0,32.0,34.0,False +534,23.0,30.0,36.0,40.0,26.0,29.0,25.0,31.0,False +535,35.0,33.0,32.0,33.0,34.0,28.0,29.0,16.0,False +536,31.0,31.0,29.0,25.0,31.0,30.0,28.0,35.0,False +537,27.0,30.0,36.0,23.0,36.0,25.0,29.0,34.0,False +538,27.0,30.0,35.0,31.0,38.0,26.0,24.0,29.0,False +539,24.0,29.0,33.0,33.0,30.0,33.0,26.0,32.0,False +540,28.0,37.0,33.0,28.0,33.0,31.0,24.0,26.0,False +541,35.0,23.0,26.0,29.0,29.0,30.0,34.0,34.0,False +542,39.0,30.0,33.0,26.0,32.0,25.0,24.0,31.0,False +543,33.0,26.0,25.0,35.0,29.0,31.0,32.0,29.0,False +544,25.0,29.0,26.0,36.0,31.0,27.0,32.0,34.0,False +545,27.0,33.0,35.0,28.0,31.0,33.0,26.0,27.0,False +546,32.0,25.0,40.0,26.0,33.0,28.0,28.0,28.0,False +547,26.0,37.0,31.0,26.0,27.0,27.0,23.0,43.0,False +548,32.0,32.0,33.0,26.0,30.0,25.0,28.0,34.0,False +549,28.0,24.0,32.0,26.0,38.0,31.0,29.0,32.0,False +550,33.0,29.0,33.0,30.0,34.0,33.0,25.0,23.0,False +551,30.0,26.0,27.0,40.0,28.0,25.0,29.0,35.0,False +552,37.0,24.0,27.0,36.0,32.0,20.0,35.0,29.0,False +553,32.0,33.0,32.0,33.0,28.0,25.0,33.0,24.0,False +554,36.0,32.0,31.0,26.0,29.0,24.0,34.0,28.0,False +555,24.0,33.0,28.0,30.0,30.0,28.0,31.0,36.0,False +556,23.0,34.0,26.0,33.0,34.0,35.0,26.0,29.0,False +557,24.0,34.0,25.0,30.0,31.0,32.0,28.0,36.0,False +558,31.0,32.0,23.0,25.0,32.0,30.0,37.0,30.0,False +559,32.0,27.0,35.0,32.0,27.0,24.0,31.0,32.0,False +560,33.0,28.0,27.0,27.0,34.0,28.0,34.0,29.0,False +561,34.0,30.0,32.0,31.0,27.0,31.0,21.0,34.0,False +562,31.0,30.0,33.0,28.0,30.0,27.0,37.0,24.0,False +563,30.0,22.0,29.0,23.0,30.0,35.0,37.0,34.0,False +564,21.0,27.0,22.0,38.0,30.0,33.0,33.0,36.0,False +565,38.0,25.0,40.0,27.0,30.0,26.0,29.0,25.0,False +566,29.0,29.0,27.0,30.0,30.0,31.0,36.0,28.0,False +567,24.0,42.0,36.0,31.0,35.0,23.0,27.0,22.0,False +568,22.0,37.0,33.0,28.0,34.0,24.0,33.0,29.0,False +569,23.0,33.0,27.0,33.0,31.0,36.0,27.0,30.0,False +570,29.0,25.0,34.0,29.0,28.0,35.0,23.0,37.0,False +571,31.0,41.0,39.0,25.0,30.0,22.0,30.0,22.0,False +572,28.0,36.0,28.0,30.0,31.0,29.0,32.0,26.0,False +573,31.0,29.0,32.0,28.0,33.0,26.0,32.0,29.0,False +574,31.0,33.0,29.0,32.0,30.0,31.0,28.0,26.0,False +575,26.0,35.0,29.0,24.0,30.0,34.0,26.0,36.0,False 
+576,29.0,35.0,30.0,24.0,30.0,30.0,32.0,30.0,False +577,25.0,31.0,38.0,36.0,21.0,26.0,34.0,29.0,False +578,35.0,35.0,28.0,22.0,27.0,31.0,27.0,35.0,False +579,22.0,34.0,31.0,34.0,26.0,22.0,39.0,32.0,False +580,29.0,28.0,26.0,34.0,21.0,40.0,32.0,30.0,False +581,38.0,31.0,26.0,25.0,32.0,28.0,32.0,28.0,False +582,27.0,34.0,33.0,23.0,30.0,33.0,29.0,31.0,False +583,28.0,38.0,26.0,22.0,43.0,27.0,29.0,27.0,False +584,32.0,28.0,31.0,27.0,31.0,29.0,26.0,36.0,False +585,30.0,26.0,34.0,29.0,30.0,29.0,28.0,34.0,False +586,27.0,30.0,35.0,34.0,27.0,31.0,26.0,30.0,False +587,38.0,26.0,31.0,26.0,36.0,32.0,28.0,23.0,False +588,34.0,24.0,31.0,31.0,22.0,29.0,37.0,32.0,False +589,24.0,30.0,36.0,24.0,25.0,35.0,33.0,33.0,False +590,28.0,40.0,27.0,30.0,33.0,33.0,22.0,27.0,False +591,34.0,41.0,30.0,32.0,22.0,28.0,26.0,27.0,False +592,26.0,28.0,32.0,27.0,33.0,28.0,36.0,30.0,False +593,25.0,32.0,36.0,26.0,33.0,35.0,25.0,28.0,False +594,35.0,31.0,30.0,26.0,30.0,24.0,30.0,34.0,False +595,32.0,36.0,31.0,28.0,30.0,34.0,23.0,26.0,False +596,27.0,28.0,32.0,35.0,27.0,29.0,29.0,33.0,False +597,35.0,33.0,32.0,28.0,29.0,28.0,29.0,26.0,False +598,28.0,32.0,36.0,27.0,36.0,21.0,30.0,30.0,False +599,22.0,37.0,36.0,29.0,37.0,25.0,30.0,24.0,False +600,32.0,32.0,32.0,30.0,25.0,28.0,32.0,29.0,False +601,32.0,29.0,32.0,25.0,29.0,30.0,31.0,32.0,False +602,37.0,29.0,21.0,37.0,34.0,30.0,28.0,24.0,False +603,25.0,35.0,28.0,25.0,30.0,40.0,26.0,31.0,False +604,25.0,29.0,31.0,31.0,32.0,30.0,29.0,33.0,False +605,33.0,35.0,33.0,27.0,33.0,21.0,28.0,30.0,False +606,30.0,35.0,33.0,26.0,25.0,33.0,27.0,31.0,False +607,36.0,36.0,26.0,30.0,28.0,21.0,34.0,29.0,False +608,35.0,27.0,34.0,28.0,26.0,30.0,30.0,30.0,False +609,39.0,29.0,34.0,26.0,38.0,22.0,22.0,30.0,False +610,34.0,21.0,28.0,32.0,26.0,29.0,35.0,35.0,False +611,22.0,29.0,33.0,33.0,34.0,27.0,31.0,31.0,False +612,31.0,27.0,29.0,30.0,34.0,26.0,27.0,36.0,False +613,37.0,28.0,27.0,25.0,28.0,24.0,38.0,33.0,False +614,32.0,33.0,27.0,34.0,19.0,34.0,31.0,30.0,False +615,28.0,35.0,31.0,33.0,24.0,27.0,28.0,34.0,False +616,26.0,21.0,31.0,31.0,35.0,27.0,31.0,38.0,False +617,36.0,32.0,26.0,26.0,31.0,31.0,25.0,33.0,False +618,37.0,25.0,35.0,27.0,32.0,29.0,32.0,23.0,False +619,40.0,31.0,31.0,22.0,35.0,23.0,24.0,34.0,False +620,29.0,38.0,28.0,31.0,25.0,26.0,36.0,27.0,False +621,27.0,25.0,28.0,25.0,41.0,30.0,29.0,35.0,False +622,32.0,31.0,31.0,24.0,24.0,36.0,34.0,28.0,False +623,28.0,26.0,32.0,27.0,29.0,28.0,36.0,34.0,False +624,36.0,34.0,35.0,20.0,31.0,23.0,32.0,29.0,False +625,33.0,34.0,30.0,29.0,34.0,24.0,26.0,30.0,False +626,34.0,31.0,26.0,32.0,33.0,29.0,25.0,30.0,False +627,30.0,34.0,28.0,28.0,35.0,30.0,25.0,30.0,False +628,30.0,32.0,28.0,35.0,31.0,25.0,30.0,29.0,False +629,25.0,38.0,35.0,29.0,30.0,20.0,34.0,29.0,False +630,30.0,28.0,30.0,32.0,25.0,29.0,32.0,34.0,False +631,30.0,29.0,29.0,28.0,20.0,33.0,36.0,35.0,False +632,23.0,28.0,32.0,26.0,31.0,34.0,32.0,34.0,False +633,23.0,28.0,31.0,30.0,35.0,29.0,31.0,33.0,False +634,23.0,27.0,29.0,35.0,31.0,36.0,29.0,30.0,False +635,25.0,28.0,31.0,30.0,34.0,32.0,30.0,30.0,False +636,27.0,31.0,35.0,27.0,27.0,31.0,32.0,30.0,False +637,34.0,28.0,28.0,28.0,31.0,32.0,26.0,33.0,False +638,26.0,33.0,29.0,28.0,34.0,23.0,37.0,30.0,False +639,23.0,33.0,33.0,29.0,28.0,34.0,29.0,31.0,False +640,35.0,31.0,26.0,28.0,27.0,31.0,25.0,37.0,False +641,35.0,28.0,20.0,35.0,31.0,31.0,30.0,30.0,False +642,34.0,27.0,32.0,31.0,26.0,30.0,27.0,33.0,False +643,39.0,28.0,21.0,25.0,24.0,37.0,33.0,33.0,False +644,27.0,31.0,29.0,33.0,28.0,31.0,32.0,29.0,False 
+645,30.0,32.0,23.0,31.0,31.0,37.0,27.0,29.0,False +646,34.0,28.0,34.0,23.0,33.0,30.0,28.0,30.0,False +647,32.0,23.0,31.0,36.0,32.0,27.0,32.0,27.0,False +648,29.0,32.0,25.0,28.0,27.0,38.0,27.0,34.0,False +649,26.0,32.0,36.0,28.0,29.0,34.0,23.0,32.0,False +650,31.0,31.0,30.0,22.0,29.0,34.0,35.0,28.0,False +651,30.0,40.0,35.0,22.0,30.0,32.0,31.0,20.0,False +652,30.0,22.0,32.0,28.0,32.0,31.0,35.0,30.0,False +653,31.0,27.0,25.0,29.0,31.0,30.0,32.0,35.0,False +654,26.0,35.0,33.0,24.0,33.0,35.0,26.0,28.0,False +655,33.0,23.0,37.0,25.0,27.0,42.0,33.0,20.0,False +656,34.0,29.0,38.0,34.0,29.0,29.0,24.0,23.0,False +657,26.0,32.0,32.0,34.0,33.0,29.0,28.0,26.0,False +658,35.0,30.0,28.0,29.0,29.0,28.0,32.0,29.0,False +659,36.0,29.0,22.0,30.0,31.0,32.0,31.0,29.0,False +660,29.0,31.0,29.0,25.0,30.0,33.0,28.0,35.0,False +661,32.0,40.0,26.0,27.0,29.0,31.0,30.0,25.0,False +662,35.0,30.0,26.0,28.0,29.0,32.0,34.0,26.0,False +663,23.0,32.0,26.0,27.0,31.0,36.0,32.0,33.0,False +664,31.0,29.0,24.0,33.0,23.0,39.0,33.0,28.0,False +665,33.0,26.0,35.0,26.0,30.0,29.0,31.0,30.0,False +666,27.0,34.0,31.0,29.0,32.0,22.0,34.0,31.0,False +667,36.0,30.0,26.0,36.0,30.0,29.0,24.0,29.0,False +668,31.0,31.0,26.0,33.0,31.0,32.0,26.0,30.0,False +669,30.0,25.0,27.0,40.0,31.0,27.0,30.0,30.0,False +670,30.0,38.0,30.0,35.0,33.0,29.0,27.0,18.0,False +671,38.0,28.0,32.0,27.0,29.0,29.0,30.0,27.0,False +672,27.0,29.0,37.0,34.0,26.0,29.0,29.0,29.0,False +673,25.0,32.0,30.0,30.0,38.0,34.0,25.0,26.0,False +674,23.0,40.0,26.0,35.0,33.0,31.0,25.0,27.0,False +675,30.0,37.0,28.0,27.0,32.0,30.0,28.0,28.0,False +676,26.0,32.0,32.0,28.0,36.0,30.0,30.0,26.0,False +677,37.0,30.0,34.0,28.0,27.0,29.0,27.0,28.0,False +678,34.0,27.0,29.0,30.0,28.0,26.0,37.0,29.0,False +679,31.0,32.0,32.0,36.0,32.0,29.0,27.0,21.0,False +680,31.0,28.0,24.0,27.0,36.0,38.0,25.0,31.0,False +681,24.0,28.0,28.0,30.0,34.0,26.0,38.0,32.0,False +682,30.0,27.0,29.0,36.0,33.0,27.0,32.0,26.0,False +683,28.0,32.0,26.0,30.0,24.0,31.0,35.0,34.0,False +684,28.0,29.0,26.0,34.0,29.0,29.0,32.0,33.0,False +685,22.0,31.0,28.0,26.0,35.0,32.0,31.0,35.0,False +686,32.0,29.0,35.0,31.0,21.0,26.0,31.0,35.0,False +687,34.0,25.0,30.0,26.0,33.0,32.0,28.0,32.0,False +688,32.0,29.0,28.0,26.0,40.0,31.0,25.0,29.0,False +689,29.0,28.0,26.0,32.0,35.0,32.0,27.0,31.0,False +690,27.0,33.0,27.0,29.0,33.0,29.0,30.0,32.0,False +691,29.0,37.0,24.0,33.0,29.0,26.0,29.0,33.0,False +692,38.0,26.0,25.0,32.0,29.0,29.0,31.0,30.0,False +693,33.0,28.0,34.0,28.0,38.0,31.0,27.0,21.0,False +694,25.0,31.0,39.0,31.0,28.0,32.0,33.0,21.0,False +695,29.0,35.0,27.0,29.0,21.0,37.0,33.0,29.0,False +696,20.0,43.0,26.0,28.0,30.0,31.0,30.0,32.0,False +697,33.0,29.0,33.0,28.0,30.0,32.0,32.0,23.0,False +698,27.0,35.0,26.0,32.0,27.0,34.0,28.0,31.0,False +699,28.0,22.0,27.0,30.0,28.0,38.0,34.0,33.0,False +700,34.0,29.0,26.0,26.0,35.0,25.0,31.0,34.0,False +701,30.0,33.0,30.0,34.0,29.0,25.0,34.0,25.0,False +702,23.0,38.0,30.0,25.0,34.0,36.0,24.0,30.0,False +703,35.0,33.0,30.0,34.0,31.0,27.0,26.0,24.0,False +704,34.0,21.0,34.0,28.0,34.0,29.0,28.0,32.0,False +705,34.0,32.0,28.0,33.0,27.0,35.0,28.0,23.0,False +706,28.0,26.0,32.0,23.0,32.0,35.0,31.0,33.0,False +707,27.0,36.0,24.0,27.0,38.0,26.0,30.0,32.0,False +708,22.0,44.0,29.0,28.0,28.0,35.0,28.0,26.0,False +709,30.0,29.0,29.0,26.0,30.0,33.0,25.0,38.0,False +710,26.0,30.0,32.0,33.0,33.0,35.0,29.0,22.0,False +711,39.0,33.0,31.0,28.0,30.0,22.0,28.0,29.0,False +712,32.0,23.0,34.0,24.0,38.0,32.0,30.0,27.0,False +713,30.0,28.0,21.0,30.0,27.0,38.0,27.0,39.0,False 
+714,29.0,29.0,30.0,26.0,32.0,37.0,28.0,29.0,False
+715,25.0,34.0,22.0,35.0,34.0,27.0,32.0,31.0,False
+716,30.0,33.0,32.0,25.0,31.0,32.0,31.0,26.0,False
+717,22.0,28.0,34.0,26.0,28.0,39.0,30.0,33.0,False
+718,28.0,32.0,38.0,33.0,31.0,22.0,31.0,25.0,False
+719,35.0,23.0,42.0,24.0,34.0,28.0,32.0,22.0,False
+720,30.0,32.0,33.0,27.0,33.0,25.0,35.0,25.0,False
+721,31.0,39.0,27.0,40.0,24.0,31.0,28.0,20.0,False
+722,32.0,27.0,23.0,29.0,35.0,38.0,31.0,25.0,False
+723,30.0,32.0,33.0,33.0,26.0,32.0,29.0,25.0,False
+724,37.0,29.0,23.0,26.0,33.0,35.0,24.0,33.0,False
+725,27.0,38.0,36.0,26.0,29.0,29.0,27.0,28.0,False
+726,33.0,33.0,27.0,27.0,34.0,30.0,26.0,30.0,False
+727,26.0,36.0,25.0,43.0,28.0,22.0,27.0,33.0,False
+728,31.0,32.0,26.0,36.0,21.0,30.0,34.0,30.0,False
+729,28.0,35.0,30.0,22.0,32.0,25.0,36.0,32.0,False
+730,29.0,24.0,30.0,24.0,34.0,33.0,35.0,31.0,False
+731,34.0,28.0,34.0,30.0,28.0,30.0,22.0,34.0,False
+732,34.0,30.0,26.0,28.0,27.0,30.0,27.0,38.0,False
+733,29.0,32.0,30.0,24.0,36.0,32.0,28.0,29.0,False
+734,30.0,25.0,36.0,26.0,26.0,38.0,30.0,29.0,False
+735,24.0,29.0,38.0,25.0,24.0,33.0,28.0,39.0,False
+736,28.0,30.0,29.0,34.0,36.0,23.0,32.0,28.0,False
+737,26.0,36.0,25.0,28.0,34.0,33.0,29.0,29.0,False
+738,37.0,29.0,32.0,31.0,33.0,21.0,23.0,34.0,False
+739,25.0,34.0,30.0,32.0,31.0,26.0,26.0,36.0,False
+740,30.0,23.0,33.0,30.0,37.0,27.0,27.0,33.0,False
+741,31.0,30.0,33.0,28.0,29.0,36.0,33.0,20.0,False
+742,28.0,24.0,33.0,33.0,35.0,27.0,33.0,27.0,False
+743,29.0,25.0,37.0,36.0,34.0,29.0,27.0,23.0,False
+744,21.0,26.0,27.0,34.0,33.0,32.0,35.0,32.0,False
+745,30.0,34.0,38.0,29.0,36.0,25.0,19.0,29.0,False
+746,29.0,25.0,27.0,27.0,31.0,30.0,39.0,32.0,False
+747,26.0,25.0,33.0,26.0,31.0,30.0,35.0,34.0,False
+748,25.0,24.0,28.0,31.0,29.0,29.0,38.0,36.0,False
+749,31.0,31.0,30.0,33.0,28.0,27.0,32.0,28.0,False
+750,30.0,28.0,24.0,31.0,37.0,30.0,26.0,34.0,False
+751,34.0,24.0,26.0,30.0,30.0,28.0,27.0,41.0,False
+752,21.0,27.0,33.0,38.0,36.0,26.0,26.0,33.0,False
+753,33.0,30.0,32.0,28.0,31.0,25.0,30.0,31.0,False
+754,31.0,35.0,26.0,27.0,32.0,31.0,26.0,32.0,False
+755,30.0,31.0,40.0,26.0,27.0,31.0,26.0,29.0,False
+756,34.0,33.0,29.0,28.0,31.0,25.0,32.0,28.0,False
+757,27.0,27.0,34.0,33.0,30.0,32.0,29.0,28.0,False
+758,32.0,34.0,29.0,27.0,29.0,31.0,25.0,33.0,False
+759,28.0,29.0,31.0,29.0,27.0,34.0,34.0,28.0,False
+760,32.0,27.0,30.0,28.0,28.0,39.0,34.0,22.0,False
+761,25.0,33.0,27.0,28.0,33.0,33.0,28.0,33.0,False
+762,28.0,33.0,31.0,32.0,31.0,25.0,29.0,31.0,False
+763,36.0,33.0,28.0,31.0,31.0,27.0,26.0,28.0,False
+764,26.0,22.0,30.0,36.0,31.0,29.0,30.0,36.0,False
+765,36.0,30.0,19.0,32.0,30.0,35.0,30.0,28.0,False
+766,34.0,27.0,25.0,38.0,31.0,27.0,26.0,32.0,False
+767,28.0,29.0,33.0,28.0,27.0,34.0,26.0,35.0,False
+768,29.0,29.0,30.0,27.0,32.0,33.0,33.0,27.0,False
+769,35.0,32.0,26.0,30.0,28.0,30.0,33.0,26.0,False
+770,28.0,24.0,34.0,30.0,30.0,25.0,31.0,38.0,False
+771,29.0,26.0,29.0,31.0,30.0,27.0,38.0,30.0,False
+772,19.0,42.0,29.0,29.0,26.0,39.0,30.0,26.0,False
+773,34.0,30.0,29.0,27.0,35.0,32.0,23.0,30.0,False
+774,33.0,25.0,27.0,33.0,39.0,34.0,27.0,22.0,False
+775,33.0,37.0,32.0,29.0,34.0,22.0,23.0,30.0,False
+776,28.0,28.0,30.0,22.0,30.0,30.0,36.0,36.0,False
+777,28.0,32.0,23.0,30.0,30.0,41.0,29.0,27.0,False
+778,30.0,26.0,30.0,28.0,36.0,40.0,28.0,22.0,False
+779,31.0,25.0,34.0,29.0,25.0,25.0,39.0,32.0,False
+780,29.0,27.0,30.0,25.0,31.0,37.0,33.0,28.0,False
+781,29.0,32.0,24.0,37.0,33.0,34.0,26.0,25.0,False
+782,19.0,32.0,33.0,35.0,30.0,34.0,31.0,26.0,False
+783,38.0,28.0,31.0,35.0,26.0,30.0,26.0,26.0,False
+784,29.0,32.0,25.0,22.0,39.0,26.0,33.0,34.0,False
+785,34.0,30.0,32.0,29.0,32.0,32.0,23.0,28.0,False
+786,38.0,27.0,28.0,32.0,26.0,28.0,34.0,27.0,False
+787,32.0,28.0,34.0,29.0,27.0,31.0,31.0,28.0,False
+788,31.0,26.0,29.0,28.0,30.0,37.0,30.0,29.0,False
+789,33.0,31.0,35.0,27.0,33.0,34.0,21.0,26.0,False
+790,26.0,28.0,28.0,26.0,29.0,26.0,35.0,42.0,False
+791,32.0,33.0,30.0,35.0,29.0,26.0,26.0,29.0,False
+792,32.0,31.0,27.0,34.0,32.0,23.0,32.0,29.0,False
+793,34.0,31.0,26.0,32.0,23.0,35.0,28.0,31.0,False
+794,22.0,30.0,32.0,26.0,31.0,33.0,29.0,37.0,False
+795,23.0,30.0,33.0,37.0,34.0,32.0,21.0,30.0,False
+796,36.0,27.0,28.0,28.0,28.0,30.0,34.0,29.0,False
+797,25.0,35.0,24.0,31.0,27.0,33.0,37.0,28.0,False
+798,38.0,25.0,28.0,39.0,28.0,27.0,26.0,29.0,False
+799,28.0,32.0,31.0,27.0,26.0,30.0,31.0,35.0,False
+800,30.0,34.0,37.0,30.0,32.0,20.0,32.0,25.0,False
+801,25.0,31.0,33.0,31.0,32.0,31.0,25.0,32.0,False
+802,22.0,29.0,33.0,26.0,34.0,37.0,31.0,28.0,False
+803,30.0,34.0,34.0,32.0,27.0,35.0,28.0,20.0,False
+804,32.0,31.0,33.0,30.0,28.0,26.0,33.0,27.0,False
+805,29.0,31.0,40.0,24.0,27.0,35.0,27.0,27.0,False
+806,36.0,33.0,22.0,23.0,33.0,31.0,28.0,34.0,False
+807,35.0,26.0,32.0,34.0,30.0,33.0,24.0,26.0,False
+808,32.0,33.0,35.0,37.0,27.0,27.0,25.0,24.0,False
+809,33.0,37.0,29.0,27.0,25.0,32.0,32.0,25.0,False
+810,30.0,31.0,29.0,29.0,32.0,35.0,33.0,21.0,False
+811,30.0,34.0,32.0,37.0,29.0,26.0,21.0,31.0,False
+812,36.0,23.0,34.0,27.0,32.0,32.0,25.0,31.0,False
+813,30.0,31.0,29.0,35.0,28.0,32.0,26.0,29.0,False
+814,31.0,34.0,32.0,32.0,21.0,37.0,25.0,28.0,False
+815,28.0,32.0,28.0,25.0,34.0,30.0,33.0,30.0,False
+816,26.0,23.0,28.0,36.0,33.0,31.0,36.0,27.0,False
+817,38.0,34.0,32.0,35.0,29.0,24.0,22.0,26.0,False
+818,23.0,29.0,33.0,29.0,29.0,36.0,37.0,24.0,False
+819,28.0,41.0,27.0,27.0,35.0,24.0,28.0,30.0,False
+820,32.0,31.0,32.0,25.0,31.0,27.0,28.0,34.0,False
+821,26.0,38.0,35.0,32.0,25.0,30.0,26.0,28.0,False
+822,31.0,34.0,30.0,25.0,29.0,31.0,29.0,31.0,False
+823,30.0,27.0,34.0,34.0,30.0,22.0,32.0,31.0,False
+824,30.0,36.0,22.0,29.0,29.0,35.0,28.0,31.0,False
+825,23.0,35.0,31.0,30.0,29.0,28.0,35.0,29.0,False
+826,30.0,31.0,30.0,36.0,30.0,31.0,26.0,26.0,False
+827,29.0,31.0,30.0,28.0,38.0,33.0,27.0,24.0,False
+828,27.0,28.0,27.0,31.0,30.0,33.0,31.0,33.0,False
+829,34.0,36.0,26.0,31.0,22.0,32.0,26.0,33.0,False
+830,29.0,33.0,31.0,21.0,37.0,34.0,30.0,25.0,False
+831,35.0,26.0,28.0,29.0,37.0,25.0,27.0,33.0,False
+832,31.0,27.0,31.0,29.0,30.0,33.0,27.0,32.0,False
+833,28.0,35.0,26.0,31.0,34.0,23.0,33.0,30.0,False
+834,28.0,28.0,28.0,32.0,38.0,36.0,24.0,26.0,False
+835,30.0,36.0,31.0,29.0,26.0,30.0,34.0,24.0,False
+836,35.0,28.0,36.0,27.0,28.0,28.0,33.0,25.0,False
+837,26.0,33.0,25.0,33.0,31.0,25.0,33.0,34.0,False
+838,30.0,29.0,29.0,23.0,28.0,31.0,33.0,37.0,False
+839,35.0,39.0,28.0,25.0,36.0,25.0,23.0,29.0,False
+840,32.0,27.0,28.0,30.0,34.0,30.0,31.0,28.0,False
+841,26.0,33.0,35.0,28.0,24.0,33.0,32.0,29.0,False
+842,28.0,30.0,35.0,27.0,32.0,32.0,25.0,31.0,False
+843,27.0,28.0,39.0,35.0,31.0,26.0,29.0,25.0,False
+844,35.0,29.0,31.0,25.0,26.0,28.0,38.0,28.0,False
+845,25.0,28.0,30.0,24.0,40.0,37.0,30.0,26.0,False
+846,36.0,27.0,30.0,31.0,30.0,25.0,30.0,31.0,False
+847,32.0,31.0,27.0,32.0,30.0,27.0,32.0,29.0,False
+848,29.0,27.0,32.0,34.0,26.0,35.0,29.0,28.0,False
+849,36.0,25.0,26.0,26.0,33.0,29.0,34.0,31.0,False
+850,36.0,34.0,27.0,29.0,18.0,29.0,33.0,34.0,False
+851,24.0,25.0,31.0,30.0,31.0,36.0,26.0,37.0,False
+852,24.0,27.0,29.0,28.0,38.0,33.0,32.0,29.0,False
+853,35.0,28.0,23.0,31.0,30.0,37.0,26.0,30.0,False
+854,18.0,31.0,29.0,37.0,34.0,33.0,34.0,24.0,False
+855,29.0,36.0,25.0,36.0,29.0,28.0,30.0,27.0,False
+856,27.0,29.0,32.0,34.0,26.0,30.0,32.0,30.0,False
+857,33.0,36.0,32.0,27.0,24.0,28.0,27.0,33.0,False
+858,31.0,34.0,27.0,40.0,34.0,27.0,18.0,29.0,False
+859,33.0,23.0,25.0,30.0,31.0,34.0,31.0,33.0,False
+860,27.0,32.0,31.0,26.0,34.0,34.0,28.0,28.0,False
+861,29.0,34.0,28.0,32.0,27.0,29.0,32.0,29.0,False
+862,35.0,31.0,26.0,40.0,30.0,33.0,22.0,23.0,False
+863,28.0,29.0,30.0,36.0,26.0,33.0,31.0,27.0,False
+864,25.0,30.0,35.0,28.0,24.0,31.0,35.0,32.0,False
+865,32.0,26.0,36.0,25.0,34.0,27.0,31.0,29.0,False
+866,34.0,31.0,29.0,35.0,34.0,34.0,23.0,20.0,False
+867,32.0,28.0,32.0,31.0,32.0,29.0,23.0,33.0,False
+868,23.0,35.0,35.0,30.0,32.0,23.0,27.0,35.0,False
+869,30.0,34.0,35.0,32.0,27.0,22.0,31.0,29.0,False
+870,26.0,27.0,34.0,28.0,34.0,33.0,31.0,27.0,False
+871,31.0,30.0,27.0,31.0,31.0,31.0,30.0,29.0,False
+872,27.0,35.0,30.0,29.0,31.0,24.0,30.0,34.0,False
+873,29.0,31.0,28.0,25.0,39.0,30.0,29.0,29.0,False
+874,30.0,27.0,33.0,32.0,29.0,34.0,25.0,30.0,False
+875,29.0,26.0,30.0,26.0,29.0,33.0,35.0,32.0,False
+876,34.0,26.0,29.0,31.0,35.0,28.0,30.0,27.0,False
+877,36.0,31.0,29.0,35.0,22.0,29.0,30.0,28.0,False
+878,32.0,30.0,27.0,35.0,29.0,31.0,30.0,26.0,False
+879,28.0,28.0,30.0,36.0,28.0,32.0,31.0,27.0,False
+880,35.0,32.0,30.0,25.0,28.0,26.0,28.0,36.0,False
+881,24.0,33.0,30.0,29.0,25.0,36.0,32.0,31.0,False
+882,29.0,26.0,29.0,30.0,34.0,33.0,32.0,27.0,False
+883,19.0,32.0,32.0,29.0,29.0,31.0,36.0,32.0,False
+884,34.0,34.0,29.0,22.0,29.0,32.0,26.0,34.0,False
+885,21.0,36.0,35.0,36.0,33.0,27.0,29.0,23.0,False
+886,30.0,35.0,29.0,32.0,27.0,32.0,31.0,24.0,False
+887,26.0,27.0,27.0,30.0,35.0,30.0,35.0,30.0,False
+888,24.0,31.0,30.0,29.0,32.0,36.0,27.0,31.0,False
+889,32.0,33.0,34.0,31.0,22.0,32.0,28.0,28.0,False
+890,36.0,27.0,31.0,38.0,24.0,25.0,30.0,29.0,False
+891,29.0,30.0,25.0,29.0,35.0,29.0,32.0,31.0,False
+892,33.0,38.0,40.0,26.0,25.0,25.0,30.0,23.0,False
+893,32.0,34.0,29.0,32.0,28.0,25.0,30.0,30.0,False
+894,37.0,25.0,26.0,34.0,31.0,31.0,27.0,29.0,False
+895,26.0,24.0,31.0,38.0,27.0,32.0,31.0,31.0,False
+896,27.0,24.0,33.0,24.0,29.0,33.0,34.0,36.0,False
+897,30.0,31.0,21.0,31.0,29.0,34.0,27.0,37.0,False
+898,34.0,28.0,30.0,39.0,31.0,22.0,21.0,35.0,False
+899,34.0,24.0,32.0,28.0,39.0,28.0,36.0,19.0,False
+900,31.0,29.0,32.0,26.0,37.0,24.0,29.0,32.0,False
+901,26.0,30.0,26.0,25.0,34.0,30.0,30.0,39.0,False
+902,31.0,23.0,33.0,25.0,29.0,29.0,30.0,40.0,False
+903,36.0,30.0,26.0,26.0,34.0,30.0,31.0,27.0,False
+904,25.0,22.0,34.0,27.0,30.0,36.0,35.0,31.0,False
+905,32.0,32.0,30.0,26.0,28.0,28.0,26.0,38.0,False
+906,21.0,35.0,29.0,33.0,29.0,32.0,27.0,34.0,False
+907,33.0,32.0,31.0,37.0,23.0,35.0,25.0,24.0,False
+908,27.0,37.0,34.0,23.0,26.0,34.0,29.0,30.0,False
+909,30.0,19.0,30.0,33.0,35.0,34.0,21.0,38.0,False
+910,36.0,26.0,37.0,31.0,33.0,22.0,25.0,30.0,False
+911,31.0,34.0,30.0,26.0,27.0,30.0,29.0,33.0,False
+912,19.0,40.0,37.0,26.0,31.0,26.0,35.0,26.0,False
+913,29.0,32.0,29.0,35.0,25.0,31.0,30.0,29.0,False
+914,31.0,37.0,27.0,26.0,25.0,29.0,30.0,35.0,False
+915,31.0,29.0,25.0,29.0,27.0,29.0,36.0,34.0,False
+916,30.0,34.0,33.0,32.0,30.0,27.0,25.0,29.0,False
+917,34.0,39.0,25.0,27.0,35.0,27.0,21.0,32.0,False
+918,28.0,33.0,25.0,27.0,31.0,31.0,34.0,31.0,False
+919,32.0,37.0,21.0,26.0,31.0,35.0,29.0,29.0,False
+920,20.0,28.0,36.0,33.0,29.0,37.0,30.0,27.0,False
+921,26.0,34.0,31.0,27.0,31.0,33.0,35.0,23.0,False
+922,32.0,31.0,33.0,25.0,29.0,28.0,26.0,36.0,False
+923,33.0,23.0,25.0,31.0,25.0,41.0,29.0,33.0,False
+924,39.0,29.0,21.0,31.0,29.0,24.0,33.0,34.0,False
+925,38.0,35.0,34.0,30.0,28.0,20.0,27.0,28.0,False
+926,26.0,31.0,25.0,25.0,31.0,32.0,30.0,40.0,False
+927,38.0,27.0,25.0,24.0,32.0,25.0,34.0,35.0,False
+928,30.0,35.0,26.0,25.0,28.0,31.0,43.0,22.0,False
+929,31.0,27.0,32.0,19.0,40.0,31.0,30.0,30.0,False
+930,27.0,29.0,28.0,40.0,37.0,21.0,28.0,30.0,False
+931,28.0,31.0,27.0,31.0,31.0,33.0,34.0,25.0,False
+932,30.0,31.0,30.0,35.0,26.0,30.0,27.0,31.0,False
+933,26.0,26.0,28.0,35.0,30.0,30.0,28.0,37.0,False
+934,28.0,32.0,33.0,26.0,25.0,35.0,32.0,29.0,False
+935,33.0,24.0,38.0,27.0,29.0,33.0,25.0,31.0,False
+936,29.0,32.0,32.0,23.0,35.0,30.0,29.0,30.0,False
+937,32.0,27.0,34.0,26.0,30.0,31.0,33.0,27.0,False
+938,27.0,35.0,26.0,29.0,37.0,33.0,22.0,31.0,False
+939,39.0,31.0,33.0,28.0,32.0,31.0,25.0,21.0,False
+940,27.0,25.0,34.0,32.0,32.0,39.0,30.0,21.0,False
+941,27.0,35.0,35.0,31.0,32.0,29.0,27.0,24.0,False
+942,32.0,29.0,26.0,28.0,25.0,39.0,28.0,33.0,False
+943,26.0,29.0,36.0,27.0,38.0,34.0,24.0,26.0,False
+944,27.0,33.0,33.0,26.0,31.0,29.0,33.0,28.0,False
+945,30.0,29.0,31.0,34.0,35.0,31.0,25.0,25.0,False
+946,30.0,28.0,33.0,23.0,37.0,24.0,31.0,34.0,False
+947,39.0,25.0,32.0,34.0,30.0,30.0,26.0,24.0,False
+948,30.0,26.0,25.0,35.0,33.0,37.0,29.0,25.0,False
+949,28.0,35.0,30.0,30.0,24.0,23.0,33.0,37.0,False
+950,31.0,31.0,24.0,29.0,36.0,20.0,37.0,32.0,False
+951,27.0,29.0,26.0,34.0,31.0,28.0,32.0,33.0,False
+952,38.0,33.0,25.0,30.0,24.0,28.0,35.0,27.0,False
+953,26.0,27.0,30.0,27.0,26.0,38.0,34.0,32.0,False
+954,33.0,31.0,28.0,24.0,34.0,31.0,27.0,32.0,False
+955,37.0,32.0,29.0,23.0,27.0,27.0,30.0,35.0,False
+956,36.0,22.0,30.0,29.0,36.0,35.0,27.0,25.0,False
+957,25.0,33.0,24.0,31.0,30.0,31.0,36.0,30.0,False
+958,29.0,35.0,32.0,29.0,32.0,34.0,26.0,23.0,False
+959,35.0,30.0,28.0,25.0,29.0,35.0,28.0,30.0,False
+960,26.0,30.0,27.0,35.0,33.0,25.0,33.0,31.0,False
+961,28.0,32.0,34.0,20.0,34.0,33.0,28.0,31.0,False
+962,35.0,19.0,38.0,32.0,30.0,25.0,34.0,27.0,False
+963,36.0,28.0,27.0,31.0,30.0,30.0,27.0,31.0,False
+964,32.0,29.0,32.0,30.0,22.0,30.0,24.0,41.0,False
+965,25.0,30.0,37.0,34.0,25.0,31.0,28.0,30.0,False
+966,30.0,36.0,31.0,26.0,35.0,27.0,25.0,30.0,False
+967,26.0,30.0,23.0,33.0,35.0,36.0,26.0,31.0,False
+968,22.0,37.0,36.0,27.0,28.0,28.0,36.0,26.0,False
+969,33.0,35.0,28.0,27.0,30.0,34.0,25.0,28.0,False
+970,27.0,25.0,32.0,36.0,21.0,37.0,28.0,34.0,False
+971,32.0,32.0,31.0,29.0,34.0,27.0,26.0,29.0,False
+972,22.0,28.0,26.0,34.0,34.0,30.0,33.0,33.0,False
+973,29.0,27.0,27.0,39.0,28.0,27.0,32.0,31.0,False
+974,27.0,30.0,40.0,25.0,26.0,29.0,34.0,29.0,False
+975,32.0,31.0,32.0,30.0,31.0,25.0,25.0,34.0,False
+976,33.0,31.0,28.0,35.0,27.0,31.0,22.0,33.0,False
+977,29.0,33.0,39.0,23.0,29.0,32.0,31.0,24.0,False
+978,29.0,38.0,23.0,24.0,28.0,36.0,38.0,24.0,False
+979,27.0,28.0,24.0,33.0,32.0,31.0,31.0,34.0,False
+980,30.0,32.0,24.0,26.0,30.0,28.0,35.0,35.0,False
+981,24.0,33.0,29.0,33.0,34.0,24.0,33.0,30.0,False
+982,27.0,31.0,36.0,28.0,23.0,25.0,35.0,35.0,False
+983,28.0,22.0,38.0,33.0,26.0,25.0,34.0,34.0,False
+984,28.0,29.0,29.0,29.0,32.0,29.0,31.0,33.0,False
+985,27.0,34.0,28.0,28.0,32.0,28.0,36.0,27.0,False
+986,37.0,21.0,36.0,30.0,34.0,24.0,33.0,25.0,False
+987,31.0,25.0,27.0,28.0,29.0,28.0,37.0,35.0,False
+988,26.0,24.0,34.0,34.0,25.0,41.0,24.0,32.0,False
+989,25.0,29.0,29.0,32.0,29.0,34.0,30.0,32.0,False
+990,34.0,23.0,30.0,37.0,40.0,19.0,28.0,29.0,False
+991,30.0,21.0,27.0,35.0,34.0,31.0,29.0,33.0,False
+992,29.0,34.0,30.0,31.0,35.0,33.0,24.0,24.0,False
+993,37.0,34.0,30.0,33.0,32.0,28.0,20.0,26.0,False
+994,25.0,31.0,26.0,35.0,33.0,30.0,29.0,31.0,False
+995,24.0,32.0,32.0,23.0,32.0,30.0,33.0,34.0,False
+996,31.0,27.0,26.0,31.0,34.0,30.0,29.0,32.0,False
+997,23.0,27.0,31.0,31.0,34.0,35.0,34.0,25.0,False
+998,33.0,32.0,30.0,28.0,28.0,29.0,26.0,34.0,False
+999,34.0,30.0,30.0,33.0,23.0,25.0,31.0,34.0,False
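Note on the generated stat files: each row above describes one simulated sequence (a row index, one stimulus count per choice, and a final True/False skew flag). As an illustration only, the sketch below shows one way such a file could be summarized after generation; the helper name summarize_stat_csv, the exact column layout, and the path are assumptions for this example, not part of the patch.

import csv


def summarize_stat_csv(filename):
    # Assumed layout: a header row, then one row per generated sequence with an
    # index, one count per stimulus choice, and a trailing True/False skew flag.
    with open(filename, newline='') as stat_file:
        reader = csv.reader(stat_file)
        header = next(reader)
        rows = list(reader)

    counts = [[float(value) for value in row[1:-1]] for row in rows]
    skewed = [row[-1] == 'True' for row in rows]

    # Mean count per choice across all generated sequences, plus the fraction
    # of sequences flagged as skewed in the last column.
    means = [sum(column) / len(rows) for column in zip(*counts)]
    print('mean count per choice:',
          dict(zip(header[1:-1], [round(m, 2) for m in means])))
    print('skewed sequences: %d of %d' % (sum(skewed), len(rows)))


if __name__ == '__main__':
    summarize_stat_csv('../stat/nb_gm_002_2back_24trials.csv')  # illustrative path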