diff --git a/.github/workflows/basic_test.yml b/.github/workflows/basic_test.yml
index af3d3e1..7827cf9 100644
--- a/.github/workflows/basic_test.yml
+++ b/.github/workflows/basic_test.yml
@@ -48,5 +48,13 @@ jobs:
           ls
           python --version
           python -m pytest ./tests/test_basic_space.py -s
+        shell: bash
+
+      - name: Test Synthetic Data
+        run: |
+          python -m pip install pytest numpy
+          python -m pip install parameterized
+          python -m pip install torch
+          python --version
           python -m pytest ./tests/test_synthetic.py -s
         shell: bash
diff --git a/.latent-data/NATS-Bench b/.latent-data/NATS-Bench
index 3a87943..33bfb2e 160000
--- a/.latent-data/NATS-Bench
+++ b/.latent-data/NATS-Bench
@@ -1 +1 @@
-Subproject commit 3a8794322f0b990499a44db1b2cb05ef2bb33851
+Subproject commit 33bfb2eb1388f0273d4cc492091b1f983340879b
diff --git a/lib/datasets/DownsampledImageNet.py b/lib/datasets/DownsampledImageNet.py
index 26eed32..195ef45 100644
--- a/lib/datasets/DownsampledImageNet.py
+++ b/lib/datasets/DownsampledImageNet.py
@@ -5,118 +5,133 @@ import os, sys, hashlib, torch
 import numpy as np
 from PIL import Image
 import torch.utils.data as data
+
 if sys.version_info[0] == 2:
-  import cPickle as pickle
+    import cPickle as pickle
 else:
-  import pickle
 
+    import pickle
 
 def calculate_md5(fpath, chunk_size=1024 * 1024):
-  md5 = hashlib.md5()
-  with open(fpath, 'rb') as f:
-    for chunk in iter(lambda: f.read(chunk_size), b''):
-      md5.update(chunk)
-  return md5.hexdigest()
+    md5 = hashlib.md5()
+    with open(fpath, "rb") as f:
+        for chunk in iter(lambda: f.read(chunk_size), b""):
+            md5.update(chunk)
+    return md5.hexdigest()
 
 
 def check_md5(fpath, md5, **kwargs):
-  return md5 == calculate_md5(fpath, **kwargs)
+    return md5 == calculate_md5(fpath, **kwargs)
 
 
 def check_integrity(fpath, md5=None):
-  if not os.path.isfile(fpath): return False
-  if md5 is None: return True
-  else          : return check_md5(fpath, md5)
+    if not os.path.isfile(fpath):
+        return False
+    if md5 is None:
+        return True
+    else:
+        return check_md5(fpath, md5)
 
 
 class ImageNet16(data.Dataset):
-  # http://image-net.org/download-images
-  # A Downsampled Variant of ImageNet as an Alternative to the CIFAR datasets
-  # https://arxiv.org/pdf/1707.08819.pdf
-
-  train_list = [
-      ['train_data_batch_1', '27846dcaa50de8e21a7d1a35f30f0e91'],
-      ['train_data_batch_2', 'c7254a054e0e795c69120a5727050e3f'],
-      ['train_data_batch_3', '4333d3df2e5ffb114b05d2ffc19b1e87'],
-      ['train_data_batch_4', '1620cdf193304f4a92677b695d70d10f'],
-      ['train_data_batch_5', '348b3c2fdbb3940c4e9e834affd3b18d'],
-      ['train_data_batch_6', '6e765307c242a1b3d7d5ef9139b48945'],
-      ['train_data_batch_7', '564926d8cbf8fc4818ba23d2faac7564'],
-      ['train_data_batch_8', 'f4755871f718ccb653440b9dd0ebac66'],
-      ['train_data_batch_9', 'bb6dd660c38c58552125b1a92f86b5d4'],
-      ['train_data_batch_10','8f03f34ac4b42271a294f91bf480f29b'],
+    # http://image-net.org/download-images
+    # A Downsampled Variant of ImageNet as an Alternative to the CIFAR datasets
+    # https://arxiv.org/pdf/1707.08819.pdf
+
+    train_list = [
+        ["train_data_batch_1", "27846dcaa50de8e21a7d1a35f30f0e91"],
+        ["train_data_batch_2", "c7254a054e0e795c69120a5727050e3f"],
+        ["train_data_batch_3", "4333d3df2e5ffb114b05d2ffc19b1e87"],
+        ["train_data_batch_4", "1620cdf193304f4a92677b695d70d10f"],
+        ["train_data_batch_5", "348b3c2fdbb3940c4e9e834affd3b18d"],
+        ["train_data_batch_6", "6e765307c242a1b3d7d5ef9139b48945"],
+        ["train_data_batch_7", "564926d8cbf8fc4818ba23d2faac7564"],
+        ["train_data_batch_8", "f4755871f718ccb653440b9dd0ebac66"],
+        ["train_data_batch_9", "bb6dd660c38c58552125b1a92f86b5d4"],
+        ["train_data_batch_10", "8f03f34ac4b42271a294f91bf480f29b"],
     ]
-  valid_list = [
-      ['val_data', '3410e3017fdaefba8d5073aaa65e4bd6'],
+    valid_list = [
+        ["val_data", "3410e3017fdaefba8d5073aaa65e4bd6"],
     ]
 
-  def __init__(self, root, train, transform, use_num_of_class_only=None):
-    self.root = root
-    self.transform = transform
-    self.train = train  # training set or valid set
-    if not self._check_integrity(): raise RuntimeError('Dataset not found or corrupted.')
+    def __init__(self, root, train, transform, use_num_of_class_only=None):
+        self.root = root
+        self.transform = transform
+        self.train = train  # training set or valid set
+        if not self._check_integrity():
+            raise RuntimeError("Dataset not found or corrupted.")
 
-    if self.train: downloaded_list = self.train_list
-    else         : downloaded_list = self.valid_list
-    self.data = []
-    self.targets = []
-
-    # now load the picked numpy arrays
-    for i, (file_name, checksum) in enumerate(downloaded_list):
-      file_path = os.path.join(self.root, file_name)
-      #print ('Load {:}/{:02d}-th : {:}'.format(i, len(downloaded_list), file_path))
-      with open(file_path, 'rb') as f:
-        if sys.version_info[0] == 2:
-          entry = pickle.load(f)
+        if self.train:
+            downloaded_list = self.train_list
         else:
-          entry = pickle.load(f, encoding='latin1')
-        self.data.append(entry['data'])
-        self.targets.extend(entry['labels'])
-    self.data = np.vstack(self.data).reshape(-1, 3, 16, 16)
-    self.data = self.data.transpose((0, 2, 3, 1))  # convert to HWC
-    if use_num_of_class_only is not None:
-      assert isinstance(use_num_of_class_only, int) and use_num_of_class_only > 0 and use_num_of_class_only < 1000, 'invalid use_num_of_class_only : {:}'.format(use_num_of_class_only)
-      new_data, new_targets = [], []
-      for I, L in zip(self.data, self.targets):
-        if 1 <= L <= use_num_of_class_only:
-          new_data.append( I )
-          new_targets.append( L )
-      self.data = new_data
-      self.targets = new_targets
-      # self.mean.append(entry['mean'])
-      #self.mean = np.vstack(self.mean).reshape(-1, 3, 16, 16)
-      #self.mean = np.mean(np.mean(np.mean(self.mean, axis=0), axis=1), axis=1)
-      #print ('Mean : {:}'.format(self.mean))
-      #temp = self.data - np.reshape(self.mean, (1, 1, 1, 3))
-      #std_data = np.std(temp, axis=0)
-      #std_data = np.mean(np.mean(std_data, axis=0), axis=0)
-      #print ('Std : {:}'.format(std_data))
+            downloaded_list = self.valid_list
+        self.data = []
+        self.targets = []
 
-  def __repr__(self):
-    return ('{name}({num} images, {classes} classes)'.format(name=self.__class__.__name__, num=len(self.data), classes=len(set(self.targets))))
+        # now load the pickled numpy arrays
+        for i, (file_name, checksum) in enumerate(downloaded_list):
+            file_path = os.path.join(self.root, file_name)
+            # print ('Load {:}/{:02d}-th : {:}'.format(i, len(downloaded_list), file_path))
+            with open(file_path, "rb") as f:
+                if sys.version_info[0] == 2:
+                    entry = pickle.load(f)
+                else:
+                    entry = pickle.load(f, encoding="latin1")
+                self.data.append(entry["data"])
+                self.targets.extend(entry["labels"])
+        self.data = np.vstack(self.data).reshape(-1, 3, 16, 16)
+        self.data = self.data.transpose((0, 2, 3, 1))  # convert to HWC
+        if use_num_of_class_only is not None:
+            assert (
+                isinstance(use_num_of_class_only, int)
+                and use_num_of_class_only > 0
+                and use_num_of_class_only < 1000
+            ), "invalid use_num_of_class_only : {:}".format(use_num_of_class_only)
+            new_data, new_targets = [], []
+            for I, L in zip(self.data, self.targets):
+                if 1 <= L <= use_num_of_class_only:
+                    new_data.append(I)
+                    new_targets.append(L)
+            self.data = new_data
+            self.targets = new_targets
+        # self.mean.append(entry['mean'])
+        # self.mean = np.vstack(self.mean).reshape(-1, 3, 16, 16)
+        # self.mean = np.mean(np.mean(np.mean(self.mean, axis=0), axis=1), axis=1)
+        # print ('Mean : {:}'.format(self.mean))
+        # temp = self.data - np.reshape(self.mean, (1, 1, 1, 3))
+        # std_data = np.std(temp, axis=0)
+        # std_data = np.mean(np.mean(std_data, axis=0), axis=0)
+        # print ('Std : {:}'.format(std_data))
+
+    def __repr__(self):
+        return "{name}({num} images, {classes} classes)".format(
+            name=self.__class__.__name__,
+            num=len(self.data),
+            classes=len(set(self.targets)),
+        )
 
-  def __getitem__(self, index):
-    img, target = self.data[index], self.targets[index] - 1
+    def __getitem__(self, index):
+        img, target = self.data[index], self.targets[index] - 1
 
-    img = Image.fromarray(img)
+        img = Image.fromarray(img)
 
-    if self.transform is not None:
-      img = self.transform(img)
+        if self.transform is not None:
+            img = self.transform(img)
 
-    return img, target
+        return img, target
 
-  def __len__(self):
-    return len(self.data)
+    def __len__(self):
+        return len(self.data)
+
+    def _check_integrity(self):
+        root = self.root
+        for fentry in self.train_list + self.valid_list:
+            filename, md5 = fentry[0], fentry[1]
+            fpath = os.path.join(root, filename)
+            if not check_integrity(fpath, md5):
+                return False
+        return True
 
-  def _check_integrity(self):
-    root = self.root
-    for fentry in (self.train_list + self.valid_list):
-      filename, md5 = fentry[0], fentry[1]
-      fpath = os.path.join(root, filename)
-      if not check_integrity(fpath, md5):
-        return False
-    return True
 
 """
 if __name__ == '__main__':
diff --git a/lib/datasets/LandmarkDataset.py b/lib/datasets/LandmarkDataset.py
index c1a6df6..cfec357 100644
--- a/lib/datasets/LandmarkDataset.py
+++ b/lib/datasets/LandmarkDataset.py
@@ -20,172 +20,282 @@ import torch.utils.data as data
 
 
 class LandmarkDataset(data.Dataset):
+    def __init__(
+        self,
+        transform,
+        sigma,
+        downsample,
+        heatmap_type,
+        shape,
+        use_gray,
+        mean_file,
+        data_indicator,
+        cache_images=None,
+    ):
-  def __init__(self, transform, sigma, downsample, heatmap_type, shape, use_gray, mean_file, data_indicator, cache_images=None):
-
-    self.transform = transform
-    self.sigma = sigma
-    self.downsample = downsample
-    self.heatmap_type = heatmap_type
-    self.dataset_name = data_indicator
-    self.shape = shape  # [H,W]
-    self.use_gray = use_gray
-    assert transform is not None, 'transform : {:}'.format(transform)
-    self.mean_file = mean_file
-    if mean_file is None:
-      self.mean_data = None
-      warnings.warn('LandmarkDataset initialized with mean_data = None')
-    else:
-      assert osp.isfile(mean_file), '{:} is not a file.'.format(mean_file)
-      self.mean_data = torch.load(mean_file)
-    self.reset()
-    self.cutout = None
-    self.cache_images = cache_images
-    print ('The general dataset initialization done : {:}'.format(self))
-    warnings.simplefilter( 'once' )
-
-
-  def __repr__(self):
-    return ('{name}(point-num={NUM_PTS}, shape={shape}, sigma={sigma}, heatmap_type={heatmap_type}, length={length}, cutout={cutout}, dataset={dataset_name}, mean={mean_file})'.format(name=self.__class__.__name__, **self.__dict__))
-
-
-  def set_cutout(self, length):
-    if length is not None and length >= 1:
-      self.cutout = CutOut( int(length) )
-    else: self.cutout = None
-
-
-  def reset(self, num_pts=-1, boxid='default', only_pts=False):
-    self.NUM_PTS = num_pts
-    if only_pts: return
-    self.length = 0
-    self.datas = []
-    self.labels = []
-    self.NormDistances = []
-    self.BOXID = boxid
-    if self.mean_data is None:
-      self.mean_face = None
-    else:
-      self.mean_face = torch.Tensor(self.mean_data[boxid].copy().T)
-      assert (self.mean_face >= -1).all() and (self.mean_face <= 1).all(), 'mean-{:}-face : {:}'.format(boxid, self.mean_face)
-    #assert self.dataset_name is not None, 'The dataset name is None'
-
-
-  def __len__(self):
-    assert len(self.datas) == self.length, 'The length is not correct : {}'.format(self.length)
-    return self.length
-
-
-  def append(self, data, label, distance):
-    assert osp.isfile(data), 'The image path is not a file : {:}'.format(data)
-    self.datas.append( data ) ; self.labels.append( label )
-    self.NormDistances.append( distance )
-    self.length = self.length + 1
-
-
-  def load_list(self, file_lists, num_pts, boxindicator, normalizeL, reset):
-    if reset: self.reset(num_pts, boxindicator)
-    else    : assert self.NUM_PTS == num_pts and self.BOXID == boxindicator, 'The number of point is inconsistance : {:} vs {:}'.format(self.NUM_PTS, num_pts)
-    if isinstance(file_lists, str): file_lists = [file_lists]
-    samples = []
-    for idx, file_path in enumerate(file_lists):
-      print (':::: load list {:}/{:} : {:}'.format(idx, len(file_lists), file_path))
-      xdata = torch.load(file_path)
-      if isinstance(xdata, list)  : data = xdata          # image or video dataset list
-      elif isinstance(xdata, dict): data = xdata['datas'] # multi-view dataset list
-      else: raise ValueError('Invalid Type Error : {:}'.format( type(xdata) ))
-      samples = samples + data
-    # samples is a dict, where the key is the image-path and the value is the annotation
-    # each annotation is a dict, contains 'points' (3,num_pts), and various box
-    print ('GeneralDataset-V2 : {:} samples'.format(len(samples)))
-
-    #for index, annotation in enumerate(samples):
-    for index in tqdm( range( len(samples) ) ):
-      annotation = samples[index]
-      image_path = annotation['current_frame']
-      points, box = annotation['points'], annotation['box-{:}'.format(boxindicator)]
-      label = PointMeta2V(self.NUM_PTS, points, box, image_path, self.dataset_name)
-      if normalizeL is None: normDistance = None
-      else                 : normDistance = annotation['normalizeL-{:}'.format(normalizeL)]
-      self.append(image_path, label, normDistance)
-
-    assert len(self.datas) == self.length, 'The length and the data is not right {} vs {}'.format(self.length, len(self.datas))
-    assert len(self.labels) == self.length, 'The length and the labels is not right {} vs {}'.format(self.length, len(self.labels))
-    assert len(self.NormDistances) == self.length, 'The length and the NormDistances is not right {} vs {}'.format(self.length, len(self.NormDistance))
-    print ('Load data done for LandmarkDataset, which has {:} images.'.format(self.length))
-
-
-  def __getitem__(self, index):
-    assert index >= 0 and index < self.length, 'Invalid index : {:}'.format(index)
-    if self.cache_images is not None and self.datas[index] in self.cache_images:
-      image = self.cache_images[ self.datas[index] ].clone()
-    else:
-      image = pil_loader(self.datas[index], self.use_gray)
-    target = self.labels[index].copy()
-    return self._process_(image, target, index)
-
-
-  def _process_(self, image, target, index):
-
-    # transform the image and points
-    image, target, theta = self.transform(image, target)
-    (C, H, W), (height, width) = image.size(), self.shape
-
-    # obtain the visiable indicator vector
-    if target.is_none(): nopoints = True
-    else               : nopoints = False
-    if index == -1: __path = None
-    else          : __path = self.datas[index]
-    if isinstance(theta, list) or isinstance(theta, tuple):
-      affineImage, heatmaps, mask, norm_trans_points, THETA, transpose_theta = [], [], [], [], [], []
-      for _theta in theta:
-        _affineImage, _heatmaps, _mask, _norm_trans_points, _theta, _transpose_theta \
-            = self.__process_affine(image, target, _theta, nopoints, 'P[{:}]@{:}'.format(index, __path))
-        affineImage.append(_affineImage)
-        heatmaps.append(_heatmaps)
-        mask.append(_mask)
-        norm_trans_points.append(_norm_trans_points)
-        THETA.append(_theta)
-        transpose_theta.append(_transpose_theta)
-      affineImage, heatmaps, mask, norm_trans_points, THETA, transpose_theta = \
-          torch.stack(affineImage), torch.stack(heatmaps), torch.stack(mask), torch.stack(norm_trans_points), torch.stack(THETA), torch.stack(transpose_theta)
-    else:
-      affineImage, heatmaps, mask, norm_trans_points, THETA, transpose_theta = self.__process_affine(image, target, theta, nopoints, 'S[{:}]@{:}'.format(index, __path))
-
-    torch_index = torch.IntTensor([index])
-    torch_nopoints = torch.ByteTensor( [ nopoints ] )
-    torch_shape = torch.IntTensor([H,W])
-
-    return affineImage, heatmaps, mask, norm_trans_points, THETA, transpose_theta, torch_index, torch_nopoints, torch_shape
-
-
-  def __process_affine(self, image, target, theta, nopoints, aux_info=None):
-    image, target, theta = image.clone(), target.copy(), theta.clone()
-    (C, H, W), (height, width) = image.size(), self.shape
-    if nopoints: # do not have label
-      norm_trans_points = torch.zeros((3, self.NUM_PTS))
-      heatmaps = torch.zeros((self.NUM_PTS+1, height//self.downsample, width//self.downsample))
-      mask = torch.ones((self.NUM_PTS+1, 1, 1), dtype=torch.uint8)
-      transpose_theta = identity2affine(False)
-    else:
-      norm_trans_points = apply_affine2point(target.get_points(), theta, (H,W))
-      norm_trans_points = apply_boundary(norm_trans_points)
-      real_trans_points = norm_trans_points.clone()
-      real_trans_points[:2, :] = denormalize_points(self.shape, real_trans_points[:2,:])
-      heatmaps, mask = generate_label_map(real_trans_points.numpy(), height//self.downsample, width//self.downsample, self.sigma, self.downsample, nopoints, self.heatmap_type) # H*W*C
-      heatmaps = torch.from_numpy(heatmaps.transpose((2, 0, 1))).type(torch.FloatTensor)
-      mask = torch.from_numpy(mask.transpose((2, 0, 1))).type(torch.ByteTensor)
-    if self.mean_face is None:
-      #warnings.warn('In LandmarkDataset use identity2affine for transpose_theta because self.mean_face is None.')
-      transpose_theta = identity2affine(False)
-    else:
-      if torch.sum(norm_trans_points[2,:] == 1) < 3:
-        warnings.warn('In LandmarkDataset after transformation, no visiable point, using identity instead. Aux: {:}'.format(aux_info))
-        transpose_theta = identity2affine(False)
+        self.transform = transform
+        self.sigma = sigma
+        self.downsample = downsample
+        self.heatmap_type = heatmap_type
+        self.dataset_name = data_indicator
+        self.shape = shape  # [H,W]
+        self.use_gray = use_gray
+        assert transform is not None, "transform : {:}".format(transform)
+        self.mean_file = mean_file
+        if mean_file is None:
+            self.mean_data = None
+            warnings.warn("LandmarkDataset initialized with mean_data = None")
         else:
-        transpose_theta = solve2theta(norm_trans_points, self.mean_face.clone())
+            assert osp.isfile(mean_file), "{:} is not a file.".format(mean_file)
+            self.mean_data = torch.load(mean_file)
+        self.reset()
+        self.cutout = None
+        self.cache_images = cache_images
+        print("The general dataset initialization done : {:}".format(self))
+        warnings.simplefilter("once")
 
-    affineImage = affine2image(image, theta, self.shape)
-    if self.cutout is not None: affineImage = self.cutout( affineImage )
+    def __repr__(self):
+        return "{name}(point-num={NUM_PTS}, shape={shape}, sigma={sigma}, heatmap_type={heatmap_type}, length={length}, cutout={cutout}, dataset={dataset_name}, mean={mean_file})".format(
+            name=self.__class__.__name__, **self.__dict__
+        )
 
-    return affineImage, heatmaps, mask, norm_trans_points, theta, transpose_theta
+    def set_cutout(self, length):
+        if length is not None and length >= 1:
+            self.cutout = CutOut(int(length))
+        else:
+            self.cutout = None
+
+    def reset(self, num_pts=-1, boxid="default", only_pts=False):
+        self.NUM_PTS = num_pts
+        if only_pts:
+            return
+        self.length = 0
+        self.datas = []
+        self.labels = []
+        self.NormDistances = []
+        self.BOXID = boxid
+        if self.mean_data is None:
+            self.mean_face = None
+        else:
+            self.mean_face = torch.Tensor(self.mean_data[boxid].copy().T)
+            assert (self.mean_face >= -1).all() and (
+                self.mean_face <= 1
+            ).all(), "mean-{:}-face : {:}".format(boxid, self.mean_face)
+        # assert self.dataset_name is not None, 'The dataset name is None'
+
+    def __len__(self):
+        assert len(self.datas) == self.length, "The length is not correct : {}".format(
+            self.length
+        )
+        return self.length
+
+    def append(self, data, label, distance):
+        assert osp.isfile(data), "The image path is not a file : {:}".format(data)
+        self.datas.append(data)
+        self.labels.append(label)
+        self.NormDistances.append(distance)
+        self.length = self.length + 1
+
+    def load_list(self, file_lists, num_pts, boxindicator, normalizeL, reset):
+        if reset:
+            self.reset(num_pts, boxindicator)
+        else:
+            assert (
+                self.NUM_PTS == num_pts and self.BOXID == boxindicator
+            ), "The number of points is inconsistent : {:} vs {:}".format(
+                self.NUM_PTS, num_pts
+            )
+        if isinstance(file_lists, str):
+            file_lists = [file_lists]
+        samples = []
+        for idx, file_path in enumerate(file_lists):
+            print(
+                ":::: load list {:}/{:} : {:}".format(idx, len(file_lists), file_path)
+            )
+            xdata = torch.load(file_path)
+            if isinstance(xdata, list):
+                data = xdata  # image or video dataset list
+            elif isinstance(xdata, dict):
+                data = xdata["datas"]  # multi-view dataset list
+            else:
+                raise ValueError("Invalid Type Error : {:}".format(type(xdata)))
+            samples = samples + data
+        # samples is a dict, where the key is the image-path and the value is the annotation
+        # each annotation is a dict, contains 'points' (3,num_pts), and various box
+        print("GeneralDataset-V2 : {:} samples".format(len(samples)))
+
+        # for index, annotation in enumerate(samples):
+        for index in tqdm(range(len(samples))):
+            annotation = samples[index]
+            image_path = annotation["current_frame"]
+            points, box = (
+                annotation["points"],
+                annotation["box-{:}".format(boxindicator)],
+            )
+            label = PointMeta2V(
+                self.NUM_PTS, points, box, image_path, self.dataset_name
+            )
+            if normalizeL is None:
+                normDistance = None
+            else:
+                normDistance = annotation["normalizeL-{:}".format(normalizeL)]
+            self.append(image_path, label, normDistance)
+
+        assert (
+            len(self.datas) == self.length
+        ), "The length and the data is not right {} vs {}".format(
+            self.length, len(self.datas)
+        )
+        assert (
+            len(self.labels) == self.length
+        ), "The length and the labels is not right {} vs {}".format(
+            self.length, len(self.labels)
+        )
+        assert (
+            len(self.NormDistances) == self.length
+        ), "The length and the NormDistances is not right {} vs {}".format(
+            self.length, len(self.NormDistances)
+        )
+        print(
+            "Load data done for LandmarkDataset, which has {:} images.".format(
+                self.length
+            )
+        )
+
+    def __getitem__(self, index):
+        assert index >= 0 and index < self.length, "Invalid index : {:}".format(index)
+        if self.cache_images is not None and self.datas[index] in self.cache_images:
+            image = self.cache_images[self.datas[index]].clone()
+        else:
+            image = pil_loader(self.datas[index], self.use_gray)
+        target = self.labels[index].copy()
+        return self._process_(image, target, index)
+
+    def _process_(self, image, target, index):
+
+        # transform the image and points
+        image, target, theta = self.transform(image, target)
+        (C, H, W), (height, width) = image.size(), self.shape
+
+        # obtain the visible indicator vector
+        if target.is_none():
+            nopoints = True
+        else:
+            nopoints = False
+        if index == -1:
+            __path = None
+        else:
+            __path = self.datas[index]
+        if isinstance(theta, list) or isinstance(theta, tuple):
+            affineImage, heatmaps, mask, norm_trans_points, THETA, transpose_theta = (
+                [],
+                [],
+                [],
+                [],
+                [],
+                [],
+            )
+            for _theta in theta:
+                (
+                    _affineImage,
+                    _heatmaps,
+                    _mask,
+                    _norm_trans_points,
+                    _theta,
+                    _transpose_theta,
+                ) = self.__process_affine(
+                    image, target, _theta, nopoints, "P[{:}]@{:}".format(index, __path)
+                )
+                affineImage.append(_affineImage)
+                heatmaps.append(_heatmaps)
+                mask.append(_mask)
+                norm_trans_points.append(_norm_trans_points)
+                THETA.append(_theta)
+                transpose_theta.append(_transpose_theta)
+            affineImage, heatmaps, mask, norm_trans_points, THETA, transpose_theta = (
+                torch.stack(affineImage),
+                torch.stack(heatmaps),
+                torch.stack(mask),
+                torch.stack(norm_trans_points),
+                torch.stack(THETA),
+                torch.stack(transpose_theta),
+            )
+        else:
+            (
+                affineImage,
+                heatmaps,
+                mask,
+                norm_trans_points,
+                THETA,
+                transpose_theta,
+            ) = self.__process_affine(
+                image, target, theta, nopoints, "S[{:}]@{:}".format(index, __path)
+            )
+
+        torch_index = torch.IntTensor([index])
+        torch_nopoints = torch.ByteTensor([nopoints])
+        torch_shape = torch.IntTensor([H, W])
+
+        return (
+            affineImage,
+            heatmaps,
+            mask,
+            norm_trans_points,
+            THETA,
+            transpose_theta,
+            torch_index,
+            torch_nopoints,
+            torch_shape,
+        )
+
+    def __process_affine(self, image, target, theta, nopoints, aux_info=None):
+        image, target, theta = image.clone(), target.copy(), theta.clone()
+        (C, H, W), (height, width) = image.size(), self.shape
+        if nopoints:  # do not have label
+            norm_trans_points = torch.zeros((3, self.NUM_PTS))
+            heatmaps = torch.zeros(
+                (self.NUM_PTS + 1, height // self.downsample, width // self.downsample)
+            )
+            mask = torch.ones((self.NUM_PTS + 1, 1, 1), dtype=torch.uint8)
+            transpose_theta = identity2affine(False)
+        else:
+            norm_trans_points = apply_affine2point(target.get_points(), theta, (H, W))
+            norm_trans_points = apply_boundary(norm_trans_points)
+            real_trans_points = norm_trans_points.clone()
+            real_trans_points[:2, :] = denormalize_points(
+                self.shape, real_trans_points[:2, :]
+            )
+            heatmaps, mask = generate_label_map(
+                real_trans_points.numpy(),
+                height // self.downsample,
+                width // self.downsample,
+                self.sigma,
+                self.downsample,
+                nopoints,
+                self.heatmap_type,
+            )  # H*W*C
+            heatmaps = torch.from_numpy(heatmaps.transpose((2, 0, 1))).type(
+                torch.FloatTensor
+            )
+            mask = torch.from_numpy(mask.transpose((2, 0, 1))).type(torch.ByteTensor)
+        if self.mean_face is None:
+            # warnings.warn('In LandmarkDataset use identity2affine for transpose_theta because self.mean_face is None.')
+            transpose_theta = identity2affine(False)
+        else:
+            if torch.sum(norm_trans_points[2, :] == 1) < 3:
+                warnings.warn(
+                    "In LandmarkDataset after transformation, no visible point, using identity instead. Aux: {:}".format(
+                        aux_info
+                    )
+                )
+                transpose_theta = identity2affine(False)
+            else:
+                transpose_theta = solve2theta(
+                    norm_trans_points, self.mean_face.clone()
+                )
+
+        affineImage = affine2image(image, theta, self.shape)
+        if self.cutout is not None:
+            affineImage = self.cutout(affineImage)
+
+        return affineImage, heatmaps, mask, norm_trans_points, theta, transpose_theta
diff --git a/lib/datasets/SearchDatasetWrap.py b/lib/datasets/SearchDatasetWrap.py
index 06c5191..5f5c761 100644
--- a/lib/datasets/SearchDatasetWrap.py
+++ b/lib/datasets/SearchDatasetWrap.py
@@ -6,41 +6,49 @@ import torch.utils.data as data
 
 
 class SearchDataset(data.Dataset):
+    def __init__(self, name, data, train_split, valid_split, check=True):
+        self.datasetname = name
+        if isinstance(data, (list, tuple)):  # new type of SearchDataset
+            assert len(data) == 2, "invalid length: {:}".format(len(data))
+            self.train_data = data[0]
+            self.valid_data = data[1]
+            self.train_split = train_split.copy()
+            self.valid_split = valid_split.copy()
+            self.mode_str = "V2"  # new mode
+        else:
+            self.mode_str = "V1"  # old mode
+            self.data = data
+            self.train_split = train_split.copy()
+            self.valid_split = valid_split.copy()
+            if check:
+                intersection = set(train_split).intersection(set(valid_split))
+                assert (
+                    len(intersection) == 0
+                ), "the split train and validation sets should have no intersection"
+        self.length = len(self.train_split)
 
-  def __init__(self, name, data, train_split, valid_split, check=True):
-    self.datasetname = name
-    if isinstance(data, (list, tuple)): # new type of SearchDataset
-      assert len(data) == 2, 'invalid length: {:}'.format( len(data) )
-      self.train_data = data[0]
-      self.valid_data = data[1]
-      self.train_split = train_split.copy()
-      self.valid_split = valid_split.copy()
-      self.mode_str = 'V2' # new mode
-    else:
-      self.mode_str = 'V1' # old mode
-      self.data = data
-      self.train_split = train_split.copy()
-      self.valid_split = valid_split.copy()
-      if check:
-        intersection = set(train_split).intersection(set(valid_split))
-        assert len(intersection) == 0, 'the splitted train and validation sets should have no intersection'
-    self.length = len(self.train_split)
+    def __repr__(self):
+        return "{name}(name={datasetname}, train={tr_L}, valid={val_L}, version={ver})".format(
+            name=self.__class__.__name__,
+            datasetname=self.datasetname,
+            tr_L=len(self.train_split),
+            val_L=len(self.valid_split),
+            ver=self.mode_str,
+        )
 
-  def __repr__(self):
-    return ('{name}(name={datasetname}, train={tr_L}, valid={val_L}, version={ver})'.format(name=self.__class__.__name__, datasetname=self.datasetname, tr_L=len(self.train_split), val_L=len(self.valid_split), ver=self.mode_str))
+    def __len__(self):
+        return self.length
 
-  def __len__(self):
-    return self.length
-
-  def __getitem__(self, index):
-    assert index >= 0 and index < self.length, 'invalid index = {:}'.format(index)
-    train_index = self.train_split[index]
-    valid_index = random.choice( self.valid_split )
-    if self.mode_str == 'V1':
-      train_image, train_label = self.data[train_index]
-      valid_image, valid_label = self.data[valid_index]
-    elif self.mode_str == 'V2':
-      train_image, train_label = self.train_data[train_index]
-      valid_image, valid_label = self.valid_data[valid_index]
-    else: raise ValueError('invalid mode : {:}'.format(self.mode_str))
-    return train_image, train_label, valid_image, valid_label
+    def __getitem__(self, index):
+        assert index >= 0 and index < self.length, "invalid index = {:}".format(index)
+        train_index = self.train_split[index]
+        valid_index = random.choice(self.valid_split)
+        if self.mode_str == "V1":
+            train_image, train_label = self.data[train_index]
+            valid_image, valid_label = self.data[valid_index]
+        elif self.mode_str == "V2":
+            train_image, train_label = self.train_data[train_index]
+            valid_image, valid_label = self.valid_data[valid_index]
+        else:
+            raise ValueError("invalid mode : {:}".format(self.mode_str))
+        return train_image, train_label, valid_image, valid_label
diff --git a/lib/datasets/__init__.py b/lib/datasets/__init__.py
index 5750ebd..8893bf1 100644
--- a/lib/datasets/__init__.py
+++ b/lib/datasets/__init__.py
@@ -4,4 +4,5 @@
 from .get_dataset_with_transform import get_datasets, get_nas_search_loaders
 from .SearchDatasetWrap import SearchDataset
+from .synthetic_adaptive_environment import QuadraticFunction
 from .synthetic_adaptive_environment import SynAdaptiveEnv
diff --git a/lib/datasets/get_dataset_with_transform.py b/lib/datasets/get_dataset_with_transform.py
index 7a79867..9afe5da 100644
--- a/lib/datasets/get_dataset_with_transform.py
+++ b/lib/datasets/get_dataset_with_transform.py
@@ -14,214 +14,349 @@ from .SearchDatasetWrap import SearchDataset
 from config_utils import load_config
 
 
-Dataset2Class = {'cifar10' : 10,
-                 'cifar100': 100,
-                 'imagenet-1k-s':1000,
-                 'imagenet-1k' : 1000,
-                 'ImageNet16' : 1000,
-                 'ImageNet16-150': 150,
-                 'ImageNet16-120': 120,
-                 'ImageNet16-200': 200}
+Dataset2Class = {
+    "cifar10": 10,
+    "cifar100": 100,
+    "imagenet-1k-s": 1000,
+    "imagenet-1k": 1000,
+    "ImageNet16": 1000,
+    "ImageNet16-150": 150,
+    "ImageNet16-120": 120,
+    "ImageNet16-200": 200,
+}
 
 
 class CUTOUT(object):
+    def __init__(self, length):
+        self.length = length
 
-  def __init__(self, length):
-    self.length = length
+    def __repr__(self):
+        return "{name}(length={length})".format(
+            name=self.__class__.__name__, **self.__dict__
+        )
 
-  def __repr__(self):
-    return ('{name}(length={length})'.format(name=self.__class__.__name__, **self.__dict__))
+    def __call__(self, img):
+        h, w = img.size(1), img.size(2)
+        mask = np.ones((h, w), np.float32)
+        y = np.random.randint(h)
+        x = np.random.randint(w)
 
-  def __call__(self, img):
-    h, w = img.size(1), img.size(2)
-    mask = np.ones((h, w), np.float32)
-    y = np.random.randint(h)
-    x = np.random.randint(w)
+        y1 = np.clip(y - self.length // 2, 0, h)
+        y2 = np.clip(y + self.length // 2, 0, h)
+        x1 = np.clip(x - self.length // 2, 0, w)
+        x2 = np.clip(x + self.length // 2, 0, w)
 
-    y1 = np.clip(y - self.length // 2, 0, h)
-    y2 = np.clip(y + self.length // 2, 0, h)
-    x1 = np.clip(x - self.length // 2, 0, w)
-    x2 = np.clip(x + self.length // 2, 0, w)
-
-    mask[y1: y2, x1: x2] = 0.
-    mask = torch.from_numpy(mask)
-    mask = mask.expand_as(img)
-    img *= mask
-    return img
+        mask[y1:y2, x1:x2] = 0.0
+        mask = torch.from_numpy(mask)
+        mask = mask.expand_as(img)
+        img *= mask
+        return img
 
 
 imagenet_pca = {
-  'eigval': np.asarray([0.2175, 0.0188, 0.0045]),
-  'eigvec': np.asarray([
-    [-0.5675, 0.7192, 0.4009],
-    [-0.5808, -0.0045, -0.8140],
-    [-0.5836, -0.6948, 0.4203],
-  ])
+    "eigval": np.asarray([0.2175, 0.0188, 0.0045]),
+    "eigvec": np.asarray(
+        [
+            [-0.5675, 0.7192, 0.4009],
+            [-0.5808, -0.0045, -0.8140],
+            [-0.5836, -0.6948, 0.4203],
+        ]
+    ),
 }
 
 
 class Lighting(object):
-  def __init__(self, alphastd,
-               eigval=imagenet_pca['eigval'],
-               eigvec=imagenet_pca['eigvec']):
-    self.alphastd = alphastd
-    assert eigval.shape == (3,)
-    assert eigvec.shape == (3, 3)
-    self.eigval = eigval
-    self.eigvec = eigvec
+    def __init__(
+        self, alphastd, eigval=imagenet_pca["eigval"], eigvec=imagenet_pca["eigvec"]
+    ):
+        self.alphastd = alphastd
+        assert eigval.shape == (3,)
+        assert eigvec.shape == (3, 3)
+        self.eigval = eigval
+        self.eigvec = eigvec
 
-  def __call__(self, img):
-    if self.alphastd == 0.:
-      return img
-    rnd = np.random.randn(3) * self.alphastd
-    rnd = rnd.astype('float32')
-    v = rnd
-    old_dtype = np.asarray(img).dtype
-    v = v * self.eigval
-    v = v.reshape((3, 1))
-    inc = np.dot(self.eigvec, v).reshape((3,))
-    img = np.add(img, inc)
-    if old_dtype == np.uint8:
-      img = np.clip(img, 0, 255)
-    img = Image.fromarray(img.astype(old_dtype), 'RGB')
-    return img
+    def __call__(self, img):
+        if self.alphastd == 0.0:
+            return img
+        rnd = np.random.randn(3) * self.alphastd
+        rnd = rnd.astype("float32")
+        v = rnd
+        old_dtype = np.asarray(img).dtype
+        v = v * self.eigval
+        v = v.reshape((3, 1))
+        inc = np.dot(self.eigvec, v).reshape((3,))
+        img = np.add(img, inc)
+        if old_dtype == np.uint8:
+            img = np.clip(img, 0, 255)
+        img = Image.fromarray(img.astype(old_dtype), "RGB")
+        return img
 
-  def __repr__(self):
-    return self.__class__.__name__ + '()'
+    def __repr__(self):
+        return self.__class__.__name__ + "()"
 
 
 def get_datasets(name, root, cutout):
-  if name == 'cifar10':
-    mean = [x / 255 for x in [125.3, 123.0, 113.9]]
-    std = [x / 255 for x in [63.0, 62.1, 66.7]]
-  elif name == 'cifar100':
-    mean = [x / 255 for x in [129.3, 124.1, 112.4]]
-    std = [x / 255 for x in [68.2, 65.4, 70.4]]
-  elif name.startswith('imagenet-1k'):
-    mean, std = [0.485, 0.456, 0.406], [0.229, 0.224, 0.225]
-  elif name.startswith('ImageNet16'):
-    mean = [x / 255 for x in [122.68, 116.66, 104.01]]
-    std = [x / 255 for x in [63.22, 61.26 , 65.09]]
-  else:
-    raise TypeError("Unknow dataset : {:}".format(name))
+    if name == "cifar10":
+        mean = [x / 255 for x in [125.3, 123.0, 113.9]]
+        std = [x / 255 for x in [63.0, 62.1, 66.7]]
+    elif name == "cifar100":
+        mean = [x / 255 for x in [129.3, 124.1, 112.4]]
+        std = [x / 255 for x in [68.2, 65.4, 70.4]]
+    elif name.startswith("imagenet-1k"):
+        mean, std = [0.485, 0.456, 0.406], [0.229, 0.224, 0.225]
+    elif name.startswith("ImageNet16"):
+        mean = [x / 255 for x in [122.68, 116.66, 104.01]]
+        std = [x / 255 for x in [63.22, 61.26, 65.09]]
+    else:
+        raise TypeError("Unknown dataset : {:}".format(name))
 
-  # Data Argumentation
-  if name == 'cifar10' or name == 'cifar100':
-    lists = [transforms.RandomHorizontalFlip(), transforms.RandomCrop(32, padding=4), transforms.ToTensor(), transforms.Normalize(mean, std)]
-    if cutout > 0 : lists += [CUTOUT(cutout)]
-    train_transform = transforms.Compose(lists)
-    test_transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize(mean, std)])
-    xshape = (1, 3, 32, 32)
-  elif name.startswith('ImageNet16'):
-    lists = [transforms.RandomHorizontalFlip(), transforms.RandomCrop(16, padding=2), transforms.ToTensor(), transforms.Normalize(mean, std)]
-    if cutout > 0 : lists += [CUTOUT(cutout)]
-    train_transform = transforms.Compose(lists)
-    test_transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize(mean, std)])
-    xshape = (1, 3, 16, 16)
-  elif name == 'tiered':
-    lists = [transforms.RandomHorizontalFlip(), transforms.RandomCrop(80, padding=4), transforms.ToTensor(), transforms.Normalize(mean, std)]
-    if cutout > 0 : lists += [CUTOUT(cutout)]
-    train_transform = transforms.Compose(lists)
-    test_transform = transforms.Compose([transforms.CenterCrop(80), transforms.ToTensor(), transforms.Normalize(mean, std)])
-    xshape = (1, 3, 32, 32)
-  elif name.startswith('imagenet-1k'):
-    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
-    if name == 'imagenet-1k':
-      xlists = [transforms.RandomResizedCrop(224)]
-      xlists.append(
-        transforms.ColorJitter(
-          brightness=0.4,
-          contrast=0.4,
-          saturation=0.4,
-          hue=0.2))
-      xlists.append( Lighting(0.1))
-    elif name == 'imagenet-1k-s':
-      xlists = [transforms.RandomResizedCrop(224, scale=(0.2, 1.0))]
-    else: raise ValueError('invalid name : {:}'.format(name))
-    xlists.append( transforms.RandomHorizontalFlip(p=0.5) )
-    xlists.append( transforms.ToTensor() )
-    xlists.append( normalize )
-    train_transform = transforms.Compose(xlists)
-    test_transform = transforms.Compose([transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize])
-    xshape = (1, 3, 224, 224)
-  else:
-    raise TypeError("Unknow dataset : {:}".format(name))
+    # Data Augmentation
+    if name == "cifar10" or name == "cifar100":
+        lists = [
+            transforms.RandomHorizontalFlip(),
+            transforms.RandomCrop(32, padding=4),
+            transforms.ToTensor(),
+            transforms.Normalize(mean, std),
+        ]
+        if cutout > 0:
+            lists += [CUTOUT(cutout)]
+        train_transform = transforms.Compose(lists)
+        test_transform = transforms.Compose(
+            [transforms.ToTensor(), transforms.Normalize(mean, std)]
+        )
+        xshape = (1, 3, 32, 32)
+    elif name.startswith("ImageNet16"):
+        lists = [
+            transforms.RandomHorizontalFlip(),
+            transforms.RandomCrop(16, padding=2),
+            transforms.ToTensor(),
+            transforms.Normalize(mean, std),
+        ]
+        if cutout > 0:
+            lists += [CUTOUT(cutout)]
+        train_transform = transforms.Compose(lists)
+        test_transform = transforms.Compose(
+            [transforms.ToTensor(), transforms.Normalize(mean, std)]
+        )
+        xshape = (1, 3, 16, 16)
+    elif name == "tiered":
+        lists = [
+            transforms.RandomHorizontalFlip(),
+            transforms.RandomCrop(80, padding=4),
+            transforms.ToTensor(),
+            transforms.Normalize(mean, std),
+        ]
+        if cutout > 0:
+            lists += [CUTOUT(cutout)]
+        train_transform = transforms.Compose(lists)
+        test_transform = transforms.Compose(
+            [
+                transforms.CenterCrop(80),
+                transforms.ToTensor(),
+                transforms.Normalize(mean, std),
+            ]
+        )
+        xshape = (1, 3, 32, 32)
+    elif name.startswith("imagenet-1k"):
+        normalize = transforms.Normalize(
+            mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
+        )
+        if name == "imagenet-1k":
+            xlists = [transforms.RandomResizedCrop(224)]
+            xlists.append(
+                transforms.ColorJitter(
+                    brightness=0.4, contrast=0.4, saturation=0.4, hue=0.2
+                )
+            )
+            xlists.append(Lighting(0.1))
+        elif name == "imagenet-1k-s":
+            xlists = [transforms.RandomResizedCrop(224, scale=(0.2, 1.0))]
+        else:
+            raise ValueError("invalid name : {:}".format(name))
+        xlists.append(transforms.RandomHorizontalFlip(p=0.5))
+        xlists.append(transforms.ToTensor())
+        xlists.append(normalize)
+        train_transform = transforms.Compose(xlists)
+        test_transform = transforms.Compose(
+            [
+                transforms.Resize(256),
+                transforms.CenterCrop(224),
+                transforms.ToTensor(),
+                normalize,
+            ]
+        )
+        xshape = (1, 3, 224, 224)
+    else:
+        raise TypeError("Unknown dataset : {:}".format(name))
 
-  if name == 'cifar10':
-    train_data = dset.CIFAR10 (root, train=True , transform=train_transform, download=True)
-    test_data  = dset.CIFAR10 (root, train=False, transform=test_transform , download=True)
-    assert len(train_data) == 50000 and len(test_data) == 10000
-  elif name == 'cifar100':
-    train_data = dset.CIFAR100(root, train=True , transform=train_transform, download=True)
-    test_data  = dset.CIFAR100(root, train=False, transform=test_transform , download=True)
-    assert len(train_data) == 50000 and len(test_data) == 10000
-  elif name.startswith('imagenet-1k'):
-    train_data = dset.ImageFolder(osp.join(root, 'train'), train_transform)
-    test_data  = dset.ImageFolder(osp.join(root, 'val'), test_transform)
-    assert len(train_data) == 1281167 and len(test_data) == 50000, 'invalid number of images : {:} & {:} vs {:} & {:}'.format(len(train_data), len(test_data), 1281167, 50000)
-  elif name == 'ImageNet16':
-    train_data = ImageNet16(root, True , train_transform)
-    test_data  = ImageNet16(root, False, test_transform)
-    assert len(train_data) == 1281167 and len(test_data) == 50000
-  elif name == 'ImageNet16-120':
-    train_data = ImageNet16(root, True , train_transform, 120)
-    test_data  = ImageNet16(root, False, test_transform , 120)
-    assert len(train_data) == 151700 and len(test_data) == 6000
-  elif name == 'ImageNet16-150':
-    train_data = ImageNet16(root, True , train_transform, 150)
-    test_data  = ImageNet16(root, False, test_transform , 150)
-    assert len(train_data) == 190272 and len(test_data) == 7500
-  elif name == 'ImageNet16-200':
-    train_data = ImageNet16(root, True , train_transform, 200)
-    test_data  = ImageNet16(root, False, test_transform , 200)
-    assert len(train_data) == 254775 and len(test_data) == 10000
-  else: raise TypeError("Unknow dataset : {:}".format(name))
-
-  class_num = Dataset2Class[name]
-  return train_data, test_data, xshape, class_num
+    if name == "cifar10":
+        train_data = dset.CIFAR10(
+            root, train=True, transform=train_transform, download=True
+        )
+        test_data = dset.CIFAR10(
+            root, train=False, transform=test_transform, download=True
+        )
+        assert len(train_data) == 50000 and len(test_data) == 10000
+    elif name == "cifar100":
+        train_data = dset.CIFAR100(
+            root, train=True, transform=train_transform, download=True
+        )
+        test_data = dset.CIFAR100(
+            root, train=False, transform=test_transform, download=True
+        )
+        assert len(train_data) == 50000 and len(test_data) == 10000
+    elif name.startswith("imagenet-1k"):
+        train_data = dset.ImageFolder(osp.join(root, "train"), train_transform)
+        test_data = dset.ImageFolder(osp.join(root, "val"), test_transform)
+        assert (
+            len(train_data) == 1281167 and len(test_data) == 50000
+        ), "invalid number of images : {:} & {:} vs {:} & {:}".format(
+            len(train_data), len(test_data), 1281167, 50000
+        )
+    elif name == "ImageNet16":
+        train_data = ImageNet16(root, True, train_transform)
+        test_data = ImageNet16(root, False, test_transform)
+        assert len(train_data) == 1281167 and len(test_data) == 50000
+    elif name == "ImageNet16-120":
+        train_data = ImageNet16(root, True, train_transform, 120)
+        test_data = ImageNet16(root, False, test_transform, 120)
+        assert len(train_data) == 151700 and len(test_data) == 6000
+    elif name == "ImageNet16-150":
+        train_data = ImageNet16(root, True, train_transform, 150)
+        test_data = ImageNet16(root, False, test_transform, 150)
+        assert len(train_data) == 190272 and len(test_data) == 7500
+    elif name == "ImageNet16-200":
+        train_data = ImageNet16(root, True, train_transform, 200)
+        test_data = ImageNet16(root, False, test_transform, 200)
+        assert len(train_data) == 254775 and len(test_data) == 10000
+    else:
+        raise TypeError("Unknown dataset : {:}".format(name))
+
+    class_num = Dataset2Class[name]
+    return train_data, test_data, xshape, class_num
 
 
-def get_nas_search_loaders(train_data, valid_data, dataset, config_root, batch_size, workers):
-  if isinstance(batch_size, (list,tuple)):
-    batch, test_batch = batch_size
-  else:
-    batch, test_batch = batch_size, batch_size
-  if dataset == 'cifar10':
-    #split_Fpath = 'configs/nas-benchmark/cifar-split.txt'
-    cifar_split = load_config('{:}/cifar-split.txt'.format(config_root), None, None)
-    train_split, valid_split = cifar_split.train, cifar_split.valid # search over the proposed training and validation set
-    #logger.log('Load split file from {:}'.format(split_Fpath))      # they are two disjoint groups in the original CIFAR-10 training set
-    # To split data
-    xvalid_data = deepcopy(train_data)
-    if hasattr(xvalid_data, 'transforms'): # to avoid a print issue
-      xvalid_data.transforms = valid_data.transform
-    xvalid_data.transform = deepcopy( valid_data.transform )
-    search_data = SearchDataset(dataset, train_data, train_split, valid_split)
-    # data loader
-    search_loader = torch.utils.data.DataLoader(search_data, batch_size=batch, shuffle=True , num_workers=workers, pin_memory=True)
-    train_loader = torch.utils.data.DataLoader(train_data , batch_size=batch, sampler=torch.utils.data.sampler.SubsetRandomSampler(train_split), num_workers=workers, pin_memory=True)
-    valid_loader = torch.utils.data.DataLoader(xvalid_data, batch_size=test_batch, sampler=torch.utils.data.sampler.SubsetRandomSampler(valid_split), num_workers=workers, pin_memory=True)
-  elif dataset == 'cifar100':
-    cifar100_test_split = load_config('{:}/cifar100-test-split.txt'.format(config_root), None, None)
-    search_train_data = train_data
-    search_valid_data = deepcopy(valid_data) ; search_valid_data.transform = train_data.transform
-    search_data = SearchDataset(dataset, [search_train_data,search_valid_data], list(range(len(search_train_data))), cifar100_test_split.xvalid)
-    search_loader = torch.utils.data.DataLoader(search_data, batch_size=batch, shuffle=True , num_workers=workers, pin_memory=True)
-    train_loader = torch.utils.data.DataLoader(train_data , batch_size=batch, shuffle=True , num_workers=workers, pin_memory=True)
-    valid_loader = torch.utils.data.DataLoader(valid_data , batch_size=test_batch, sampler=torch.utils.data.sampler.SubsetRandomSampler(cifar100_test_split.xvalid), num_workers=workers, pin_memory=True)
-  elif dataset == 'ImageNet16-120':
-    imagenet_test_split = load_config('{:}/imagenet-16-120-test-split.txt'.format(config_root), None, None)
-    search_train_data = train_data
-    search_valid_data = deepcopy(valid_data) ; search_valid_data.transform = train_data.transform
-    search_data = SearchDataset(dataset, [search_train_data,search_valid_data], list(range(len(search_train_data))), imagenet_test_split.xvalid)
-    search_loader = torch.utils.data.DataLoader(search_data, batch_size=batch, shuffle=True , num_workers=workers, pin_memory=True)
-    train_loader = torch.utils.data.DataLoader(train_data , batch_size=batch, shuffle=True , num_workers=workers, pin_memory=True)
-    valid_loader = torch.utils.data.DataLoader(valid_data , batch_size=test_batch, sampler=torch.utils.data.sampler.SubsetRandomSampler(imagenet_test_split.xvalid), num_workers=workers, pin_memory=True)
-  else:
-    raise ValueError('invalid dataset : {:}'.format(dataset))
-  return search_loader, train_loader, valid_loader
+def get_nas_search_loaders(
+    train_data, valid_data, dataset, config_root, batch_size, workers
+):
+    if isinstance(batch_size, (list, tuple)):
+        batch, test_batch = batch_size
+    else:
+        batch, test_batch = batch_size, batch_size
+    if dataset == "cifar10":
+        # split_Fpath = 'configs/nas-benchmark/cifar-split.txt'
+        cifar_split = load_config("{:}/cifar-split.txt".format(config_root), None, None)
+        train_split, valid_split = (
+            cifar_split.train,
+            cifar_split.valid,
+        )  # search over the proposed training and validation set
+        # logger.log('Load split file from {:}'.format(split_Fpath))  # they are two disjoint groups in the original CIFAR-10 training set
+        # To split data
+        xvalid_data = deepcopy(train_data)
+        if hasattr(xvalid_data, "transforms"):  # to avoid a print issue
+            xvalid_data.transforms = valid_data.transform
+        xvalid_data.transform = deepcopy(valid_data.transform)
+        search_data = SearchDataset(dataset, train_data, train_split, valid_split)
+        # data loader
+        search_loader = torch.utils.data.DataLoader(
+            search_data,
+            batch_size=batch,
+            shuffle=True,
+            num_workers=workers,
+            pin_memory=True,
+        )
+        train_loader = torch.utils.data.DataLoader(
+            train_data,
+            batch_size=batch,
+            sampler=torch.utils.data.sampler.SubsetRandomSampler(train_split),
+            num_workers=workers,
+            pin_memory=True,
+        )
+        valid_loader = torch.utils.data.DataLoader(
+            xvalid_data,
+            batch_size=test_batch,
+            sampler=torch.utils.data.sampler.SubsetRandomSampler(valid_split),
+            num_workers=workers,
+            pin_memory=True,
+        )
+    elif dataset == "cifar100":
+        cifar100_test_split = load_config(
+            "{:}/cifar100-test-split.txt".format(config_root), None, None
+        )
+        search_train_data = train_data
+        search_valid_data = deepcopy(valid_data)
+        search_valid_data.transform = train_data.transform
+        search_data = SearchDataset(
+            dataset,
+            [search_train_data, search_valid_data],
+            list(range(len(search_train_data))),
+            cifar100_test_split.xvalid,
+        )
+        search_loader = torch.utils.data.DataLoader(
+            search_data,
+            batch_size=batch,
+            shuffle=True,
+            num_workers=workers,
+            pin_memory=True,
+        )
+        train_loader = torch.utils.data.DataLoader(
+            train_data,
+            batch_size=batch,
+            shuffle=True,
+            num_workers=workers,
+            pin_memory=True,
+        )
+        valid_loader = torch.utils.data.DataLoader(
+            valid_data,
+            batch_size=test_batch,
+            sampler=torch.utils.data.sampler.SubsetRandomSampler(
+                cifar100_test_split.xvalid
+            ),
+            num_workers=workers,
+            pin_memory=True,
+        )
+    elif dataset == "ImageNet16-120":
+        imagenet_test_split = load_config(
+            "{:}/imagenet-16-120-test-split.txt".format(config_root), None, None
+        )
+        search_train_data = train_data
+        search_valid_data = deepcopy(valid_data)
+        search_valid_data.transform = train_data.transform
+        search_data = SearchDataset(
+            dataset,
+            [search_train_data, search_valid_data],
+            list(range(len(search_train_data))),
+            imagenet_test_split.xvalid,
+        )
+        search_loader = torch.utils.data.DataLoader(
+            search_data,
+            batch_size=batch,
+            shuffle=True,
+            num_workers=workers,
+            pin_memory=True,
+        )
+        train_loader = torch.utils.data.DataLoader(
+            train_data,
+            batch_size=batch,
+            shuffle=True,
+            num_workers=workers,
+            pin_memory=True,
+        )
+        valid_loader = torch.utils.data.DataLoader(
+            valid_data,
+            batch_size=test_batch,
+            sampler=torch.utils.data.sampler.SubsetRandomSampler(
+                imagenet_test_split.xvalid
+            ),
+            num_workers=workers,
+            pin_memory=True,
+        )
+    else:
+        raise ValueError("invalid dataset : {:}".format(dataset))
+    return search_loader, train_loader, valid_loader
 
-#if __name__ == '__main__':
+
+# if __name__ == '__main__':
 #  train_data, test_data, xshape, class_num = dataset = get_datasets('cifar10', '/data02/dongxuanyi/.torch/cifar.python/', -1)
 #  import pdb; pdb.set_trace()
diff --git a/lib/datasets/landmark_utils/point_meta.py b/lib/datasets/landmark_utils/point_meta.py
index 2091970..842110c 100644
--- a/lib/datasets/landmark_utils/point_meta.py
+++ b/lib/datasets/landmark_utils/point_meta.py
@@ -9,108 +9,211 @@ from xvision import normalize_points
 from xvision import denormalize_points
 
 
-class PointMeta():
-  # points    : 3 x num_pts (x, y, oculusion)
-  # image_size: original [width, height]
-  def __init__(self, num_point, points, box, image_path, dataset_name):
+class PointMeta:
+    # points : 3 x num_pts (x, y, occlusion)
+    # image_size: original [width, height]
+    def __init__(self, num_point, points, box, image_path, dataset_name):
 
-    self.num_point = num_point
-    if box is not None:
-      assert (isinstance(box, tuple) or isinstance(box, list)) and len(box) == 4
-      self.box = torch.Tensor(box)
-    else: self.box = None
-    if points is None:
-      self.points = points
-    else:
-      assert len(points.shape) == 2 and points.shape[0] == 3 and points.shape[1] == self.num_point, 'The shape of point is not right : {}'.format( points )
-      self.points = torch.Tensor(points.copy())
-    self.image_path = image_path
-    self.datasets = dataset_name
+        self.num_point = num_point
+        if box is not None:
+            assert (isinstance(box, tuple) or isinstance(box, list)) and len(box) == 4
+            self.box = torch.Tensor(box)
+        else:
+            self.box = None
+        if points is None:
+            self.points = points
+        else:
+            assert (
+                len(points.shape) == 2
+                and points.shape[0] == 3
+                and points.shape[1] == self.num_point
+            ), "The shape of point is not right : {}".format(points)
+            self.points = torch.Tensor(points.copy())
+        self.image_path = image_path
+        self.datasets = dataset_name
 
-  def __repr__(self):
-    if self.box is None: boxstr = 'None'
-    else               : boxstr = 'box=[{:.1f}, {:.1f}, {:.1f}, {:.1f}]'.format(*self.box.tolist())
-    return ('{name}(points={num_point}, '.format(name=self.__class__.__name__, **self.__dict__) + boxstr + ')')
+    def __repr__(self):
+        if self.box is None:
+            boxstr = "None"
+        else:
+            boxstr = "box=[{:.1f}, {:.1f}, {:.1f}, {:.1f}]".format(*self.box.tolist())
+        return (
+            "{name}(points={num_point}, ".format(
+                name=self.__class__.__name__, **self.__dict__
+            )
+            + boxstr
+            + ")"
+        )
 
-  def get_box(self, return_diagonal=False):
-    if self.box is None: return None
-    if not return_diagonal:
-      return self.box.clone()
-    else:
-      W = (self.box[2]-self.box[0]).item()
-      H = (self.box[3]-self.box[1]).item()
-      return math.sqrt(H*H+W*W)
+    def get_box(self, return_diagonal=False):
+        if self.box is None:
+            return None
+        if not return_diagonal:
+            return self.box.clone()
+        else:
+            W = (self.box[2] - self.box[0]).item()
+            H = (self.box[3] - self.box[1]).item()
+            return math.sqrt(H * H + W * W)
 
-  def get_points(self, ignore_indicator=False):
-    if ignore_indicator: last = 2
-    else               : last = 3
-    if self.points is not None: return self.points.clone()[:last, :]
self.num_point)) + def get_points(self, ignore_indicator=False): + if ignore_indicator: + last = 2 + else: + last = 3 + if self.points is not None: + return self.points.clone()[:last, :] + else: + return torch.zeros((last, self.num_point)) - def is_none(self): - #assert self.box is not None, 'The box should not be None' - return self.points is None - #if self.box is None: return True - #else : return self.points is None + def is_none(self): + # assert self.box is not None, 'The box should not be None' + return self.points is None + # if self.box is None: return True + # else : return self.points is None - def copy(self): - return copy.deepcopy(self) + def copy(self): + return copy.deepcopy(self) - def visiable_pts_num(self): - with torch.no_grad(): - ans = self.points[2,:] > 0 - ans = torch.sum(ans) - ans = ans.item() - return ans - - def special_fun(self, indicator): - if indicator == '68to49': # For 300W or 300VW, convert the default 68 points to 49 points. - assert self.num_point == 68, 'num-point must be 68 vs. {:}'.format(self.num_point) - self.num_point = 49 - out = torch.ones((68), dtype=torch.uint8) - out[[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,60,64]] = 0 - if self.points is not None: self.points = self.points.clone()[:, out] - else: - raise ValueError('Invalid indicator : {:}'.format( indicator )) + def visiable_pts_num(self): + with torch.no_grad(): + ans = self.points[2, :] > 0 + ans = torch.sum(ans) + ans = ans.item() + return ans - def apply_horizontal_flip(self): - #self.points[0, :] = width - self.points[0, :] - 1 - # Mugsy spefic or Synthetic - if self.datasets.startswith('HandsyROT'): - ori = np.array(list(range(0, 42))) - pos = np.array(list(range(21,42)) + list(range(0,21))) - self.points[:, pos] = self.points[:, ori] - elif self.datasets.startswith('face68'): - ori = np.array(list(range(0, 68))) - pos = np.array([17,16,15,14,13,12,11,10, 9, 8,7,6,5,4,3,2,1, 27,26,25,24,23,22,21,20,19,18, 28,29,30,31, 36,35,34,33,32, 46,45,44,43,48,47, 40,39,38,37,42,41, 55,54,53,52,51,50,49,60,59,58,57,56,65,64,63,62,61,68,67,66])-1 - self.points[:, ori] = self.points[:, pos] - else: - raise ValueError('Does not support {:}'.format(self.datasets)) + def special_fun(self, indicator): + if ( + indicator == "68to49" + ): # For 300W or 300VW, convert the default 68 points to 49 points. + assert self.num_point == 68, "num-point must be 68 vs. 
{:}".format( + self.num_point + ) + self.num_point = 49 + out = torch.ones((68), dtype=torch.uint8) + out[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 60, 64]] = 0 + if self.points is not None: + self.points = self.points.clone()[:, out] + else: + raise ValueError("Invalid indicator : {:}".format(indicator)) + def apply_horizontal_flip(self): + # self.points[0, :] = width - self.points[0, :] - 1 + # Mugsy spefic or Synthetic + if self.datasets.startswith("HandsyROT"): + ori = np.array(list(range(0, 42))) + pos = np.array(list(range(21, 42)) + list(range(0, 21))) + self.points[:, pos] = self.points[:, ori] + elif self.datasets.startswith("face68"): + ori = np.array(list(range(0, 68))) + pos = ( + np.array( + [ + 17, + 16, + 15, + 14, + 13, + 12, + 11, + 10, + 9, + 8, + 7, + 6, + 5, + 4, + 3, + 2, + 1, + 27, + 26, + 25, + 24, + 23, + 22, + 21, + 20, + 19, + 18, + 28, + 29, + 30, + 31, + 36, + 35, + 34, + 33, + 32, + 46, + 45, + 44, + 43, + 48, + 47, + 40, + 39, + 38, + 37, + 42, + 41, + 55, + 54, + 53, + 52, + 51, + 50, + 49, + 60, + 59, + 58, + 57, + 56, + 65, + 64, + 63, + 62, + 61, + 68, + 67, + 66, + ] + ) + - 1 + ) + self.points[:, ori] = self.points[:, pos] + else: + raise ValueError("Does not support {:}".format(self.datasets)) # shape = (H,W) def apply_affine2point(points, theta, shape): - assert points.size(0) == 3, 'invalid points shape : {:}'.format(points.size()) - with torch.no_grad(): - ok_points = points[2,:] == 1 - assert torch.sum(ok_points).item() > 0, 'there is no visiable point' - points[:2,:] = normalize_points(shape, points[:2,:]) + assert points.size(0) == 3, "invalid points shape : {:}".format(points.size()) + with torch.no_grad(): + ok_points = points[2, :] == 1 + assert torch.sum(ok_points).item() > 0, "there is no visiable point" + points[:2, :] = normalize_points(shape, points[:2, :]) - norm_trans_points = ok_points.unsqueeze(0).repeat(3, 1).float() + norm_trans_points = ok_points.unsqueeze(0).repeat(3, 1).float() - trans_points, ___ = torch.gesv(points[:, ok_points], theta) + trans_points, ___ = torch.gesv(points[:, ok_points], theta) - norm_trans_points[:, ok_points] = trans_points - - return norm_trans_points + norm_trans_points[:, ok_points] = trans_points + return norm_trans_points def apply_boundary(norm_trans_points): - with torch.no_grad(): - norm_trans_points = norm_trans_points.clone() - oks = torch.stack((norm_trans_points[0]>-1, norm_trans_points[0]<1, norm_trans_points[1]>-1, norm_trans_points[1]<1, norm_trans_points[2]>0)) - oks = torch.sum(oks, dim=0) == 5 - norm_trans_points[2, :] = oks - return norm_trans_points + with torch.no_grad(): + norm_trans_points = norm_trans_points.clone() + oks = torch.stack( + ( + norm_trans_points[0] > -1, + norm_trans_points[0] < 1, + norm_trans_points[1] > -1, + norm_trans_points[1] < 1, + norm_trans_points[2] > 0, + ) + ) + oks = torch.sum(oks, dim=0) == 5 + norm_trans_points[2, :] = oks + return norm_trans_points diff --git a/lib/datasets/synthetic_adaptive_environment.py b/lib/datasets/synthetic_adaptive_environment.py index 4166973..103c1f6 100644 --- a/lib/datasets/synthetic_adaptive_environment.py +++ b/lib/datasets/synthetic_adaptive_environment.py @@ -1,39 +1,123 @@ ##################################################### # Copyright (c) Xuanyi Dong [GitHub D-X-Y], 2021.03 # ##################################################### +import math import numpy as np from typing import Optional +import torch import torch.utils.data as data +class QuadraticFunction: + """The quadratic function that outputs 
diff --git a/lib/datasets/synthetic_adaptive_environment.py b/lib/datasets/synthetic_adaptive_environment.py
index 4166973..103c1f6 100644
--- a/lib/datasets/synthetic_adaptive_environment.py
+++ b/lib/datasets/synthetic_adaptive_environment.py
@@ -1,39 +1,123 @@
 #####################################################
 # Copyright (c) Xuanyi Dong [GitHub D-X-Y], 2021.03 #
 #####################################################
+import math
 import numpy as np
 from typing import Optional
+import torch
 import torch.utils.data as data
+
+
+class QuadraticFunction:
+    """The quadratic function that outputs f(x) = a * x^2 + b * x + c."""
+
+    def __init__(self, list_of_points=None):
+        self._params = dict(a=None, b=None, c=None)
+        if list_of_points is not None:
+            self.fit(list_of_points)
+
+    def set(self, a, b, c):
+        self._params["a"] = a
+        self._params["b"] = b
+        self._params["c"] = c
+
+    def check_valid(self):
+        for key, value in self._params.items():
+            if value is None:
+                raise ValueError("The {:} is None".format(key))
+
+    def __getitem__(self, x):
+        self.check_valid()
+        return self._params["a"] * x * x + self._params["b"] * x + self._params["c"]
+
+    def fit(
+        self,
+        list_of_points,
+        transf=lambda x: x,
+        max_iter=900,
+        lr_max=1.0,
+        verbose=False,
+    ):
+        with torch.no_grad():
+            data = torch.Tensor(list_of_points).type(torch.float32)
+            assert data.ndim == 2 and data.size(1) == 2, "Invalid shape : {:}".format(
+                data.shape
+            )
+            x, y = data[:, 0], data[:, 1]
+        weights = torch.nn.Parameter(torch.Tensor(3))
+        torch.nn.init.normal_(weights, mean=0.0, std=1.0)
+        optimizer = torch.optim.Adam([weights], lr=lr_max, amsgrad=True)
+        lr_scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[int(max_iter*0.25), int(max_iter*0.5), int(max_iter*0.75)], gamma=0.1)
+        if verbose:
+            print("The optimizer: {:}".format(optimizer))
+
+        best_loss = None
+        for _iter in range(max_iter):
+            y_hat = transf(weights[0] * x * x + weights[1] * x + weights[2])
+            loss = torch.mean(torch.abs(y - y_hat))
+            optimizer.zero_grad()
+            loss.backward()
+            optimizer.step()
+            lr_scheduler.step()
+            if verbose:
+                print(
+                    "In QuadraticFunction's fit, loss at the {:02d}/{:02d}-th iter is {:}".format(
+                        _iter, max_iter, loss.item()
+                    )
+                )
+            # Update the params
+            if best_loss is None or best_loss > loss.item():
+                best_loss = loss.item()
+                self._params["a"] = weights[0].item()
+                self._params["b"] = weights[1].item()
+                self._params["c"] = weights[2].item()
+
+    def __repr__(self):
+        return "{name}(y = {a} * x^2 + {b} * x + {c})".format(
+            name=self.__class__.__name__,
+            a=self._params["a"],
+            b=self._params["b"],
+            c=self._params["c"],
+        )
+
+
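As a quick sanity check of the class above (a sketch, not code from this commit): set fixes the coefficients directly and __getitem__ evaluates the polynomial, while fit runs Adam on an L1 loss, so on noise-free quadratic data it should approximately recover the generating coefficients. The import path assumes lib/ is on sys.path, as the notebook below arranges:

import torch
from datasets.synthetic_adaptive_environment import QuadraticFunction

quad = QuadraticFunction()
quad.set(a=1.0, b=-2.0, c=0.5)
print(quad[2.0])  # 1.0 * 4 - 2.0 * 2 + 0.5 = 0.5

# fit() on points sampled from y = 3x^2 + x - 1; the printed coefficients
# should be close to (3, 1, -1), up to optimization error.
points = [(x / 10.0, 3 * (x / 10.0) ** 2 + (x / 10.0) - 1) for x in range(-10, 11)]
print(QuadraticFunction(points))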
 class SynAdaptiveEnv(data.Dataset):
-    """The synethtic dataset for adaptive environment."""
+    """The synthetic dataset for adaptive environment.
+
+    - x in [0, 1]
+    - y = amplitude-scale-of(x) * sin( period-phase-shift-of(x) )
+    - where
+      - the amplitude scale is a quadratic function of x
+      - the period-phase-shift is another quadratic function of x
+
+    """

     def __init__(
         self,
-        max_num_phase: int = 100,
-        interval: float = 0.1,
-        max_scale: float = 4,
-        offset_scale: float = 1.5,
+        num: int = 100,
+        num_sin_phase: int = 4,
+        min_amplitude: float = 1,
+        max_amplitude: float = 4,
+        phase_shift: float = 0,
         mode: Optional[str] = None,
     ):
+        self._amplitude_scale = QuadraticFunction(
+            [(0, min_amplitude), (0.5, max_amplitude), (1, min_amplitude)]
+        )
-        self._max_num_phase = max_num_phase
-        self._interval = interval
+        self._num_sin_phase = num_sin_phase
+        self._interval = 1.0 / (float(num) - 1)
+        self._total_num = num
+
+        self._period_phase_shift = QuadraticFunction()
+
+        fitting_data = []
+        temp_max_scalar = 2 ** num_sin_phase
+        for i in range(num_sin_phase):
+            value = (2 ** i) / temp_max_scalar
+            fitting_data.append((value, math.sin(value)))
+        self._period_phase_shift.fit(fitting_data, transf=lambda x: torch.sin(x))
-        self._times = np.arange(0, np.pi * self._max_num_phase, self._interval)
-        xmin, xmax = self._times.min(), self._times.max()
-        self._inputs = []
-        self._total_num = len(self._times)
-        for i in range(self._total_num):
-            scale = (i + 1.0) / self._total_num * max_scale
-            sin_scale = (i + 1.0) / self._total_num * 0.7
-            sin_scale = -4 * (sin_scale - 0.5) ** 2 + 1
-            # scale = -(self._times[i] - (xmin - xmax) / 2) + max_scale
-            self._inputs.append(
-                np.sin(self._times[i] * sin_scale) * (offset_scale - scale)
-            )
-        self._inputs = np.array(self._inputs)

         # Training Set 60%
         num_of_train = int(self._total_num * 0.6)
         # Validation Set 20%
@@ -70,10 +154,11 @@ class SynAdaptiveEnv(data.Dataset):
     def __getitem__(self, index):
         assert 0 <= index < len(self), "{:} is not in [0, {:})".format(index, len(self))
         index = self._indexes[index]
-        value = float(self._inputs[index])
-        if self._transform is not None:
-            value = self._transform(value)
-        return index, float(self._times[index]), value
+        position = self._interval * index
+        value = self._amplitude_scale[position] * math.sin(
+            self._period_phase_shift[position]
+        )
+        return index, position, value

     def __len__(self):
         return len(self._indexes)
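Putting the new data model together (again a sketch, under assumptions visible in this diff: mode strings "train"/"valid"/"test" and a sequential 60/20/20 split): index i sits at position x = i * interval = i / (num - 1), and each sample is y = amplitude-scale(x) * sin(period-phase-shift(x)).

from datasets import SynAdaptiveEnv  # as the notebook below imports it

dataset = SynAdaptiveEnv(num=100, mode="train")
print(len(dataset))  # expected 60, i.e. int(100 * 0.6), if "train" selects the first split
index, position, value = dataset[0]
# position == index / 99; value == amplitude_scale[position] * sin(period_phase_shift[position])
print(index, position, value)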
diff --git a/lib/datasets/test_utils.py b/lib/datasets/test_utils.py
index 2245f87..757ac4f 100644
--- a/lib/datasets/test_utils.py
+++ b/lib/datasets/test_utils.py
@@ -5,16 +5,20 @@ import os


 def test_imagenet_data(imagenet):
-  total_length = len(imagenet)
-  assert total_length == 1281166 or total_length == 50000, 'The length of ImageNet is wrong : {}'.format(total_length)
-  map_id = {}
-  for index in range(total_length):
-    path, target = imagenet.imgs[index]
-    folder, image_name = os.path.split(path)
-    _, folder = os.path.split(folder)
-    if folder not in map_id:
-      map_id[folder] = target
-    else:
-      assert map_id[folder] == target, 'Class : {} is not {}'.format(folder, target)
-    assert image_name.find(folder) == 0, '{} is wrong.'.format(path)
-  print ('Check ImageNet Dataset OK')
+    total_length = len(imagenet)
+    assert (
+        total_length == 1281166 or total_length == 50000
+    ), "The length of ImageNet is wrong : {}".format(total_length)
+    map_id = {}
+    for index in range(total_length):
+        path, target = imagenet.imgs[index]
+        folder, image_name = os.path.split(path)
+        _, folder = os.path.split(folder)
+        if folder not in map_id:
+            map_id[folder] = target
+        else:
+            assert map_id[folder] == target, "Class : {} is not {}".format(
+                folder, target
+            )
+        assert image_name.find(folder) == 0, "{} is wrong.".format(path)
+    print("Check ImageNet Dataset OK")
diff --git a/notebooks/TOT/synthetic.ipynb b/notebooks/TOT/synthetic.ipynb
index 10ed8f9..695ea1f 100644
--- a/notebooks/TOT/synthetic.ipynb
+++ b/notebooks/TOT/synthetic.ipynb
@@ -37,7 +37,7 @@
     "    sys.path.insert(0, str(lib_dir))\n",
     "\n",
     "from datasets import SynAdaptiveEnv\n",
-    "from xlayers.super_core import SuperMLPv1"
+    "from xlayers.super_core import SuperSequential, SuperMLPv1"
    ]
  },
  {
@@ -51,7 +51,10 @@
     "    xs = torch.FloatTensor(xs).view(-1, 1)\n",
     "    ys = torch.FloatTensor(ys).view(-1, 1)\n",
     "    \n",
-    "    model = SuperMLPv1(1, 10, 1, torch.nn.ReLU)\n",
+    "    model = SuperSequential(\n",
+    "        SuperMLPv1(1, 10, 20, torch.nn.ReLU),\n",
+    "        SuperMLPv1(20, 10, 1, torch.nn.ReLU)\n",
+    "    )\n",
     "    optimizer = torch.optim.Adam(\n",
     "        model.parameters(),\n",
     "        lr=0.01, weight_decay=1e-4, amsgrad=True\n",
@@ -73,7 +76,7 @@
     "    return answers\n",
     "\n",
     "def f(x):\n",
-    "    return np.cos( 0.5 * x + 0.)\n",
+    "    return np.cos( 0.5 * x + x * x)\n",
     "\n",
     "def get_data(mode):\n",
     "    dataset = SynAdaptiveEnv(mode=mode)\n",
@@ -131,6 +134,7 @@
     "    [train_preds, valid_preds, test_preds] = optimize_fn(train_xs, train_ys, [train_xs, valid_xs, test_xs])\n",
     "    draw_ax(cur_ax, train_times, train_preds, None, None,\n",
     "            alpha=1.0, linestyle='--', color='r', legend=\"MLP\", plot_only=True)\n",
+    "    import pdb; pdb.set_trace()\n",
     "    draw_ax(cur_ax, valid_times, valid_preds, None, None,\n",
     "            alpha=1.0, linestyle='--', color='g', legend=None, plot_only=True)\n",
     "    draw_ax(cur_ax, test_times, test_preds, None, None,\n",
@@ -153,7 +157,41 @@
    "name": "stdout",
    "output_type": "stream",
    "text": [
-    "The Desktop is at: /Users/xuanyidong/Desktop\n"
+    "The Desktop is at: /Users/xuanyidong/Desktop\n",
+    [... committed ipdb session output elided: the breakpoint trace stopped at visualize_syn() (line 89) right after the pdb.set_trace() added above, followed by full dumps of train_times (0.0 through 188.4 in steps of 0.1) and train_preds (the MLP predictions at those positions) ...]
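For readers without xlayers at hand, the notebook's new model can be read as the following plain-PyTorch stack (a sketch that assumes SuperMLPv1(in_dim, hidden_dim, out_dim, activation) is a two-layer MLP, which matches how it is constructed here):

import torch

# SuperSequential(SuperMLPv1(1, 10, 20, ReLU), SuperMLPv1(20, 10, 1, ReLU)),
# read as two stacked two-layer MLPs: 1 -> 10 -> 20, then 20 -> 10 -> 1.
model = torch.nn.Sequential(
    torch.nn.Linear(1, 10), torch.nn.ReLU(), torch.nn.Linear(10, 20),
    torch.nn.Linear(20, 10), torch.nn.ReLU(), torch.nn.Linear(10, 1),
)
optimizer = torch.optim.Adam(
    model.parameters(), lr=0.01, weight_decay=1e-4, amsgrad=True
)
xs = torch.linspace(0.0, 1.0, steps=64).view(-1, 1)
print(model(xs).shape)  # torch.Size([64, 1])

The deeper stack, together with the new cos(0.5 * x + x * x) target, presumably makes the fitting problem less trivial than the previous cos(0.5 * x) baseline.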
-0.5000438094139099, -0.4328497052192688, -0.3558310270309448, -0.2650381922721863, -0.18578317761421204, -0.10775452852249146, -0.020321309566497803, 0.08250454068183899, 0.19224879145622253, 0.2855665683746338, 0.3738233745098114, 0.4555278718471527, 0.5292949676513672, 0.5938692092895508, 0.6481460928916931, 0.6911908388137817, 0.7222539186477661, 0.7407845258712769, 0.7464396953582764, 0.7390903234481812, 0.7188242673873901, 0.6859443783760071, 0.6409646272659302, 0.5846011638641357, 0.5177613496780396, 0.441528856754303, 0.3571454584598541, 0.2659911513328552, 0.16956140100955963, 0.046302974224090576, -0.0442071259021759, -0.1348220705986023, -0.21499979496002197, -0.3061802387237549, -0.3982636332511902, -0.47536367177963257, -0.5456859469413757, -0.6080512404441833, -0.6614097952842712, -0.7048583030700684, -0.7376553416252136, -0.7592336535453796, -0.7692095637321472, -0.7673901915550232, -0.7537761330604553, -0.72856205701828, -0.6921342611312866, -0.6450637578964233, -0.5880972743034363, -0.5221460461616516, -0.44826990365982056, -0.36573606729507446, -0.267172634601593, -0.1814333200454712, -0.09592241048812866, -0.007509112358093262, 0.12003374099731445, 0.2256602793931961, 0.3249743580818176, 0.41870856285095215, 0.5053207278251648, 0.5833824872970581, 0.6516021490097046, 0.7088459730148315, 0.7541572451591492, 0.7867714166641235, 0.8061292171478271, 0.8118859529495239, 0.8039172291755676, 0.782321035861969, 0.7474168539047241, 0.6997400522232056, 0.640034556388855, 0.5692393779754639, 0.48847538232803345, 0.3990257978439331, 0.30231672525405884, 0.19989363849163055, 0.07965622842311859, -0.02840709686279297, -0.12667256593704224, -0.2125314176082611, -0.31041616201400757, -0.4084640145301819, -0.4920560121536255, -0.5687925815582275, -0.637427031993866, -0.6968407034873962, -0.746061384677887, -0.7842786908149719, -0.810857355594635, -0.8253474831581116, -0.827491819858551, -0.8172311186790466, -0.7947028279304504, -0.760241687297821, -0.7143726944923401, -0.6578041315078735, -0.5914150476455688, -0.5162426829338074, -0.43346548080444336, -0.33907073736190796, -0.23422563076019287, -0.14684811234474182, -0.049068599939346313, 0.053624704480171204, 0.1859888732433319, 0.2940749228000641, 0.39724215865135193, 0.49384891986846924, 0.5823553204536438, 0.6613479256629944, 0.7295613288879395, 0.785898745059967, 0.8294494152069092, 0.8595027923583984, 0.8755601048469543, 0.8773423433303833, 0.8647941946983337, 0.838085949420929, 0.7976104021072388, 0.7439766526222229, 0.6780008673667908, 0.6006935834884644, 0.5132441520690918, 0.41700154542922974, 0.313454270362854, 0.20420601963996887, 0.07625192403793335, -0.036726951599121094, -0.13900303840637207, -0.22982758283615112, -0.3385940194129944, -0.4381263256072998, -0.5265029072761536, -0.6077153086662292, -0.680491030216217, -0.7436863780021667, -0.7963059544563293, -0.8375166058540344, -0.8666607737541199]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ipdb> train_ys\n", + "[1.0, 0.9999999910945311, 0.9999999198522608, 0.9999996790771866, 0.9999991066924582, 0.9999979837910936, 0.9999960318551232, 0.9999929091393012, 0.9999882062104364, 0.9999814406286233, 0.9999720507522349, 0.99995938864448, 0.9999427120556592, 0.9999211754520029, 0.9998938200591698, 0.9998595628861638, 0.9998171846936232, 0.9997653168692014, 0.9997024271721292, 0.9996268043090873, 0.9995365413042816, 0.9994295176281586, 0.9993033800516088, 0.9991555221958434, 0.9989830627524896, 0.9987828223539091, 0.9985512990804113, 
0.9982846425989912, 0.9979786269375996, 0.9976286219098381, 0.9972295632174979, 0.9967759212726425, 0.9962616687970814, 0.9956802472752349, 0.9950245323566472, 0.994286798326904, 0.9934586817905299, 0.9925311447367143, 0.9914944371884851, 0.9903380596683096, 0.9890507257480735, 0.9876203249889757, 0.9860338866170509, 0.9842775443227177, 0.9823365026177825, 0.9801950052305569, 0.9778363060688365, 0.9752426433311261, 0.9723952173981886, 0.9692741731891905, 0.9658585877187169, 0.9621264636419244, 0.9580547296241031, 0.9536192484168413, 0.9487948335645304, 0.9435552757006918, 0.9378733794219011, 0.9317210117462336, 0.925069163171124, 0.9178880223403417, 0.9101470653091464, 0.9018151603583608, 0.8928606892496839, 0.8832516857336673, 0.8729559920161116, 0.8619414337558884, 0.8501760140053676, 0.8376281263119683, 0.824266786974378, 0.8100618861889562, 0.7949844575302867, 0.7790069648853952, 0.7621036056049999, 0.744250628249673, 0.7254266628974063, 0.7056130615464159, 0.6847942456990141, 0.6629580577564328, 0.6400961123992553, 0.6162041436838025, 0.5912823431627549, 0.5653356839512562, 0.5383742253214271, 0.5104133921328258, 0.48147422320926164, 0.4515835826679898, 0.42077432821062954, 0.3890854305094812, 0.35656203808065917, 0.32325548243632124, 0.28922321885981783, 0.2545286988535653, 0.2192411711698261, 0.18343540934504937, 0.14719136480963987, 0.11059374592245369, 0.07373152466343098, 0.03669737418363381, -0.00041295807017589786, -0.037501335403378626, -0.0744679673249738, -0.11121218617802132, -0.14763325437115177, -0.18363119042396353, -0.21910760101532972, -0.25396650542254123, -0.28811513819510126, -0.32146471564699186, -0.35393115179635093, -0.3854357097437049, -0.4159055751627377, -0.44527433957463, -0.4734823823726119, -0.5004771421328761, -0.5262132695579935, -0.5506526564099944, -0.5737643369561705, -0.595524260723435, -0.6159149376859064, -0.6349249593466324, -0.6525484014717374, -0.6687841164531488, -0.6836349253803936, -0.6971067218673693, -0.7092075014898059, -0.7199463323362644, -0.729332283661256, -0.7373733309626591, -0.7440752570013993, -0.7494405693569872, -0.7534674560850376, -0.7561488019255895, -0.7574712883086907, -0.7574146011093805, -0.7559507706940899, -0.7530436692313542, -0.7486486904453411, -0.7427126368804504, -0.7351738392038212, -0.7259625309616335, -0.7150015003673365, -0.7022070379653861, -0.6874901952098658, -0.6707583639596291, -0.6519171804808083, -0.6308727496660278, -0.6075341757907845, -0.5818163752757692, -0.5536431347543069, -0.5229503645189311, -0.4896894835321019, -0.4538308581604776, -0.4153672032883422, -0.374316842259938, -0.33072671205774057, -0.28467499315924344, -0.23627324053889967, -0.18566789413561363, -0.13304105448858364, -0.07861042263614869, -0.02262832296265693, 0.03462024669768518, 0.09282056229772219, 0.15163210717465353, 0.21069344235514711, 0.2696279239039355, 0.3280501383319103, 0.38557289064752537, 0.4418145483056956, 0.49640652015508874, 0.5490006342782053, 0.5992761736707294, 0.6469463347462272, 0.6917638907377248, 0.7335258695516799, 0.7720770921834346, 0.8073124614920122, 0.8391779395538649, 0.8676702022693104, 0.8928350095859888, 0.9147643759465568, 0.9335926659860233, 0.9494917731958721, 0.9626655629323815, 0.973343775136797, 0.9817755864937774, 0.9882230271528751, 0.9929544347853346, 0.9962381102625714, 0.9983363164933309, 0.999499736903073, 0.9999624845720818, 0.9999377288491531, 0.9996139846831834, 0.9991520919190354, 0.9986828978907165, 0.9983056468593409, 0.9980870737964366, 0.9980611969363409, 0.9982298023477425, 
0.9985636132407466, 0.9990041354909477, 0.9994661676347849, 0.9998409572610492, 0.9999999754894677, 0.99979926670566, 0.9990843120349359, 0.9976953228635163, 0.995472856291271, 0.9922636194672504, 0.9879263064539591, 0.9823372919483264, 0.9753959932228395, 0.9670297071636768, 0.9571977349023117, 0.9458946231648148, 0.9331523790985957, 0.9190415529839779, 0.9036711289177931, 0.8871872144264215, 0.8697705725533166, 0.8516330904824311, 0.8330133234693136, 0.8141712884534322, 0.795382705692625, 0.7769328976084606, 0.7591105514724575, 0.7422015375346774, 0.7264829487576538, 0.7122174954353333, 0.6996483512156466, 0.6889945102198332, 0.6804466817534187, 0.6741637227555183, 0.6702695910945916, 0.6688507965590815, 0.6699543312466303, 0.6735860761687079, 0.6797097042354382, 0.6882461282903349, 0.699073572589243, 0.7120283725450245, 0.7269066259545918, 0.7434668247717592, 0.761433585983945, 0.7805025707157159, 0.8003466314538108, 0.820623159505612, 0.8409825220260545, 0.8610773860281274, 0.8805726335455245, 0.8991554866767885, 0.9165453931302362, 0.932503180890307, 0.9468389815999014, 0.9594184500807207, 0.9701668722434438, 0.9790708515986974, 0.9861773880413249, 0.9915903009916562, 0.9954640901580224, 0.9979954589099684, 0.9994128368820057, 0.9999643223254764, 0.9999045172295538, 0.999480750031436, 0.9989191765846399, 0.99841122786265, 0.9981008423073924, 0.998072891507764, 0.9983431880154605, 0.9988504582422126, 0.9994506716176381, 0.9999141343155473, 0.9999257713811709, 0.9990890200268584, 0.9969337214107924, 0.9929283102878917, 0.986496446398301, 0.9770379998341144, 0.9639539966232668, 0.9466747652206995, 0.9246901288211219, 0.8975801053595118, 0.8650442597177184, 0.8269276575384448, 0.7832413484831371, 0.734175495016238, 0.6801036729041954, 0.6215774830499048, 0.5593113801896004, 0.49415846323669693, 0.4270787871014152, 0.3591024448976264, 0.2912901428196509, 0.22469418478875508, 0.1603226748441182, 0.09910934820760223, 0.04189081098359598, -0.010607814389631301, -0.0577786796016218, -0.099129078658902, -0.13427271722229267, -0.1629162132313451, -0.18484296141045642, -0.19989658814147132, -0.20796612347113003, -0.20897475025321136, -0.2028735874587927, -0.18964145556973225, -0.16929098410234827, -0.14188078101470408, -0.10753272042855168, -0.06645275633984066, -0.018953085593866464, 0.03452697391114146, 0.0934042826103129, 0.15694223895081458, 0.2242507482855584, 0.2942976700075959, 0.36593261072335653, 0.4379226892408981, 0.5089985506452639, 0.5779076252709505, 0.6434706005054386, 0.7046364652069674, 0.7605314103069679, 0.81049735773115, 0.8541168860893561, 0.891222686732936, 0.9218912221544437, 0.9464217547967742, 0.9653031724307337, 0.9791719139349342, 0.988764727684821, 0.9948699821488037, 0.9982808683680645, 0.9997532029233138, 0.9999697889632412, 0.9995125424296244, 0.9988429315691196, 0.9982907612706834, 0.9980509704302531, 0.9981878777808135, 0.9986461655290509, 0.9992677797875752, 0.9998138079446379, 0.9999902390979706, 0.9994763217765125, 0.9979540255007083, 0.9951369314216748, 0.9907967747308957, 0.9847858875475304, 0.9770539794977796, 0.9676580516722423, 0.9567647440099915, 0.9446450128639371, 0.9316616506902913, 0.9182507136122489, 0.904898345225718, 0.8921147305613297, 0.8804069689570666, 0.8702525382478942, 0.8620747813908153, 0.8562215415238096, 0.8529477641906912, 0.8524026249301471, 0.8546215530066108, 0.8595234073932372, 0.8669129938768091, 0.8764890493087031, 0.8878577111793226, 0.9005512954551571, 0.9140519004823906, 0.9278189467678621, 0.9413192920553006, 
0.9540580984743647, 0.9656082617996179, 0.97563603008616, 0.9839205073761849, 0.990365085854054, 0.9949994550555766, 0.9979716289243032, 0.9995303043743146, 0.9999986992623411, 0.9997417073040651, 0.9991286826526268, 0.9984944068958582, 0.9981008241465712, 0.9981020188074862, 0.9985147306236387, 0.9991965136474023, 0.9998334737305216, 0.9999393375786034, 0.9988673416629857, 0.9958359770119678, 0.9899688824015034, 0.9803480811229738, 0.9660783242238606, 0.9463586648843922, 0.9205557876676438, 0.8882723807633464, 0.8494033171426026, 0.8041728803920215, 0.7531478496217372, 0.6972238339210778, 0.6375854682301929, 0.5756444106049121, 0.5129619046615008, 0.45116445169316277, 0.3918615408679204, 0.336573365161501, 0.28667424788029416, 0.24335458343893634, 0.20760102293898106, 0.1801919507885942, 0.16170341551396392, 0.1525198252089829, 0.15284393669918073, 0.16270184104868896, 0.18194054803289567, 0.21021809918679893, 0.2469885510466372, 0.29148630209547605, 0.3427157228400502, 0.3994525604690838, 0.46026289814773663, 0.523543496829613, 0.5875843132135429, 0.6506503085210402, 0.7110759899377804, 0.7673632036890867, 0.8182711886239281, 0.8628882042028494, 0.9006761836023052, 0.9314834392055077, 0.955524756966621, 0.973332419544338, 0.985685040221762, 0.9935230747463438, 0.9978603583577361, 0.9997001727989734, 0.9999625930167806, 0.9994276891867356, 0.9986970077266731, 0.9981739219589043, 0.9980620504183955, 0.9983799585821975, 0.9989896677712058, 0.9996359557946317, 0.9999929559274366, 0.9997141340900682, 0.9984814219279439, 0.9960492315101929, 0.9922794014313572, 0.9871638944443706, 0.980833257815985, 0.9735503381915039, 0.9656903013030125, 0.9577094039576963, 0.9501059964387015, 0.9433777780069126, 0.9379793748777655, 0.9342839439349561, 0.9325518694629246, 0.9329088636832522, 0.9353350148614428, 0.939665594447795, 0.9456037195305942, 0.9527442153996258, 0.9606071880059655, 0.9686788984768331, 0.9764566069855408, 0.9834932699730993, 0.9894375232170568, 0.9940644396824126, 0.997293215539659, 0.9991891881581291, 0.9999492712096235, 0.9998717480781595, 0.999313104718616, 0.998635959824329, 0.9981530223816502, 0.9980723620244648, 0.998449208459339, 0.9991491383893569, 0.9998269661585876, 0.9999249419497108, 0.9986928854401915, 0.9952314792651161, 0.988557966920199, 0.9776909230773987, 0.9617477961685938, 0.9400460340259764, 0.9121964707268037, 0.8781769969892849, 0.838375885385862, 0.7935975991825955, 0.7450290275797262, 0.6941698939828211, 0.642736326386985, 0.5925500772437668, 0.5454268743605373, 0.5030757625013157, 0.46701760957680427, 0.43852622926402074, 0.41859100434142166, 0.40789651521793124, 0.40681313036479905, 0.41539295755536293, 0.43336768459580005, 0.46014799981515064, 0.4948276114131757, 0.536197466230905, 0.5827768063343528, 0.6328666929339956, 0.6846285204759043, 0.7361853326363962, 0.785738439407145, 0.8316872328456785, 0.8727374873253249, 0.9079836636686284, 0.9369539508813608, 0.9596123128749424, 0.9763183909145716, 0.9877522589047232, 0.9948154796339589, 0.998521969132484, 0.9998918215389323, 0.9998590138358339, 0.999200623106254, 0.9984916653266229, 0.9980864947972417, 0.9981251672865937, 0.9985612816667849, 0.9992064456715808, 0.9997855289161653, 0.9999962306928382, 0.9995662778171935, 0.9983019226175391, 0.9961224504319272, 0.9930771278044491, 0.9893432564110298, 0.9852064226870744, 0.9810262581078435, 0.9771926981585982, 0.9740786300894725, 0.9719949062523241, 0.9711530805525326, 0.9716400956460236, 0.973407709349509, 0.9762778514933382, 0.9799634366951373, 
0.9841024842022651, 0.9883017967186208, 0.992185075772096, 0.995439420561066, 0.9978539110207447, 0.9993445924994749, 0.9999616887253585, 0.9998771054347196, 0.9993529067805987, 0.9986940187613396, 0.9981905388175434, 0.9980564465399523, 0.99837213764796, 0.9990381114846173, 0.9997464571027956, 0.9999755834710886, 0.9990118986916514, 0.9959997541424094, 0.9900178559540849, 0.9801766210835486, 0.9657270501129552, 0.9461683721548766, 0.9213399456240257, 0.8914835151451255, 0.8572652981801886, 0.8197531341919881, 0.7803509402698986, 0.7406993598603517, 0.7025561601641277, 0.6676715725903999, 0.6376722207227156, 0.6139633034814703, 0.5976536818007582, 0.5895039780203847, 0.5898949213933513, 0.5988124867017339, 0.6158476572920468, 0.6402110603261185, 0.6707650856453944, 0.706077227208559, 0.7444974501092101, 0.7842592005335478, 0.8235988295626147, 0.8608829653362007, 0.8947293530850523, 0.9241053254097286, 0.9483901116723791, 0.9673923939254396, 0.981321667107315, 0.9907193139041656, 0.9963611488877329, 0.9991463807069597, 0.9999881822333644, 0.9997188206453483, 0.9990185220112429, 0.9983729419685705, 0.9980600700512247, 0.9981640397055123, 0.9986107727259698, 0.9992186217054334, 0.9997561459281887, 0.9999988950179604, 0.9997776631790846, 0.9990121606716248, 0.9977263362426273, 0.996044394561305, 0.994169451125968, 0.9923492689214445, 0.9908352230991384, 0.9898413194074754, 0.9895097359569219, 0.9898881215903718, 0.9909220128323899, 0.9924634885012603, 0.9942948151387484, 0.9961635889731282, 0.9978240093255248, 0.9990777027135738, 0.9998072154256351, 0.999996065877445, 0.999731076015104, 0.9991853404317714, 0.9985832117010837, 0.9981515680212719, 0.998063926907052, 0.9983853799139397, 0.9990267424419871, 0.9997157712015536, 0.9999918732765694, 0.9992284572843783, 0.9966839768299667, 0.9915788813075065, 0.9831914306328497, 0.970961281093139, 0.9545868236789244, 0.9341014255705261, 0.9099156551739199, 0.8828172632981417, 0.8539273208649094, 0.8246179748223467, 0.7964030598112155, 0.7708159484850055, 0.7492890351158639, 0.7330466041196406, 0.7230187577022831, 0.7197800252572031, 0.723513408656955, 0.7339994098980647, 0.7506297441771224, 0.7724460797141601, 0.7982041815930419, 0.8264624360590674, 0.8556906809508245, 0.8843911380346914, 0.9112192603175697, 0.9350899458006395, 0.9552549917762911, 0.971341234327946, 0.9833448994630912, 0.99158483940094, 0.9966237917464162, 0.9991711276839106, 0.9999820219252289, 0.9997666918399699, 0.9991200577742574, 0.9984778741747555, 0.998100986602845, 0.9980855120805558, 0.9983937453620738, 0.9988985856925672, 0.9994333010413233, 0.999838526529405, 0.9999995226944058, 0.9998687859585844, 0.9994718356031398, 0.9988969686373934, 0.9982724670616919, 0.9977366956815031, 0.9974074342685731, 0.9973565522867398, 0.9975948701986531, 0.998070020204032, 0.9986776647432588, 0.9992839289589609, 0.9997547321914562, 0.9999862188708591, 0.9999299792086407, 0.9996073715195437, 0.9991089720958786, 0.9985777252983031, 0.9981773040304538, 0.9980499968990628, 0.9982706369019093, 0.9988043443201083, 0.9994760139753089, 0.9999585130111487, 0.9997845282641892, 0.9983840229058667, 0.995145531671889, 0.9894954208944129, 0.9809853863933713, 0.9693756898780418, 0.9547007930880594, 0.9373057013337394, 0.9178454490468096, 0.8972460039579773, 0.8766311163390438, 0.8572248159585859, 0.8402422304562356, 0.8267817639075565, 0.8177298448960955, 0.8136864058177029, 0.8149160817254115, 0.8213275841373967, 0.832482003769292, 0.8476295695848017, 0.8657730067731257, 0.885753570769741, 
0.9063530137291385, 0.9264016766965254, 0.9448805608170361, 0.9610046609924694, 0.9742767121985635, 0.984504745653623, 0.9917826414433264, 0.9964388425237535, 0.9989631628999115, 0.9999241961122658, 0.999889898719258, 0.9993618303378606, 0.9987300362516495, 0.9982515278988074, 0.9980514866284537, 0.9981431882266908, 0.998460501699897, 0.9988957837163744, 0.9993360931617911, 0.9996918103224448, 0.9999137574751099, 0.9999974378395446, 0.9999755877515973, 0.99990240015874, 0.9998341373485915, 0.9998111887757486, 0.9998459357500541, 0.999919241158432, 0.9999863133242695, 0.9999905001123586, 0.9998816690739192, 0.9996345933763138, 0.9992624568587237, 0.9988213162302092, 0.9984030145764486, 0.9981163327126324, 0.9980586605797962, 0.9982826962427918, 0.9987642227397848, 0.999377591981763, 0.9998850316473642, 0.9999442953161253, 0.9991366287074579, 0.9970137850085965, 0.9931593065414697, 0.987256084672253, 0.9791500122184298, 0.9688990031430519, 0.9567981499635699, 0.9433752206272201, 0.9293554235787305, 0.9155993546353014, 0.9030221664438917, 0.8925044450342933, 0.8848057537152731, 0.8804905917330575, 0.8798742541851423, 0.8829934547591527, 0.8896040373782107, 0.899205770639665, 0.9110919533633846, 0.9244191963817444, 0.9382903251952056, 0.9518412293274997, 0.964321270328663, 0.9751571564584115, 0.9839923045810983, 0.990697436503427, 0.9953527801388083, 0.9982067431476781, 0.9996193065863532, 0.9999999975939916, 0.9997499906984584, 0.9992159687621853, 0.9986604491148371, 0.9982500269024412, 0.9980599965877196, 0.9980915064621304, 0.9982960254300091, 0.9986015449588417, 0.9989355515982369, 0.9992412052213443, 0.9994850454282344, 0.9996565430939521, 0.9997615218710394, 0.9998125695181433, 0.9998198534878695, 0.9997852365536394, 0.9997014135430942, 0.9995562461650394, 0.9993409141819103, 0.9990592809905383, 0.9987352736198772, 0.9984152571166979, 0.9981633323466662, 0.9980490265754137, 0.998128666257585, 0.9984234437915497, 0.9988984520125073, 0.9994474836201719, 0.9998880256135498, 0.9999696102182183, 0.999396643995329, 0.997864294740222, 0.9951033714901907, 0.9909278859953761, 0.9852776549518707, 0.9782483045970508, 0.9701025336687364, 0.9612592847154969, 0.9522610278987332, 0.94372292455172, 0.9362704697841882, 0.9304738056527669, 0.9267870949643785, 0.9255003312915423, 0.9267091097464091, 0.9303055913316595, 0.9359914520556791, 0.943311181259277, 0.9517017926168587, 0.9605529963552661, 0.969270419891836, 0.9773338937037607, 0.9843434206091258, 0.9900472891218615, 0.994349633912517, 0.9972980552416763, 0.9990549988316008, 0.999858834536014, 0.9999815575364127, 0.9996896954983713, 0.9992135663227946, 0.9987279156432111, 0.9983446538514664, 0.9981163417922221, 0.9980475587763583, 0.9981105016592461, 0.9982611440351399, 0.9984529362739691, 0.9986461497742258, 0.9988122907789989, 0.998934237845507, 0.9990036379036459, 0.9990174653823571, 0.9989754681146917, 0.998879582019196, 0.998735486533733, 0.998555543624927, 0.9983616633612932, 0.9981863601633442, 0.9980704969277542, 0.998056927520742, 0.9981802927508483, 0.9984543676643325, 0.9988593332755121, 0.999331912903331, 0.9997613049025126, 0.9999931978239429, 0.9998429280592537, 0.9991172084655203, 0.9976420952908364, 0.9952933123619807, 0.9920240661969902, 0.9878853415699531, 0.9830344861627388, 0.9777295840510393, 0.9723093761040787, 0.9671608614693514, 0.962678733850871, 0.959222097095127, 0.957074282048128, 0.9564110688307119, 0.9572813769080519, 0.9596027643967874, 0.9631721283633878, 0.9676900455626927, 0.9727954459946305, 0.9781059757848488, 
0.9832586818561915, 0.9879456958582709, 0.9919404612410675, 0.9951116302223001, 0.9974237852891817, 0.998926225435571, 0.9997327944614572, 0.9999968003192135, 0.9998853305829871, 0.9995567491651453, 0.999144059793212, 0.9987454278868497, 0.9984217632706629, 0.9982001325263768, 0.9980810564637814, 0.9980475185938984, 0.9980737299316733, 0.9981322485414315, 0.9981987716378415, 0.9982546213275755, 0.9982874737534047, 0.9982911343083469, 0.9982651144886128, 0.9982144726836545, 0.998149956253503, 0.9980880695000273, 0.9980504275107436, 0.9980617326912017, 0.9981459561059798, 0.9983207727263439, 0.998590878331838, 0.9989413587428999, 0.9993326389814143, 0.9996985925414659, 0.9999490806853912, 0.9999775358989249, 0.9996732977509557, 0.998937414777687, 0.99769974376674, 0.9959346101219739, 0.9936721969849271, 0.9910032752382353, 0.9880758248325167, 0.985083369578449, 0.9822462120068514, 0.9797879491011067, 0.9779104514952305, 0.9767707647898234, 0.9764631165326308, 0.977008457992865, 0.9783528758293861, 0.9803749493808817, 0.9829008870088949, 0.9857252225533574, 0.9886341394572409, 0.9914282232468674, 0.9939416682845091, 0.9960556469039102, 0.997704567844386, 0.9988751180474862, 0.9995990799845084, 0.9999417491771709, 0.9999882128235303, 0.9998297517001079, 0.9995522471222891, 0.9992278354965071, 0.9989103113063271, 0.9986340877781861, 0.9984160014680045, 0.9982589590012648, 0.9981563805000616, 0.9980965536577066, 0.9980662987463123, 0.9980536675441514, 0.9980496752390964, 0.9980492366361522, 0.998051526066726, 0.9980599218473961, 0.9980815781499653, 0.9981265509853287, 0.9982063475523018, 0.9983318053631115, 0.9985103437242626, 0.9987428363063715, 0.9990205743034962, 0.9993229574231383, 0.9996166023106344, 0.9998564546942912, 0.9999892264250098, 0.9999590845631777, 0.9997150648842099, 0.9992192580008472, 0.9984545187542916, 0.9974303575284683, 0.9961858273605453, 0.9947886144039016, 0.9933301105298724, 0.9919168938545502, 0.9906596451045822, 0.9896609722205634, 0.9890038215328921, 0.9887420872518262, 0.9888947082320135, 0.9894440215798376, 0.9903385156596971, 0.9914994924171237, 0.9928306096032093, 0.9942289089696467, 0.9955957989366634, 0.9968465638675528, 0.9979172901276491, 0.9987685678423527, 0.9993858579829592, 0.9997769113414817, 0.9999670061029188, 0.9999929803015589, 0.9998970587334773, 0.9997213325987708, 0.9995034943579565, 0.9992741242794089, 0.9990555321256211, 0.9988619268417471, 0.9987005469991584, 0.9985733397655885, 0.9984788109270254, 0.9984137546288137, 0.9983746765807978, 0.9983588203401678, 0.9983647752178347, 0.9983926816138413, 0.9984440621887941, 0.9985213096056977, 0.998626869310187, 0.9987621799746639, 0.9989264770066087, 0.999115618038434, 0.9993211375938125, 0.9995297616556388, 0.999723594540825, 0.9998811212973389, 0.9999790517408641, 0.9999948832096086, 0.9999099048394522, 0.9997122388692428, 0.9993994448165853, 0.9989802219893382, 0.9984748415582027, 0.9979141105617441, 0.9973368896481017, 0.9967864157598112, 0.9963058786565233, 0.995933829758652, 0.9957000395722688, 0.9956223593321707, 0.9957049947382257, 0.9959383910660184, 0.996300695420734, 0.9967605422070878, 0.9972807367195096, 0.9978223140326485, 0.9983484373815354, 0.9988276683048426, 0.9992362727786763, 0.999559396504509, 0.9997911174224682, 0.9999335353478379, 0.999995165561385, 0.9999889540146922, 0.9999302267830049, 0.9998348355904076, 0.9997176811387898, 0.99959170527108, 0.9994673585754947, 0.9993524840873768, 0.9992525164090573, 0.9991708791121773, 0.9991094674976961, 0.9990691220269861, 
0.9990500230368953, 0.999051964046986, 0.9990744855346567, 0.999116872108869, 0.9991780336423902, 0.9992563056929954, 0.9993492165805956, 0.9994532769412586, 0.9995638506601919, 0.9996751616300649, 0.9997804771562327, 0.999872485868064, 0.999943857626437, 0.9999879392652539, 0.9999995088433903, 0.9999754887424068, 0.9999155099079045, 0.9998222290712326, 0.99970132799333, 0.9995611652440365, 0.9994121003500738, 0.9992655590465586, 0.9991329483491689, 0.9990245541825584, 0.9989485581566242, 0.9989102932091373, 0.9989118233303699, 0.9989518865250199, 0.9990261903555107, 0.999128003886888, 0.9992489554349431, 0.9993799266802491, 0.9995119319795619, 0.999636885655523, 0.9997481858309566, 0.9998410756625106, 0.9999127759038331, 0.9999624115359905, 0.9999907761352553, 0.9999999889521111, 0.9999931014489265, 0.9999737038923652, 0.9999455710692713, 0.9999123721563842, 0.9998774558501154, 0.999843710050478, 0.9998134868809351, 0.9997885790002713, 0.9997702317568582, 0.999759177047171, 0.9997556778543344, 0.999759576417396, 0.9997703429771932, 0.9997871253914525, 0.999808802145085, 0.9998340421502022, 0.99986137419814, 0.9998892671677566, 0.9999162194797682, 0.9999408533403679, 0.9999620066494395, 0.9999788136593251, 0.9999907650372801, 0.9999977391607598, 0.9999999992164146, 0.9999981546280124, 0.9999930898928218, 0.9999858682976662, 0.9999776214495154, 0.9999694374756634, 0.9999622607838076, 0.9999568144220536, 0.9999535526786947, 0.9999526472098046, 0.9999540054157398, 0.9999573157345882, 0.9999621115683673, 0.9999678440554419, 0.9999739539214195, 0.999979933992569, 0.9999853762598895, 0.9999900001601674, 0.9999936615170154, 0.9999963439702587, 0.9999981364646708, 0.9999992013716485, 0.9999997381192927, 0.999999946942885, 0.9999999967192557, 0.9999999999919014, 0.99999999736624, 0.9999999525355343, 0.9999997583100512, 0.9999992531467713, 0.9999982467831489, 0.9999965526523982, 0.9999940238301828, 0.9999905884234341, 0.9999862797091513, 0.999981256148729, 0.9999758068230493, 0.99997033898244, 0.9999653463040508, 0.9999613589708372, 0.9999588795441368, 0.9999583113704977, 0.9999598884405846, 0.9999636167168061, 0.9999692366027515, 0.9999762142983329, 0.9999837664102746, 0.9999909178005737, 0.9999965879232147, 0.9999996966094434, 0.9999992771703861, 0.999994583361101, 0.999985177448774, 0.9999709892329366, 0.9999523399052732, 0.9999299293785564, 0.9999047903170547, 0.9998782157853512, 0.9998516696352759, 0.9998266892240805, 0.9998047888991016, 0.9997873702964273, 0.9997756424914672, 0.9997705520981596, 0.9997727211906992, 0.9997823899267403, 0.9997993612853981, 0.9998229474513305, 0.999851920865485, 0.9998844773371935, 0.9999182231159371, 0.9999502014634252, 0.9999769759049739, 0.9999947858353099, 0.9999997846087431, 0.9999883602876275, 0.9999575253053616, 0.9999053448908798, 0.9998313577112283, 0.9997369291305092, 0.9996254713534518, 0.9995024686508847, 0.9993752616744219, 0.9992525723314635, 0.9991437871119818, 0.999058057045235, 0.999003309787662, 0.9989852963059455, 0.9990068047494247, 0.9990671633589663, 0.9991621221811428, 0.9992841535986806, 0.99942315168194, 0.9995674501416731, 0.9997050289937737, 0.9998247502773786, 0.9999174591978461, 0.9999768099651193, 0.9999997212242314, 0.9999864257478239, 0.9999401417530104, 0.9998664473492042, 0.9997724759542054, 0.9996660638448184, 0.9995549710787806, 0.9994462681667236, 0.9993459406068766, 0.9992587204106548, 0.9991881158307143, 0.9991365829770665, 0.9991057680619079, 0.9990967460303486, 0.9991101879795419, 0.9991464034099844, 0.9992052222286426, 
0.9992857051036792, 0.9993856996872782, 0.9995012944107982, 0.9996262590707575, 0.9997515971185458, 0.9998653598959527, 0.9999528773277124, 0.9999975326257086, 0.9999821443224919, 0.999890919010354, 0.9997118139998693, 0.999439022078835, 0.9990751889013689, 0.9986329266973288, 0.9981352191472928, 0.9976144310548815, 0.9971098339280736, 0.9966638063044284, 0.9963171216539387, 0.9961039458754718, 0.9960472835137792, 0.9961556043706393, 0.996421240703893, 0.996820886941763, 0.997318200896311, 0.9978681576643711, 0.9984225109620027, 0.9989355305861302, 0.9993691478038605, 0.9996967616341301, 0.9999052138965733, 0.9999947765296363, 0.9999773411175468, 0.9998732874615278, 0.9997076812256471, 0.999506484443507, 0.9992933641775434, 0.9990874898352684, 0.9989024720561789, 0.998746371860359, 0.9986225424852988, 0.99853098113876, 0.9984698622536814, 0.9984369757458315, 0.998430870607071, 0.9984515750410017, 0.9985008111248019, 0.998581645180688, 0.9986975325914079, 0.9988507547763188, 0.999040329955147, 0.9992596151070158, 0.9994939860269177, 0.9997191418493186, 0.999900669439886, 0.9999954614092262, 0.999955369766682, 0.9997330955618475, 0.999289813672962, 0.9986035096123915, 0.9976765908514331, 0.9965411577794049, 0.9952604711410141, 0.9939256561359193, 0.9926474741820188, 0.9915439274618073, 0.9907253449811206, 0.9902792344903838, 0.9902574188530452, 0.9906677351018058, 0.9914718830007517, 0.9925899835082563, 0.9939112337498881, 0.9953089498384378, 0.9966574942669889, 0.9978482627149488, 0.9988021350208949, 0.9994765355515824, 0.9998663384609259, 0.999999047763162, 0.9999257123409201, 0.9997096818403794, 0.9994154558335643, 0.9990995402999832, 0.9988045333453536, 0.9985568193035571, 0.998367473172115, 0.9982354380993155, 0.9981518256476755, 0.9981042910146134, 0.9980807562718449, 0.9980721471845856, 0.9980741264206353, 0.9980879493616537, 0.9981205198720774, 0.9981835488481416, 0.9982915449971035, 0.9984583349449015, 0.9986920163760886, 0.9989887048931358, 0.9993260489419544, 0.99965807407383, 0.9999132520171717, 0.9999975700998439, 0.9998036953075331, 0.9992261250733631, 0.9981807018334083, 0.9966253905996243, 0.9945781952664823, 0.9921278895434602, 0.989434060341578, 0.9867147422483584, 0.984222324004575, 0.9822108982485342, 0.9809002218146279, 0.9804424841802388, 0.9808979002226159, 0.9822237676104211, 0.9842793106108283, 0.9868457943328631, 0.9896585630695683, 0.9924453905987775, 0.9949643245725726, 0.9970343779613227, 0.998553990423649, 0.9995048236565319, 0.9999415419505305, 0.9999710061490129, 0.999726114470749, 0.9993399861795499, 0.9989253148703539, 0.9985618737213653, 0.9982929031233874, 0.9981290561686783, 0.9980572013034256, 0.9980509331411204, 0.998080095164513, 0.9981176969078354, 0.9981438830287658, 0.9981476331987567, 0.9981273139398537, 0.9980909679204503, 0.9980564729486326, 0.9980507962690802, 0.9981069532741487, 0.9982573190296655, 0.9985227848976527, 0.9988987587201356, 0.9993407598688092, 0.9997537780286843, 0.9999900639061013, 0.9998591951801747, 0.999152030840663, 0.9976768592271543, 0.9953023867217707, 0.991999202195196, 0.9878699972122457, 0.9831598099477411, 0.9782409533861526, 0.9735724084170276, 0.9696390600615102, 0.966880811611032, 0.9656242003701158, 0.96602912935663, 0.9680608234293853, 0.9714926778866888, 0.9759401010907078, 0.980919693428563, 0.9859231786271488, 0.9904924622893154, 0.9942818995197085, 0.9970966393627944, 0.9989012901744024, 0.999799804729309, 0.9999936473455415, 0.9997293534005932, 0.9992475983698542, 0.9987438840726923, 0.9983467842649021, 
0.9981146974738646, 0.9980475979225075, 0.9981074116099891, 0.9982399050807508, 0.9983922837972949, 0.9985234257631279, 0.9986068283875953, 0.9986288592000184, 0.9985859750025686, 0.9984839316247918, 0.9983399810558017, 0.9981864796977467, 0.9980722728939508, 0.998057608261432, 0.9981996347044029, 0.9985285821572375, 0.9990186668673495, 0.9995613909033968, 0.9999509044851446, 0.9998904882809904, 0.9990256130596699, 0.9970028700406712, 0.9935466161538227, 0.9885384258222185, 0.9820806164036961, 0.97452595659947, 0.9664616254236048, 0.958645301555697, 0.9519022593485403, 0.9470014461904269, 0.944533409027495, 0.9448128144495896, 0.9478237624812402, 0.9532184536558826, 0.9603704111253549, 0.9684736249232164, 0.9766700511155997, 0.9841816486325443, 0.9904215110733136, 0.9950628886140584, 0.9980545716182481, 0.9995838110438882, 0.9999999965431735, 0.9997201895173562, 0.9991393472332862, 0.9985638281390429, 0.9981784408962038, 0.9980476242814609, 0.9981429649578312, 0.9983841135972882, 0.9986791767418557, 0.9989536579406866, 0.9991627673169123, 0.999288447254819, 0.99932763421834, 0.9992804568728699, 0.9991456619747517, 0.9989261940353157, 0.9986421812646905, 0.9983437355152248, 0.9981138955364985, 0.9980538127580152, 0.9982477046736238, 0.9987127075968234, 0.9993462216551625, 0.9998881787436606, 0.9999159401197362, 0.9988843370400327, 0.9962131301012556, 0.9914108198188784, 0.9842107036423774, 0.9746866268635203, 0.963315786380152, 0.95096565251657, 0.9387994477459537, 0.9281143954841214, 0.9201429242147331, 0.9158548352595413, 0.9157972485139739, 0.9200010334715566, 0.927970471451487, 0.9387592404969283, 0.9511214166031098, 0.9637120043541653, 0.9752999442983205, 0.9849514707227481, 0.9921466163582222, 0.9968070937210974, 0.9992358826767785, 0.9999905983993183, 0.9997270651841562, 0.999052555326965, 0.9984203017828113, 0.9980819658698821, 0.9980978252781356, 0.9983901103810545, 0.9988163095368401, 0.9992378700683185, 0.9995652709019811, 0.9997709076923236, 0.9998731965690797, 0.9999049091691069, 0.9998829628209204, 0.9997945204843127, 0.9996063205229, 0.9992934876026254, 0.9988744300645259, 0.9984333585544384, 0.9981136736591224, 0.9980739905611481, 0.9984115780059793, 0.9990715377375736, 0.9997698513827706, 0.9999610857399208, 0.9988752997985123, 0.9956336779126331, 0.9894310043759225, 0.9797502975483343, 0.9665580032629262, 0.9504245500343651, 0.9325287546913643, 0.9145328264212415, 0.8983481862359768, 0.8858391731595395, 0.8785237252648108, 0.8773264572170382, 0.8824256139845381, 0.8932179779021215, 0.9084085419293294, 0.9262140922029243, 0.9446502212950955, 0.9618514886481624, 0.9763611060629461, 0.9873280449381581, 0.9945699514554094, 0.9984952141821738, 0.9999149078518498, 0.9998016383539741, 0.999059412404313, 0.9983568205209923, 0.998051788142795, 0.9982087409092488, 0.9986856706807538, 0.9992540880561659, 0.9997115678329354, 0.9999545566481262, 0.999995684715152, 0.9999298269905691, 0.9998701811855351, 0.9998842508451048, 0.9999573620472962, 0.9999993490095844, 0.9998924656939028, 0.9995612319457362, 0.999033713942371, 0.9984629868032385, 0.998088374039368, 0.9981357018214579, 0.9986782737523312, 0.999498583259418, 0.9999992557667472, 0.9992071512143708, 0.9958962691675103, 0.9888250230818918, 0.9770472504594274, 0.9602243545295353, 0.9388514512329393, 0.9143234412856256, 0.8888071707351057, 0.8649395934175645, 0.8454179227568284, 0.832569312527182, 0.8279815265507489, 0.8322527920201488, 0.8448933158206253, 0.864391023913642, 0.8884375851437146, 0.9142897875214823, 
0.9392123130179771, 0.9609185313500567, 0.9779124658406545, 0.9896516908954244, 0.9964978075055134, 0.9994817678911772, 0.9999612979030564, 0.9992687460539329, 0.9984364208299624, 0.9980525981849158, 0.9982596000764398, 0.9988679498587306, 0.9995350328598774, 0.9999467000943841, 0.9999470871412522, 0.999583594406331, 0.9990642196486728, 0.9986536565384628, 0.9985532752739332, 0.9988126738291222, 0.9993069713574887, 0.9997893079830458, 0.9999998770476464, 0.9997899489646063, 0.9992095289167313, 0.9985150096294526, 0.9980773197399262, 0.9982044826233261, 0.9989246108449304, 0.9997964296923372, 0.9998184981919408, 0.9974932077644311, 0.9910677435337542, 0.978925110303932, 0.9600442781509286, 0.934408100566668, 0.9032324023642032, 0.8689308344444786, 0.8348069398656803, 0.8045456646370308, 0.7816261200400109, 0.7687791670699544, 0.7675784891858943, 0.778210248335418, 0.7994374554766539, 0.8287639398543967, 0.8627934722506234, 0.8977533195256124, 0.9301049878191503, 0.9571179599389164, 0.9772668837942962, 0.9903499339331471, 0.9973077857607321, 0.9998139276403216, 0.9997681896515949, 0.9988355650889896, 0.9981374485924009, 0.9981444029380612, 0.9987607870223814, 0.999545684802102, 0.9999876535074211, 0.9997458753468796, 0.9987883880202717, 0.9973953299526492, 0.9960406996183598, 0.9952049353614583, 0.9951900241739217, 0.9960039572270506, 0.9973555955297907, 0.9987630334255682, 0.9997390544778643, 0.9999874887439766, 0.9995343884587221, 0.9987336126652503, 0.9981247848202152, 0.9981707102785519, 0.9989451690499653, 0.9998677070474756, 0.9995837425662797, 0.9960649119016753, 0.986956354040247, 0.9701278761440612, 0.944308471425113, 0.9096273038469713, 0.8678832944049619, 0.8224360625903991, 0.7777312929760558, 0.7385887120257861, 0.709436058875232, 0.6936500879461805, 0.6930951012709795, 0.7078823209914242, 0.7363452416101672, 0.775234299437487, 0.8201444401854721, 0.866164084845386, 0.9086645759752942, 0.9440676982143043, 0.9703910299885649, 0.9874159244812865, 0.99644105881104, 0.9997200910185677, 0.9997711970902484, 0.9987575859968931, 0.9980838738246285, 0.9982699125665238, 0.9990841469740069, 0.9998590798216015, 0.9998765897585156, 0.9987034479333228, 0.9963804450997752, 0.9934190819257951, 0.9906234951976153, 0.988810329163728, 0.9885271171140644, 0.9898640599260747, 0.992420689177979, 0.9954394813274385, 0.9980654687451946, 0.9996469630071034, 0.9999712685612219, 0.9993415587128749, 0.9984474341984788, 0.9980481450610625, 0.9985511182405652, 0.9996095411189545, 0.9998728755177155, 0.9970034847815885, 0.9880213147872413, 0.9699555529472014, 0.9406761513913438, 0.8996819032823462, 0.848588234379241, 0.7911275748193752, 0.7326359984726198, 0.6791781144586464, 0.6365653034878315, 0.6095036590408397, 0.6009989847969845, 0.6120290078120411, 0.6414387804238425, 0.6860382950096691, 0.740936214039847, 0.8001606272749263, 0.857552906455859, 0.907792223434736, 0.9472934896593913, 0.9747073473558493, 0.9908680433708389, 0.9982294419719877, 0.9999995031298231, 0.9992519245107228, 0.9982516646723653, 0.9981256024319028, 0.9988968543709199, 0.9998128778986017, 0.9998396248245651, 0.998166084555228, 0.9945705982494119, 0.9895474943476356, 0.9841714320020586, 0.9797612645522283, 0.9774641327960704, 0.9778958035066355, 0.9809472301490066, 0.9858146863927424, 0.9912467652540075, 0.9959376290277555, 0.9989450356110304, 0.9999913546438715, 0.9995309223984279, 0.9985364201377029, 0.9980475973616613, 0.9986044633995448, 0.9997304532249455, 0.999634498779512, 0.995269657747094, 0.9828193716268308, 
0.9585735957004895, 0.9200145400356792, 0.866803755929366, 0.8013281864287203, 0.7285793616871454, 0.6553789728789277, 0.5892064441062423, 0.5369947909572704, 0.5041887932509307, 0.494172828335532, 0.5080057598160709, 0.5443458320265848, 0.5995177662099693, 0.6677913574328835, 0.7419983594191332, 0.81454067070561, 0.8786549335315016, 0.9296065034650896, 0.9654264683172218, 0.9869418459544547, 0.9971149267680521, 0.9999544051649587, 0.9993677021863522, 0.9982724497291225, 0.9981438745465627, 0.9990271559548822, 0.9999333763144419, 0.9994666600979687, 0.9964907800782373, 0.9906416813762736, 0.9825411553177172, 0.9736630965530993, 0.9659159695886195, 0.9610889600526871, 0.9603363647069442, 0.9638473717647323, 0.9707887592315989, 0.9795368162634763, 0.9881412238117783, 0.9948945068600296, 0.9988334440804907, 0.9999995843628927, 0.9993493529977611, 0.9983139484154439, 0.9981226100230366, 0.999078419071227, 0.9999988357439212, 0.9980135766569125, 0.9888589980978035, 0.967710068888476, 0.9304338774758094, 0.8749575939674755, 0.8023087213471133, 0.7169204295151798, 0.6260409855518235, 0.5384418185853419, 0.4628831352484172, 0.40681486478350776, 0.37557459640762253, 0.3720532679158506, 0.3966177669465027, 0.4470950623724431, 0.5187920055999993, 0.6047172666641223, 0.6962417864625361, 0.784301862084173, 0.8609616968124286, 0.9208805107815848, 0.9621655240089311, 0.9863048155112751, 0.997244614103345, 0.9999876073304338, 0.9991917499599601, 0.9981468079630731, 0.9983104944143691, 0.9994078212968676, 0.9999781790516322, 0.9981814281108605, 0.9926303233581559, 0.9830128509996947, 0.9703282753340972, 0.9566835976185866, 0.9447393976658209]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--KeyboardInterrupt--\n", + "\n", + "KeyboardInterrupt: Interrupted by user\n" ] } ], diff --git a/tests/test_synthetic.py b/tests/test_synthetic.py index 03bee19..a6795fd 100644 --- a/tests/test_synthetic.py +++ b/tests/test_synthetic.py @@ -13,9 +13,33 @@ print("library path: {:}".format(lib_dir)) if str(lib_dir) not in sys.path: sys.path.insert(0, str(lib_dir)) +from datasets import QuadraticFunction from datasets import SynAdaptiveEnv +class TestQuadraticFunction(unittest.TestCase): + """Test the quadratic function.""" + + def test_simple(self): + function = QuadraticFunction([[0, 1], [0.5, 4], [1, 1]]) + print(function) + for x in (0, 0.5, 1): + print("f({:})={:}".format(x, function[x])) + thresh = 0.2 + self.assertTrue(abs(function[0] - 1) < thresh) + self.assertTrue(abs(function[0.5] - 4) < thresh) + self.assertTrue(abs(function[1] - 1) < thresh) + + def test_none(self): + function = QuadraticFunction() + function.fit([[0, 1], [0.5, 4], [1, 1]], max_iter=3000, verbose=True) + print(function) + thresh = 0.2 + self.assertTrue(abs(function[0] - 1) < thresh) + self.assertTrue(abs(function[0.5] - 4) < thresh) + self.assertTrue(abs(function[1] - 1) < thresh) + + class TestSynAdaptiveEnv(unittest.TestCase): """Test the synethtic adaptive environment."""
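
The new tests pin down the interface that `datasets.QuadraticFunction` must provide: construction from a list of `(x, y)` points, point-wise evaluation via indexing (`function[x]`), and a no-argument constructor followed by `fit(points, max_iter=..., verbose=...)`. Below is a minimal sketch that satisfies those tests; it is an illustration only, not the repository's implementation, and its closed-form `numpy.linalg.lstsq` solve (with `max_iter` accepted purely for signature compatibility) is an assumption of this sketch.

# Hypothetical stand-in for datasets.QuadraticFunction: a sketch that meets
# the contract exercised by tests/test_synthetic.py, not the actual class.
import numpy as np


class QuadraticFunction:
    """y = a * x^2 + b * x + c, with (a, b, c) determined from (x, y) points."""

    def __init__(self, points=None):
        self._coef = None  # (a, b, c) once fitted
        if points is not None:
            self.fit(points)

    def fit(self, points, max_iter=None, verbose=False):
        # Least-squares solve of the Vandermonde system; three points with
        # distinct x values are interpolated exactly, so max_iter exists
        # only to mirror the signature used by the tests.
        xs = np.array([p[0] for p in points], dtype=float)
        ys = np.array([p[1] for p in points], dtype=float)
        A = np.stack([xs**2, xs, np.ones_like(xs)], axis=1)
        self._coef, *_ = np.linalg.lstsq(A, ys, rcond=None)
        if verbose:
            print("fitted coefficients: {:}".format(self._coef))

    def __getitem__(self, x):
        a, b, c = self._coef
        return a * x * x + b * x + c

    def __repr__(self):
        if self._coef is None:
            return "{:}(unfitted)".format(self.__class__.__name__)
        return "{:}(y = {:.3f}*x^2 + {:.3f}*x + {:.3f})".format(
            self.__class__.__name__, *self._coef
        )

Under this sketch, `QuadraticFunction([[0, 1], [0.5, 4], [1, 1]])` recovers y = -12x^2 + 12x + 1, which passes all three assertions exactly; the 0.2 threshold in the tests suggests the real class fits the coefficients approximately (e.g., iteratively) rather than interpolating.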