diff --git a/README.md b/README.md index 683ec2541404f087c27cbdc4f47fa5ce403cdb9d..03c132b0899a19359e36518e0ace8c1828088bf8 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,14 @@ --- -title: Et Viewer -emoji: 🏠 -colorFrom: gray -colorTo: pink +title: DIRECTOR Demo +emoji: 👀 +colorFrom: purple +colorTo: yellow sdk: gradio -sdk_version: 5.8.0 +sdk_version: 4.32.2 app_file: app.py pinned: false +license: mit --- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference +arxiv.org/abs/2407.01516 diff --git a/app.py b/app.py new file mode 100644 index 0000000000000000000000000000000000000000..351e5be29fb33a19d7271ad5884cee3246bd9fce --- /dev/null +++ b/app.py @@ -0,0 +1,220 @@ +import spaces + +from functools import partial +from typing import Any, Callable, Dict + +import clip +import gradio as gr +from gradio_rerun import Rerun +import numpy as np +import trimesh +import rerun as rr +import torch + +from utils.common_viz import init, get_batch +from utils.random_utils import set_random_seed +from utils.rerun import log_sample +from src.diffuser import Diffuser +from src.datasets.multimodal_dataset import MultimodalDataset + +# --------------------------------------------------------------------------------------- # + +batch_size, num_cams, num_verts = None, None, None + +SAMPLE_IDS = [ + "2011_KAeAqaA0Llg_00005_00001", + "2011_F_EuMeT2wBo_00014_00001", + "2011_MCkKihQrNA4_00014_00000", +] +LABEL_TO_IDS = { + "right": 0, + "static": 1, + "complex": 2, +} +EXAMPLES = [ + "While the character moves right, the camera trucks right.", + "While the character moves right, the camera performs a push in.", + "While the character moves right, the camera performs a pull out.", + "While the character stays static, the camera performs a boom bottom.", + "While the character stays static, the camera performs a boom top.", + "While the character moves to the right, the camera trucks right alongside them. Once the character comes to a stop, the camera remains static.", # noqa + "While the character moves to the right, the camera remains static. Once the character comes to a stop, the camera pushes in.", # noqa +] +DEFAULT_TEXT = [ + "While the character moves right, the camera [...].", + "While the character remains static, [...].", + "While the character moves to the right, the camera [...]. " + "Once the character comes to a stop, the camera [...].", +] + +HEADER = """ + +
+

E.T. the Exceptional Trajectories

+Robin Courant +· +Nicolas Dufour +· +Xi Wang +· +Marc Christie +· +Vicky Kalogeiton +
+ + +
+ [Webpage]      + [DIRECTOR]      + [CLaTr]      + [Data]      +
+ +
+""" + +# ------------------------------------------------------------------------------------- # + + +def get_normals(vertices: torch.Tensor, faces: torch.Tensor) -> torch.Tensor: + num_frames, num_faces = vertices.shape[0], faces.shape[-2] + faces = faces.expand(num_frames, num_faces, 3) + + normals = [ + trimesh.Trimesh(vertices=v, faces=f, process=False).vertex_normals + for v, f in zip(vertices, faces) + ] + normals = torch.from_numpy(np.stack(normals)) + + return normals + + +@spaces.GPU +def generate( + prompt: str, + seed: int, + guidance_weight: float, + sample_label: str, + # ----------------------- # + dataset: MultimodalDataset, + device: torch.device, + diffuser: Diffuser, + clip_model: clip.model.CLIP, +) -> Dict[str, Any]: + diffuser.to(device) + clip_model.to(device) + + # Set arguments + set_random_seed(seed) + diffuser.gen_seeds = np.array([seed]) + diffuser.guidance_weight = guidance_weight + + # Inference + sample_id = SAMPLE_IDS[LABEL_TO_IDS[sample_label]] + seq_feat = diffuser.net.model.clip_sequential + + batch = get_batch(prompt, sample_id, clip_model, dataset, seq_feat, device) + with torch.no_grad(): + out = diffuser.predict_step(batch, 0) + + # Run visualization + padding_mask = out["padding_mask"][0].to(bool).cpu() + padded_traj = out["gen_samples"][0].cpu() + traj = padded_traj[padding_mask] + padded_vertices = out["char_raw"]["char_vertices"][0] + vertices = padded_vertices[padding_mask] + faces = out["char_raw"]["char_faces"][0] + normals = get_normals(vertices, faces) + fx, fy, cx, cy = out["intrinsics"][0].cpu().numpy() + K = np.array([[fx, 0, cx], [0, fy, cy], [0, 0, 1]]) + caption = out["caption_raw"][0] + + rr.init(f"{sample_id}") + rr.save(".tmp_gr.rrd") + log_sample( + root_name="world", + traj=traj.numpy(), + K=K, + vertices=vertices.numpy(), + faces=faces.numpy(), + normals=normals.numpy(), + caption=caption, + mesh_masks=None, + ) + return "./.tmp_gr.rrd" + + +# ------------------------------------------------------------------------------------- # + + +def launch_app(gen_fn: Callable): + theme = gr.themes.Default(primary_hue="blue", secondary_hue="gray") + + with gr.Blocks(theme=theme) as demo: + gr.Markdown(HEADER) + + with gr.Row(): + with gr.Column(scale=3): + with gr.Column(scale=2): + sample_str = gr.Dropdown( + choices=["static", "right", "complex"], + label="Character trajectory", + value="right", + interactive=True, + ) + text = gr.Textbox( + placeholder="Type the camera motion you want to generate", + show_label=True, + label="Text prompt", + value=DEFAULT_TEXT[LABEL_TO_IDS[sample_str.value]], + ) + seed = gr.Number(value=33, label="Seed") + guidance = gr.Slider(0, 10, value=1.4, label="Guidance", step=0.1) + + with gr.Column(scale=1): + btn = gr.Button("Generate", variant="primary") + + with gr.Column(scale=2): + examples = gr.Examples( + examples=[[x, None, None] for x in EXAMPLES], + inputs=[text], + ) + + with gr.Row(): + output = Rerun() + + def load_example(example_id): + processed_example = examples.non_none_processed_examples[example_id] + return gr.utils.resolve_singleton(processed_example) + + def change_fn(change): + sample_index = LABEL_TO_IDS[change] + return gr.update(value=DEFAULT_TEXT[sample_index]) + + sample_str.change(fn=change_fn, inputs=[sample_str], outputs=[text]) + + inputs = [text, seed, guidance, sample_str] + examples.dataset.click( + load_example, + inputs=[examples.dataset], + outputs=examples.inputs_with_examples, + show_progress=False, + postprocess=False, + queue=False, + ).then(fn=gen_fn, inputs=inputs, 
outputs=[output]) + btn.click(fn=gen_fn, inputs=inputs, outputs=[output]) + text.submit(fn=gen_fn, inputs=inputs, outputs=[output]) + demo.queue().launch(share=False) + + +# ------------------------------------------------------------------------------------- # + +diffuser, clip_model, dataset, device = init("config") +generate_sample = partial( + generate, + dataset=dataset, + device=device, + diffuser=diffuser, + clip_model=clip_model, +) +launch_app(generate_sample) diff --git a/checkpoints/ca-mixed-e449.ckpt b/checkpoints/ca-mixed-e449.ckpt new file mode 100644 index 0000000000000000000000000000000000000000..9a2b40e870b53033bcb47488afd13beaa99ed533 --- /dev/null +++ b/checkpoints/ca-mixed-e449.ckpt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e71711804d4c7aeed8f2c74c06ea88633bcc15e7389c51ea6705f8b615506c33 +size 794933338 diff --git a/configs/compnode/cpu.yaml b/configs/compnode/cpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..56b4d7f319e85b54c2ad80c4e351a99613b359d7 --- /dev/null +++ b/configs/compnode/cpu.yaml @@ -0,0 +1,3 @@ +device: cpu +num_gpus: 1 +num_workers: 8 \ No newline at end of file diff --git a/configs/compnode/gpu.yaml b/configs/compnode/gpu.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3e55225d9d063e28fea1614fcb1ebf0160b00216 --- /dev/null +++ b/configs/compnode/gpu.yaml @@ -0,0 +1,3 @@ +device: cuda +num_gpus: 1 +num_workers: 8 \ No newline at end of file diff --git a/configs/config.yaml b/configs/config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ad2522b9a7a3da0fa57763e646ae33fedb974168 --- /dev/null +++ b/configs/config.yaml @@ -0,0 +1,17 @@ +defaults: + - dataset: traj+caption+char + - diffuser: rn_director_edm + - compnode: gpu + - _self_ + +dataset: + char: + load_vertices: true + +checkpoint_path: 'checkpoints/ca-mixed-e449.ckpt' +batch_size: 128 +data_dir: data + +hydra: + run: + dir: ./${results_dir}/${xp_name}/${timestamp} \ No newline at end of file diff --git a/configs/dataset/caption/caption.yaml b/configs/dataset/caption/caption.yaml new file mode 100644 index 0000000000000000000000000000000000000000..914662e6d607b171ae2b34a2dbb65f4921402160 --- /dev/null +++ b/configs/dataset/caption/caption.yaml @@ -0,0 +1,14 @@ +_target_: src.datasets.modalities.caption_dataset.CaptionDataset + +name: caption + +dataset_dir: ${dataset.dataset_dir} +segment_dir: ${dataset.dataset_dir}/cam_segments +raw_caption_dir: ${dataset.dataset_dir}/caption +feat_caption_dir: ${dataset.dataset_dir}/caption_clip + +num_segments: 27 +num_feats: 512 +num_cams: ${dataset.standardization.num_cams} +sequential: ${diffuser.network.module.clip_sequential} +max_feat_length: 77 \ No newline at end of file diff --git a/configs/dataset/char/char.yaml b/configs/dataset/char/char.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1875117e51c6757a330f127c7e9dc54808a7f634 --- /dev/null +++ b/configs/dataset/char/char.yaml @@ -0,0 +1,15 @@ +_target_: src.datasets.modalities.char_dataset.CharacterDataset + +name: char +dataset_dir: ${dataset.dataset_dir} +num_cams: ${dataset.num_cams} +num_raw_feats: 3 +num_frequencies: 10 +min_freq: 0 +max_freq: 4 +num_encoding: 3 # ${eval:'2 * ${dataset.char.num_frequencies} * ${dataset.char.num_raw_feats}'} +sequential: ${diffuser.network.module.cond_sequential} +num_feats: ${eval:'${dataset.char.num_encoding} if ${dataset.char.sequential} else ${dataset.num_cams} * ${dataset.char.num_encoding}'} +standardize: 
${dataset.trajectory.standardize} +standardization: ${dataset.standardization} +load_vertices: ${diffuser.do_projection} \ No newline at end of file diff --git a/configs/dataset/standardization/0300.yaml b/configs/dataset/standardization/0300.yaml new file mode 100644 index 0000000000000000000000000000000000000000..12384e75acdd5dbc5c90735363b5c357ba0949c6 --- /dev/null +++ b/configs/dataset/standardization/0300.yaml @@ -0,0 +1,15 @@ +name: '0300' +num_interframes: 0 +num_cams: 300 +num_total_frames: ${eval:'${dataset.standardization.num_interframes} * (${dataset.standardization.num_cams} - 1) + ${dataset.standardization.num_cams} '} + +norm_mean: [7.93987673e-05, -9.98621393e-05, 4.12940653e-04] +norm_std: [0.027841, 0.01819818, 0.03138536] + +shift_mean: [0.00201079, -0.27488501, -1.23616805] +shift_std: [1.13433516, 1.19061042, 1.58744263] + +norm_mean_h: [6.676e-05, -5.084e-05, -7.782e-04] +norm_std_h: [0.0105, 0.006958, 0.01145] + +velocity: true \ No newline at end of file diff --git a/configs/dataset/traj+caption+char.yaml b/configs/dataset/traj+caption+char.yaml new file mode 100644 index 0000000000000000000000000000000000000000..45a971bc13e01af22db13b779b5fc8dd791716fe --- /dev/null +++ b/configs/dataset/traj+caption+char.yaml @@ -0,0 +1,18 @@ +_target_: src.datasets.multimodal_dataset.MultimodalDataset + +defaults: + - _self_ + - trajectory: rot6d_trajectory + - char: char + - caption: caption + - standardization: '0300' + +name: "${dataset.standardization.name}-t:${dataset.trajectory.name}|c:${dataset.caption.name}|h:${dataset.char.name}" +dataset_name: ${dataset.standardization.name} +dataset_dir: ${data_dir} + +num_rawfeats: 12 +num_cams: ${dataset.standardization.num_cams} +feature_type: ${dataset.trajectory.name} +num_feats: ${dataset.trajectory.num_feats} +num_cond_feats: ['${dataset.char.num_feats}','${dataset.caption.num_feats}'] \ No newline at end of file diff --git a/configs/dataset/trajectory/rot6d_trajectory.yaml b/configs/dataset/trajectory/rot6d_trajectory.yaml new file mode 100644 index 0000000000000000000000000000000000000000..38cadafb64cab0b078aaf31378fd31a4b2200b5b --- /dev/null +++ b/configs/dataset/trajectory/rot6d_trajectory.yaml @@ -0,0 +1,10 @@ +_target_: src.datasets.modalities.trajectory_dataset.TrajectoryDataset + +name: rot6d +set_name: null +dataset_dir: ${dataset.dataset_dir} +num_feats: 9 +num_rawfeats: ${dataset.num_rawfeats} +num_cams: ${dataset.num_cams} +standardize: true +standardization: ${dataset.standardization} diff --git a/configs/diffuser/network/module/ca_director.yaml b/configs/diffuser/network/module/ca_director.yaml new file mode 100644 index 0000000000000000000000000000000000000000..91f92b53a8388e1d4694b6fe8517a6ff4257dcd9 --- /dev/null +++ b/configs/diffuser/network/module/ca_director.yaml @@ -0,0 +1,17 @@ +_target_: src.models.modules.director.CrossAttentionDirector +name: ca_director +num_feats: ${dataset.num_feats} +num_rawfeats: ${dataset.num_rawfeats} +num_cams: ${dataset.num_cams} +num_cond_feats: ${dataset.num_cond_feats} +latent_dim: 512 +mlp_multiplier: 4 +num_layers: 8 +num_heads: 16 +dropout: 0.1 +stochastic_depth: 0.1 +label_dropout: 0.1 +num_text_registers: 16 +clip_sequential: True +cond_sequential: True +device: ${compnode.device} \ No newline at end of file diff --git a/configs/diffuser/network/rn_director.yaml b/configs/diffuser/network/rn_director.yaml new file mode 100644 index 0000000000000000000000000000000000000000..107ee0c7d874dfff4dbece350448ad7fd2018576 --- /dev/null +++ 
b/configs/diffuser/network/rn_director.yaml @@ -0,0 +1,7 @@ +_target_: src.models.networks.RnEDMPrecond + +defaults: + - module: ca_director + +name: rn_director +sigma_data: 0.5 \ No newline at end of file diff --git a/configs/diffuser/rn_director_edm.yaml b/configs/diffuser/rn_director_edm.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6ae73cd15f8e345ce447b52da5bef612a1afbbbb --- /dev/null +++ b/configs/diffuser/rn_director_edm.yaml @@ -0,0 +1,24 @@ +_target_: src.diffuser.Diffuser + +defaults: + - _self_ + - network: rn_director + +guidance_weight: 1.4 +edm2_normalization: true + +# EMA +ema_kwargs: + beta: 0.9999 + update_every: 1 + +# Sampling +sampling_kwargs: + num_steps: 10 + sigma_min: 0.002 + sigma_max: 80 + rho: 40 + S_churn: 0 + S_min: 0 + S_max: inf + S_noise: 1 \ No newline at end of file diff --git a/data/cam_segments/2011_F_EuMeT2wBo_00014_00001.npy b/data/cam_segments/2011_F_EuMeT2wBo_00014_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..3c794c67ff2af7690b04e78e810157d333e82955 --- /dev/null +++ b/data/cam_segments/2011_F_EuMeT2wBo_00014_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2ddd2730d1600267369237b3974e448dec0f67633da72f4ebf800b34a5f42b50 +size 312 diff --git a/data/cam_segments/2011_KAeAqaA0Llg_00005_00001.npy b/data/cam_segments/2011_KAeAqaA0Llg_00005_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..808f68768cabe1debf0174670f9b0d1cb7346756 --- /dev/null +++ b/data/cam_segments/2011_KAeAqaA0Llg_00005_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6c2454b01407582c738e4397d34b73849fe9487217a1e1ae6f1c2700c6d7ffa8 +size 584 diff --git a/data/cam_segments/2011_MCkKihQrNA4_00014_00000.npy b/data/cam_segments/2011_MCkKihQrNA4_00014_00000.npy new file mode 100644 index 0000000000000000000000000000000000000000..ff8afb69bb4a67bc1a68b4269bf8360dde1d8fbc --- /dev/null +++ b/data/cam_segments/2011_MCkKihQrNA4_00014_00000.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c9b22e7e0028de49a546d8753074b936e9e66d3388cf515ec060727aedab1a89 +size 1816 diff --git a/data/caption/2011_F_EuMeT2wBo_00014_00001.txt b/data/caption/2011_F_EuMeT2wBo_00014_00001.txt new file mode 100644 index 0000000000000000000000000000000000000000..1f9f40824130ba4323de74d0c179a64b00d4304e --- /dev/null +++ b/data/caption/2011_F_EuMeT2wBo_00014_00001.txt @@ -0,0 +1 @@ +The camera trucks right and pushes in while the main character remains stationary. \ No newline at end of file diff --git a/data/caption/2011_KAeAqaA0Llg_00005_00001.txt b/data/caption/2011_KAeAqaA0Llg_00005_00001.txt new file mode 100644 index 0000000000000000000000000000000000000000..30c0833b0a140bb8e7799390fed6c966b898897b --- /dev/null +++ b/data/caption/2011_KAeAqaA0Llg_00005_00001.txt @@ -0,0 +1 @@ +As the character moves right, the camera trucks left to maintain a consistent framing. \ No newline at end of file diff --git a/data/caption/2011_MCkKihQrNA4_00014_00000.txt b/data/caption/2011_MCkKihQrNA4_00014_00000.txt new file mode 100644 index 0000000000000000000000000000000000000000..90e8fa4a254bfc9408db4ae226061fe285094c42 --- /dev/null +++ b/data/caption/2011_MCkKihQrNA4_00014_00000.txt @@ -0,0 +1 @@ +While the character moves to the right, the camera trucks right alongside them. Once the character comes to a stop, the camera remains static. 
\ No newline at end of file diff --git a/data/caption_clip/seq/2011_F_EuMeT2wBo_00014_00001.npy b/data/caption_clip/seq/2011_F_EuMeT2wBo_00014_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..3852a76fcd353a0b125be7e913b090be07be46c0 --- /dev/null +++ b/data/caption_clip/seq/2011_F_EuMeT2wBo_00014_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f24a29b321cea7039bb84590ef1c91e8c8a7f045f237f1d2c6d91b4078668e8c +size 32896 diff --git a/data/caption_clip/seq/2011_KAeAqaA0Llg_00005_00001.npy b/data/caption_clip/seq/2011_KAeAqaA0Llg_00005_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..41c692a52a22109d0b4aa4487e3f08e7a8277d62 --- /dev/null +++ b/data/caption_clip/seq/2011_KAeAqaA0Llg_00005_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e5a3985783a63231b7d138d022f923ca79b6d2e5367dba50830c4b8132dddaab +size 36992 diff --git a/data/caption_clip/seq/2011_MCkKihQrNA4_00014_00000.npy b/data/caption_clip/seq/2011_MCkKihQrNA4_00014_00000.npy new file mode 100644 index 0000000000000000000000000000000000000000..bc41e842dceaa94b7dd31164eb3119cef7c4febb --- /dev/null +++ b/data/caption_clip/seq/2011_MCkKihQrNA4_00014_00000.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:14a7b37dd38d4d73366ffff1599e929cbb9b6524cb1720dce5cf0d10036b404f +size 61568 diff --git a/data/caption_clip/token/2011_F_EuMeT2wBo_00014_00001.npy b/data/caption_clip/token/2011_F_EuMeT2wBo_00014_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..aa403117f6cd273b33f42aeeb0f812ae4be84268 --- /dev/null +++ b/data/caption_clip/token/2011_F_EuMeT2wBo_00014_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4d535a49f0dc2a507461c7777adebb6a18c076c8071e8becba1694f98bf2090e +size 2176 diff --git a/data/caption_clip/token/2011_KAeAqaA0Llg_00005_00001.npy b/data/caption_clip/token/2011_KAeAqaA0Llg_00005_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..cbccecd4ece55b5d5198f8c0eb313ba0d2c82357 --- /dev/null +++ b/data/caption_clip/token/2011_KAeAqaA0Llg_00005_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:22889b436b84e1cb69557e07d345fdd83562887d457f401b797408da91d2e059 +size 2176 diff --git a/data/caption_clip/token/2011_MCkKihQrNA4_00014_00000.npy b/data/caption_clip/token/2011_MCkKihQrNA4_00014_00000.npy new file mode 100644 index 0000000000000000000000000000000000000000..d1ac58da6ae5cd848d715b222825a746356b5535 --- /dev/null +++ b/data/caption_clip/token/2011_MCkKihQrNA4_00014_00000.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:928e26a4e3f436e6bd0005347762dda59d7dc6027c7e671e41ce3df7850bde8b +size 2176 diff --git a/data/char/2011_F_EuMeT2wBo_00014_00001.npy b/data/char/2011_F_EuMeT2wBo_00014_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..a1a489bcd007f09cc193e6a4899695cf8acb1961 --- /dev/null +++ b/data/char/2011_F_EuMeT2wBo_00014_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d0cf8af579334a058e40e58bec59f73c28065b87854221f2d85dd335e9c81102 +size 704 diff --git a/data/char/2011_KAeAqaA0Llg_00005_00001.npy b/data/char/2011_KAeAqaA0Llg_00005_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..b090ef14e5f2e86c4f357707aa58fb2c1b823301 --- /dev/null +++ b/data/char/2011_KAeAqaA0Llg_00005_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 
+oid sha256:24fb04444b38e8f35e859b353b984bd7bb0b97f8829fa21e8428c0e434e1c1b4 +size 1520 diff --git a/data/char/2011_MCkKihQrNA4_00014_00000.npy b/data/char/2011_MCkKihQrNA4_00014_00000.npy new file mode 100644 index 0000000000000000000000000000000000000000..fee43a3d1bbb52152187c92d7e83d9cb19b5f102 --- /dev/null +++ b/data/char/2011_MCkKihQrNA4_00014_00000.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:471bc1a581956713f6df919bf0f1af3734568adb24f4f4296244e91cae5da5e6 +size 5216 diff --git a/data/char_raw/2011_F_EuMeT2wBo_00014_00001.npy b/data/char_raw/2011_F_EuMeT2wBo_00014_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..942d158f8b867f98b6ee0dd58d98bfd37d4268ea --- /dev/null +++ b/data/char_raw/2011_F_EuMeT2wBo_00014_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5f0a15435b04e3e43306d74fedbbfd66344e6c41f9fcbccc052520bf13e41744 +size 704 diff --git a/data/char_raw/2011_KAeAqaA0Llg_00005_00001.npy b/data/char_raw/2011_KAeAqaA0Llg_00005_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..64de40928844c68835826ad3eb56ddf02ed46dae --- /dev/null +++ b/data/char_raw/2011_KAeAqaA0Llg_00005_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:809c835ae7bb4de4fe7beb30e4855e9fcb00946568b7477af66c5be551742b08 +size 1520 diff --git a/data/char_raw/2011_MCkKihQrNA4_00014_00000.npy b/data/char_raw/2011_MCkKihQrNA4_00014_00000.npy new file mode 100644 index 0000000000000000000000000000000000000000..f1ca75bf8ee0a473d802e7d2828de6cca3532ca9 --- /dev/null +++ b/data/char_raw/2011_MCkKihQrNA4_00014_00000.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:911bb9bca1fe30ce2aca6dbd194a3f7f86c70a9cbf067cec4fed7ef02ae25601 +size 5216 diff --git a/data/char_segments/2011_F_EuMeT2wBo_00014_00001.npy b/data/char_segments/2011_F_EuMeT2wBo_00014_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..7dac3ab860774aa3e541adf641c3243e47fb74d6 --- /dev/null +++ b/data/char_segments/2011_F_EuMeT2wBo_00014_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:127e16e3515c959ac5e1600a59b2858f11c8f345124a7616131093e5c79b6a35 +size 312 diff --git a/data/char_segments/2011_KAeAqaA0Llg_00005_00001.npy b/data/char_segments/2011_KAeAqaA0Llg_00005_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..c3d884380c068d8ff17e82a903767b8a9e3aa588 --- /dev/null +++ b/data/char_segments/2011_KAeAqaA0Llg_00005_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c0d0c6b7e2e9cd10a208a304678250ce1d97245cc3beaa35bf507526395742fe +size 584 diff --git a/data/char_segments/2011_MCkKihQrNA4_00014_00000.npy b/data/char_segments/2011_MCkKihQrNA4_00014_00000.npy new file mode 100644 index 0000000000000000000000000000000000000000..7060700435b7a99050afb8d15b1a445badb3f2f3 --- /dev/null +++ b/data/char_segments/2011_MCkKihQrNA4_00014_00000.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4cfbb0a8d8284ad1028f57fde802ee71ca41d7a7368a644b0dbffacc79fb3b5b +size 1816 diff --git a/data/demo_split.txt b/data/demo_split.txt new file mode 100644 index 0000000000000000000000000000000000000000..3cc0377800fb04064751c5978551ab59a411959f --- /dev/null +++ b/data/demo_split.txt @@ -0,0 +1,3 @@ +2011_KAeAqaA0Llg_00005_00001 +2011_F_EuMeT2wBo_00014_00001 +2011_MCkKihQrNA4_00014_00000 \ No newline at end of file diff --git 
a/data/intrinsics/2011_F_EuMeT2wBo_00014_00001.npy b/data/intrinsics/2011_F_EuMeT2wBo_00014_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..ee1de5f882fbf08dc2ef587f47f04c50c5374e0d --- /dev/null +++ b/data/intrinsics/2011_F_EuMeT2wBo_00014_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:28a4cb0776cada1d339c2cccc09b9d2e8edc329b6853cfaf24536f995d2d4e1c +size 136 diff --git a/data/intrinsics/2011_KAeAqaA0Llg_00005_00001.npy b/data/intrinsics/2011_KAeAqaA0Llg_00005_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..21fc32c3f730bcaa9fc9f9d2f6f7ef52302aa77f --- /dev/null +++ b/data/intrinsics/2011_KAeAqaA0Llg_00005_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:818b4d56b928e7e2e429ef77cd16fd0296db1385034a5344c9eb16062201efcd +size 136 diff --git a/data/intrinsics/2011_MCkKihQrNA4_00014_00000.npy b/data/intrinsics/2011_MCkKihQrNA4_00014_00000.npy new file mode 100644 index 0000000000000000000000000000000000000000..92855ed71fa008d3186752473b20c984f2af9ae7 --- /dev/null +++ b/data/intrinsics/2011_MCkKihQrNA4_00014_00000.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:68f72b77760f0800c001f9569657c32487cb59b2430b29f4e63868ec58235871 +size 136 diff --git a/data/traj/2011_F_EuMeT2wBo_00014_00001.txt b/data/traj/2011_F_EuMeT2wBo_00014_00001.txt new file mode 100644 index 0000000000000000000000000000000000000000..07728f7cdbd0aa56616df232fe45fcb5aa70b03d --- /dev/null +++ b/data/traj/2011_F_EuMeT2wBo_00014_00001.txt @@ -0,0 +1,24 @@ +9.985352e-01 2.099609e-02 -4.824829e-02 2.094011e-01 -1.538849e-02 9.931641e-01 1.138916e-01 -3.347295e-01 5.032349e-02 -1.130371e-01 9.921875e-01 -2.208318e+00 +9.985352e-01 2.120972e-02 -4.913330e-02 2.113752e-01 -1.540375e-02 9.931641e-01 1.155396e-01 -3.352814e-01 5.123901e-02 -1.146240e-01 9.921875e-01 -2.206313e+00 +9.985352e-01 2.154541e-02 -4.995728e-02 2.161967e-01 -1.554871e-02 9.931641e-01 1.172485e-01 -3.367126e-01 5.212402e-02 -1.163330e-01 9.916992e-01 -2.201502e+00 +9.985352e-01 2.186584e-02 -5.090332e-02 2.223684e-01 -1.567078e-02 9.926758e-01 1.192017e-01 -3.386434e-01 5.316162e-02 -1.182251e-01 9.916992e-01 -2.195479e+00 +9.985352e-01 2.264404e-02 -5.136108e-02 2.288649e-01 -1.626587e-02 9.926758e-01 1.212158e-01 -3.407262e-01 5.371094e-02 -1.202393e-01 9.912109e-01 -2.189288e+00 +9.985352e-01 2.291870e-02 -5.224609e-02 2.353682e-01 -1.634216e-02 9.921875e-01 1.230469e-01 -3.427306e-01 5.465698e-02 -1.220093e-01 9.912109e-01 -2.183115e+00 +9.985352e-01 2.351379e-02 -5.313110e-02 2.417701e-01 -1.670837e-02 9.921875e-01 1.251221e-01 -3.445977e-01 5.563354e-02 -1.239624e-01 9.907227e-01 -2.176930e+00 +9.980469e-01 2.445984e-02 -5.361938e-02 2.482157e-01 -1.745605e-02 9.916992e-01 1.273193e-01 -3.463514e-01 5.627441e-02 -1.260986e-01 9.902344e-01 -2.170475e+00 +9.980469e-01 2.424622e-02 -5.468750e-02 2.549237e-01 -1.701355e-02 9.916992e-01 1.291504e-01 -3.480787e-01 5.734253e-02 -1.280518e-01 9.902344e-01 -2.163509e+00 +9.980469e-01 2.445984e-02 -5.596924e-02 2.616896e-01 -1.692200e-02 9.912109e-01 1.312256e-01 -3.498808e-01 5.868530e-02 -1.301270e-01 9.897461e-01 -2.156410e+00 +9.980469e-01 2.520752e-02 -5.685425e-02 2.681601e-01 -1.741028e-02 9.907227e-01 1.335449e-01 -3.517837e-01 5.969238e-02 -1.323242e-01 9.892578e-01 -2.149725e+00 +9.980469e-01 2.571106e-02 -5.737305e-02 2.743219e-01 -1.770020e-02 9.907227e-01 1.359863e-01 -3.537634e-01 6.033325e-02 -1.347656e-01 9.892578e-01 -2.143470e+00 +9.980469e-01 
2.583313e-02 -5.859375e-02 2.803579e-01 -1.750183e-02 9.902344e-01 1.385498e-01 -3.557284e-01 6.161499e-02 -1.372070e-01 9.887695e-01 -2.137333e+00 +9.980469e-01 2.630615e-02 -5.957031e-02 2.862232e-01 -1.768494e-02 9.897461e-01 1.409912e-01 -3.576265e-01 6.268311e-02 -1.395264e-01 9.882812e-01 -2.131371e+00 +9.980469e-01 2.691650e-02 -6.048584e-02 2.918755e-01 -1.802063e-02 9.897461e-01 1.430664e-01 -3.594637e-01 6.372070e-02 -1.417236e-01 9.877930e-01 -2.125673e+00 +9.975586e-01 2.725220e-02 -6.134033e-02 2.973304e-01 -1.808167e-02 9.892578e-01 1.453857e-01 -3.612198e-01 6.463623e-02 -1.439209e-01 9.873047e-01 -2.120247e+00 +9.975586e-01 2.764893e-02 -6.240845e-02 3.025778e-01 -1.817322e-02 9.887695e-01 1.478271e-01 -3.629181e-01 6.579590e-02 -1.462402e-01 9.868164e-01 -2.115124e+00 +9.975586e-01 2.812195e-02 -6.335449e-02 3.075362e-01 -1.834106e-02 9.882812e-01 1.500244e-01 -3.646296e-01 6.683350e-02 -1.485596e-01 9.868164e-01 -2.110404e+00 +9.975586e-01 2.885437e-02 -6.427002e-02 3.121473e-01 -1.875305e-02 9.882812e-01 1.525879e-01 -3.663803e-01 6.787109e-02 -1.510010e-01 9.863281e-01 -2.106179e+00 +9.975586e-01 2.932739e-02 -6.512451e-02 3.164429e-01 -1.892090e-02 9.877930e-01 1.551514e-01 -3.681046e-01 6.884766e-02 -1.535645e-01 9.858398e-01 -2.102398e+00 +9.975586e-01 3.102112e-02 -6.610107e-02 3.205023e-01 -2.024841e-02 9.873047e-01 1.578369e-01 -3.697458e-01 7.012939e-02 -1.560059e-01 9.853516e-01 -2.098871e+00 +9.970703e-01 3.140259e-02 -6.701660e-02 3.244532e-01 -2.030945e-02 9.868164e-01 1.601562e-01 -3.713188e-01 7.116699e-02 -1.583252e-01 9.848633e-01 -2.095402e+00 +9.970703e-01 3.204346e-02 -6.787109e-02 3.283897e-01 -2.061462e-02 9.863281e-01 1.625977e-01 -3.728545e-01 7.214355e-02 -1.607666e-01 9.843750e-01 -2.091886e+00 +9.970703e-01 3.262329e-02 -6.848145e-02 3.323311e-01 -2.091980e-02 9.858398e-01 1.650391e-01 -3.743794e-01 7.293701e-02 -1.630859e-01 9.838867e-01 -2.088349e+00 diff --git a/data/traj/2011_KAeAqaA0Llg_00005_00001.txt b/data/traj/2011_KAeAqaA0Llg_00005_00001.txt new file mode 100644 index 0000000000000000000000000000000000000000..c5530475567789fb3e6f42c0b6a24c41945e1fa9 --- /dev/null +++ b/data/traj/2011_KAeAqaA0Llg_00005_00001.txt @@ -0,0 +1,58 @@ +9.741211e-01 -1.998901e-02 2.250977e-01 -4.372981e-01 2.278137e-02 9.995117e-01 -9.826660e-03 -4.677700e-01 -2.247314e-01 1.470184e-02 9.741211e-01 -1.838427e+00 +9.711914e-01 -2.186584e-02 2.373047e-01 -4.391429e-01 2.473450e-02 9.995117e-01 -9.124756e-03 -4.675170e-01 -2.370605e-01 1.472473e-02 9.711914e-01 -1.837376e+00 +9.677734e-01 -2.359009e-02 2.509766e-01 -4.439013e-01 2.639771e-02 9.995117e-01 -7.827759e-03 -4.670338e-01 -2.507324e-01 1.420593e-02 9.677734e-01 -1.834699e+00 +9.638672e-01 -2.511597e-02 2.644043e-01 -4.503341e-01 2.783203e-02 9.995117e-01 -6.538391e-03 -4.665928e-01 -2.641602e-01 1.366425e-02 9.643555e-01 -1.830273e+00 +9.599609e-01 -2.644348e-02 2.792969e-01 -4.575170e-01 2.899170e-02 9.995117e-01 -4.985809e-03 -4.663441e-01 -2.790527e-01 1.288605e-02 9.599609e-01 -1.823789e+00 +9.555664e-01 -2.764893e-02 2.939453e-01 -4.648938e-01 3.009033e-02 9.995117e-01 -3.807068e-03 -4.663945e-01 -2.937012e-01 1.248169e-02 9.560547e-01 -1.815169e+00 +9.506836e-01 -2.983093e-02 3.085938e-01 -4.723135e-01 3.240967e-02 9.995117e-01 -3.246307e-03 -4.668498e-01 -3.083496e-01 1.308441e-02 9.511719e-01 -1.804609e+00 +9.453125e-01 -3.056335e-02 3.244629e-01 -4.800132e-01 3.314209e-02 9.995117e-01 -2.403259e-03 -4.677840e-01 -3.242188e-01 1.302338e-02 9.458008e-01 -1.792842e+00 +9.399414e-01 -3.454590e-02 
3.400879e-01 -4.882299e-01 3.713989e-02 9.995117e-01 -1.118660e-03 -4.690314e-01 -3.398438e-01 1.367950e-02 9.404297e-01 -1.780957e+00 +9.335938e-01 -3.387451e-02 3.569336e-01 -4.972414e-01 3.616333e-02 9.995117e-01 2.758503e-04 -4.700927e-01 -3.566895e-01 1.264954e-02 9.340820e-01 -1.770506e+00 +9.272461e-01 -3.536987e-02 3.728027e-01 -5.067158e-01 3.771973e-02 9.995117e-01 1.014709e-03 -4.704562e-01 -3.725586e-01 1.311493e-02 9.277344e-01 -1.762218e+00 +9.208984e-01 -3.701782e-02 3.884277e-01 -5.163684e-01 3.955078e-02 9.990234e-01 1.469612e-03 -4.700372e-01 -3.881836e-01 1.401520e-02 9.213867e-01 -1.756018e+00 +9.140625e-01 -3.814697e-02 4.042969e-01 -5.259498e-01 4.089355e-02 9.990234e-01 1.882553e-03 -4.691658e-01 -4.040527e-01 1.480103e-02 9.145508e-01 -1.751260e+00 +9.067383e-01 -3.942871e-02 4.201660e-01 -5.355878e-01 4.254150e-02 9.990234e-01 1.962662e-03 -4.682593e-01 -4.199219e-01 1.609802e-02 9.072266e-01 -1.748106e+00 +8.989258e-01 -4.049683e-02 4.362793e-01 -5.456745e-01 4.421997e-02 9.990234e-01 1.652718e-03 -4.677213e-01 -4.357910e-01 1.780701e-02 8.999023e-01 -1.747388e+00 +8.906250e-01 -4.260254e-02 4.523926e-01 -5.566938e-01 4.678345e-02 9.990234e-01 1.966476e-03 -4.678367e-01 -4.519043e-01 1.940918e-02 8.920898e-01 -1.750303e+00 +8.818359e-01 -4.577637e-02 4.694824e-01 -5.687745e-01 5.035400e-02 9.985352e-01 2.817154e-03 -4.686526e-01 -4.689941e-01 2.114868e-02 8.828125e-01 -1.757593e+00 +8.725586e-01 -4.827881e-02 4.860840e-01 -5.814021e-01 5.270386e-02 9.985352e-01 4.547119e-03 -4.697918e-01 -4.855957e-01 2.165222e-02 8.740234e-01 -1.768167e+00 +8.627930e-01 -5.145264e-02 5.024414e-01 -5.936762e-01 5.603027e-02 9.985352e-01 6.000519e-03 -4.703106e-01 -5.019531e-01 2.297974e-02 8.642578e-01 -1.777817e+00 +8.530273e-01 -5.465698e-02 5.185547e-01 -6.044027e-01 5.947876e-02 9.980469e-01 7.328033e-03 -4.694327e-01 -5.180664e-01 2.459717e-02 8.549805e-01 -1.781961e+00 +8.432617e-01 -5.596924e-02 5.346680e-01 -6.128738e-01 6.106567e-02 9.980469e-01 8.186340e-03 -4.670735e-01 -5.341797e-01 2.574158e-02 8.452148e-01 -1.779072e+00 +8.334961e-01 -5.825806e-02 5.493164e-01 -6.194468e-01 6.396484e-02 9.980469e-01 8.804321e-03 -4.638741e-01 -5.488281e-01 2.780151e-02 8.354492e-01 -1.771993e+00 +8.242188e-01 -5.975342e-02 5.634766e-01 -6.249860e-01 6.573486e-02 9.975586e-01 9.689331e-03 -4.607763e-01 -5.625000e-01 2.905273e-02 8.261719e-01 -1.763899e+00 +8.144531e-01 -6.097412e-02 5.766602e-01 -6.302268e-01 6.713867e-02 9.975586e-01 1.065826e-02 -4.581519e-01 -5.761719e-01 3.005981e-02 8.168945e-01 -1.756076e+00 +8.051758e-01 -6.222534e-02 5.893555e-01 -6.355590e-01 6.854248e-02 9.975586e-01 1.162720e-02 -4.557545e-01 -5.888672e-01 3.106689e-02 8.076172e-01 -1.747956e+00 +7.963867e-01 -6.341553e-02 6.015625e-01 -6.411917e-01 6.994629e-02 9.975586e-01 1.262665e-02 -4.530967e-01 -6.010742e-01 3.201294e-02 7.988281e-01 -1.738591e+00 +7.875977e-01 -6.420898e-02 6.127930e-01 -6.473389e-01 7.080078e-02 9.975586e-01 1.349640e-02 -4.498012e-01 -6.118164e-01 3.274536e-02 7.900391e-01 -1.727775e+00 +7.788086e-01 -6.500244e-02 6.240234e-01 -6.542532e-01 7.189941e-02 9.970703e-01 1.423645e-02 -4.456802e-01 -6.235352e-01 3.378296e-02 7.812500e-01 -1.716242e+00 +7.695312e-01 -6.542969e-02 6.352539e-01 -6.617646e-01 7.263184e-02 9.970703e-01 1.475525e-02 -4.409550e-01 -6.342773e-01 3.479004e-02 7.724609e-01 -1.705874e+00 +7.617188e-01 -6.652832e-02 6.440430e-01 -6.698112e-01 7.391357e-02 9.970703e-01 1.554871e-02 -4.361504e-01 -6.435547e-01 3.576660e-02 7.646484e-01 -1.698497e+00 +7.543945e-01 
-6.701660e-02 6.533203e-01 -6.784095e-01 7.409668e-02 9.970703e-01 1.675415e-02 -4.318303e-01 -6.523438e-01 3.576660e-02 7.568359e-01 -1.693742e+00 +7.465820e-01 -6.793213e-02 6.616211e-01 -6.874385e-01 7.470703e-02 9.970703e-01 1.808167e-02 -4.282111e-01 -6.606445e-01 3.591919e-02 7.495117e-01 -1.690611e+00 +7.392578e-01 -6.896973e-02 6.694336e-01 -6.966511e-01 7.525635e-02 9.970703e-01 1.953125e-02 -4.253444e-01 -6.689453e-01 3.594971e-02 7.421875e-01 -1.688063e+00 +7.314453e-01 -7.025146e-02 6.782227e-01 -7.057805e-01 7.623291e-02 9.970703e-01 2.104187e-02 -4.231055e-01 -6.772461e-01 3.628540e-02 7.348633e-01 -1.685560e+00 +7.246094e-01 -7.122803e-02 6.855469e-01 -7.147168e-01 7.702637e-02 9.965820e-01 2.218628e-02 -4.215238e-01 -6.850586e-01 3.674316e-02 7.275391e-01 -1.683242e+00 +7.167969e-01 -7.281494e-02 6.933594e-01 -7.235158e-01 7.836914e-02 9.965820e-01 2.359009e-02 -4.206826e-01 -6.928711e-01 3.744507e-02 7.202148e-01 -1.680854e+00 +7.094727e-01 -7.397461e-02 7.011719e-01 -7.320131e-01 7.922363e-02 9.965820e-01 2.503967e-02 -4.205368e-01 -7.006836e-01 3.778076e-02 7.128906e-01 -1.677558e+00 +7.021484e-01 -7.525635e-02 7.080078e-01 -7.400187e-01 7.983398e-02 9.965820e-01 2.671814e-02 -4.210481e-01 -7.075195e-01 3.778076e-02 7.055664e-01 -1.672601e+00 +6.948242e-01 -7.641602e-02 7.148438e-01 -7.476481e-01 8.032227e-02 9.965820e-01 2.845764e-02 -4.223456e-01 -7.143555e-01 3.762817e-02 6.987305e-01 -1.665415e+00 +6.879883e-01 -7.788086e-02 7.216797e-01 -7.551360e-01 8.081055e-02 9.960938e-01 3.047180e-02 -4.245052e-01 -7.211914e-01 3.738403e-02 6.914062e-01 -1.655879e+00 +6.796875e-01 -7.916260e-02 7.294922e-01 -7.625563e-01 7.861328e-02 9.960938e-01 3.491211e-02 -4.269787e-01 -7.294922e-01 3.359985e-02 6.831055e-01 -1.644971e+00 +6.733398e-01 -8.044434e-02 7.348633e-01 -7.697109e-01 8.026123e-02 9.960938e-01 3.552246e-02 -4.284711e-01 -7.348633e-01 3.506470e-02 6.772461e-01 -1.634182e+00 +6.660156e-01 -8.117676e-02 7.416992e-01 -7.764445e-01 7.983398e-02 9.960938e-01 3.735352e-02 -4.288901e-01 -7.416992e-01 3.433228e-02 6.699219e-01 -1.622686e+00 +6.586914e-01 -8.227539e-02 7.480469e-01 -7.827757e-01 8.056641e-02 9.960938e-01 3.857422e-02 -4.284060e-01 -7.480469e-01 3.485107e-02 6.625977e-01 -1.609460e+00 +6.508789e-01 -8.245850e-02 7.548828e-01 -7.887357e-01 7.952881e-02 9.960938e-01 4.025269e-02 -4.274380e-01 -7.548828e-01 3.381348e-02 6.547852e-01 -1.594344e+00 +6.425781e-01 -8.325195e-02 7.617188e-01 -7.943563e-01 7.958984e-02 9.960938e-01 4.171753e-02 -4.263000e-01 -7.622070e-01 3.381348e-02 6.464844e-01 -1.578784e+00 +6.352539e-01 -8.331299e-02 7.675781e-01 -7.998423e-01 7.818604e-02 9.960938e-01 4.336548e-02 -4.253245e-01 -7.685547e-01 3.247070e-02 6.391602e-01 -1.565369e+00 +6.279297e-01 -8.465576e-02 7.739258e-01 -8.056821e-01 7.769775e-02 9.960938e-01 4.595947e-02 -4.247170e-01 -7.744141e-01 3.128052e-02 6.318359e-01 -1.555790e+00 +6.191406e-01 -8.538818e-02 7.807617e-01 -8.119400e-01 7.684326e-02 9.960938e-01 4.797363e-02 -4.242177e-01 -7.817383e-01 3.030396e-02 6.230469e-01 -1.550475e+00 +6.132812e-01 -8.648682e-02 7.851562e-01 -8.185066e-01 7.647705e-02 9.956055e-01 4.995728e-02 -4.234311e-01 -7.861328e-01 2.941895e-02 6.171875e-01 -1.549090e+00 +6.054688e-01 -8.758545e-02 7.910156e-01 -8.254965e-01 7.623291e-02 9.956055e-01 5.191040e-02 -4.222932e-01 -7.924805e-01 2.888489e-02 6.093750e-01 -1.549202e+00 +5.971680e-01 -8.892822e-02 7.973633e-01 -8.330282e-01 7.598877e-02 9.956055e-01 5.416870e-02 -4.210888e-01 -7.988281e-01 2.824402e-02 6.010742e-01 -1.549421e+00 
+5.893555e-01 -8.966064e-02 8.032227e-01 -8.404527e-01 7.476807e-02 9.956055e-01 5.633545e-02 -4.199587e-01 -8.046875e-01 2.687073e-02 5.932617e-01 -1.548479e+00 +5.805664e-01 -9.057617e-02 8.090820e-01 -8.471390e-01 7.464600e-02 9.956055e-01 5.789185e-02 -4.188834e-01 -8.110352e-01 2.677917e-02 5.844727e-01 -1.545612e+00 +5.727539e-01 -9.100342e-02 8.149414e-01 -8.528808e-01 7.305908e-02 9.956055e-01 5.984497e-02 -4.179030e-01 -8.164062e-01 2.523804e-02 5.766602e-01 -1.541203e+00 +5.639648e-01 -9.204102e-02 8.208008e-01 -8.576700e-01 7.250977e-02 9.956055e-01 6.176758e-02 -4.169065e-01 -8.227539e-01 2.468872e-02 5.683594e-01 -1.535808e+00 +5.556641e-01 -9.252930e-02 8.261719e-01 -8.618127e-01 7.159424e-02 9.956055e-01 6.335449e-02 -4.158991e-01 -8.286133e-01 2.397156e-02 5.595703e-01 -1.530118e+00 +5.468750e-01 -9.362793e-02 8.320312e-01 -8.656946e-01 7.128906e-02 9.951172e-01 6.512451e-02 -4.148987e-01 -8.339844e-01 2.369690e-02 5.512695e-01 -1.524463e+00 diff --git a/data/traj/2011_MCkKihQrNA4_00014_00000.txt b/data/traj/2011_MCkKihQrNA4_00014_00000.txt new file mode 100644 index 0000000000000000000000000000000000000000..e07805c8fcb2e2af426e0fca8109c26f93e06225 --- /dev/null +++ b/data/traj/2011_MCkKihQrNA4_00014_00000.txt @@ -0,0 +1,212 @@ +1.000000e+00 2.065897e-04 -4.680157e-04 3.201442e-01 -2.065897e-04 1.000000e+00 4.732609e-05 -3.790914e-01 4.680157e-04 -4.720688e-05 1.000000e+00 -1.379326e+00 +1.000000e+00 -2.864838e-03 1.213837e-02 3.222806e-01 2.878189e-03 1.000000e+00 -1.036644e-03 -3.789712e-01 -1.213837e-02 1.070976e-03 1.000000e+00 -1.379700e+00 +9.995117e-01 -6.122589e-03 2.548218e-02 3.276742e-01 6.164551e-03 1.000000e+00 -1.481056e-03 -3.786976e-01 -2.548218e-02 1.637459e-03 9.995117e-01 -1.380687e+00 +9.990234e-01 -9.407043e-03 3.909302e-02 3.348400e-01 9.513855e-03 1.000000e+00 -2.374649e-03 -3.783800e-01 -3.906250e-02 2.744675e-03 9.990234e-01 -1.382084e+00 +9.985352e-01 -1.340485e-02 5.212402e-02 3.426817e-01 1.345825e-02 1.000000e+00 -6.346703e-04 -3.780773e-01 -5.209351e-02 1.335144e-03 9.985352e-01 -1.383737e+00 +9.980469e-01 -1.692200e-02 6.262207e-02 3.505807e-01 1.698303e-02 1.000000e+00 -4.401207e-04 -3.777734e-01 -6.262207e-02 1.502991e-03 9.980469e-01 -1.385554e+00 +9.970703e-01 -1.953125e-02 7.421875e-02 3.582895e-01 1.956177e-02 1.000000e+00 2.375841e-04 -3.774463e-01 -7.421875e-02 1.214981e-03 9.970703e-01 -1.387506e+00 +9.960938e-01 -2.238464e-02 8.361816e-02 3.658186e-01 2.233887e-02 9.995117e-01 1.630783e-03 -3.770862e-01 -8.361816e-02 2.428293e-04 9.965820e-01 -1.389594e+00 +9.951172e-01 -2.458191e-02 9.631348e-02 3.732936e-01 2.474976e-02 9.995117e-01 -4.959106e-04 -3.766999e-01 -9.625244e-02 2.876282e-03 9.951172e-01 -1.391828e+00 +9.936523e-01 -2.745056e-02 1.079102e-01 3.808682e-01 2.735901e-02 9.995117e-01 2.296448e-03 -3.763120e-01 -1.079102e-01 6.690025e-04 9.941406e-01 -1.394214e+00 +9.921875e-01 -3.085327e-02 1.203003e-01 3.886193e-01 3.063965e-02 9.995117e-01 3.650665e-03 -3.758654e-01 -1.203613e-01 6.258488e-05 9.926758e-01 -1.396767e+00 +9.907227e-01 -3.430176e-02 1.331787e-01 3.965574e-01 3.421021e-02 9.995117e-01 3.053665e-03 -3.753057e-01 -1.331787e-01 1.531601e-03 9.912109e-01 -1.399494e+00 +9.887695e-01 -3.805542e-02 1.455078e-01 4.046652e-01 3.802490e-02 9.995117e-01 2.908707e-03 -3.746374e-01 -1.455078e-01 2.656937e-03 9.892578e-01 -1.402385e+00 +9.863281e-01 -4.156494e-02 1.583252e-01 4.129134e-01 4.153442e-02 9.990234e-01 3.517151e-03 -3.739027e-01 -1.584473e-01 3.105164e-03 9.873047e-01 -1.405416e+00 +9.843750e-01 -4.434204e-02 
1.699219e-01 4.212611e-01 4.455566e-02 9.990234e-01 2.580643e-03 -3.731669e-01 -1.697998e-01 5.031586e-03 9.853516e-01 -1.408582e+00 +9.819336e-01 -4.779053e-02 1.821289e-01 4.296774e-01 4.751587e-02 9.990234e-01 5.813599e-03 -3.724761e-01 -1.821289e-01 2.943039e-03 9.833984e-01 -1.411883e+00 +9.794922e-01 -5.175781e-02 1.944580e-01 4.381925e-01 5.072021e-02 9.985352e-01 1.021576e-02 -3.717659e-01 -1.947021e-01 -1.381636e-04 9.809570e-01 -1.415340e+00 +9.770508e-01 -5.453491e-02 2.067871e-01 4.468320e-01 5.368042e-02 9.985352e-01 9.658813e-03 -3.709583e-01 -2.069092e-01 1.666069e-03 9.785156e-01 -1.418977e+00 +9.741211e-01 -5.822754e-02 2.194824e-01 4.555781e-01 5.755615e-02 9.985352e-01 9.552002e-03 -3.700933e-01 -2.196045e-01 3.330231e-03 9.755859e-01 -1.422819e+00 +9.707031e-01 -6.192017e-02 2.324219e-01 4.644125e-01 6.127930e-02 9.980469e-01 1.000214e-02 -3.692466e-01 -2.325439e-01 4.535675e-03 9.726562e-01 -1.426890e+00 +9.667969e-01 -6.536865e-02 2.479248e-01 4.733362e-01 6.494141e-02 9.980469e-01 9.773254e-03 -3.684760e-01 -2.480469e-01 6.656647e-03 9.687500e-01 -1.431191e+00 +9.638672e-01 -6.762695e-02 2.585449e-01 4.823274e-01 6.683350e-02 9.975586e-01 1.190186e-02 -3.677868e-01 -2.587891e-01 5.817413e-03 9.658203e-01 -1.435699e+00 +9.614258e-01 -6.866455e-02 2.661133e-01 4.912350e-01 6.829834e-02 9.975586e-01 1.059723e-02 -3.670994e-01 -2.663574e-01 7.987976e-03 9.638672e-01 -1.440331e+00 +9.565430e-01 -7.250977e-02 2.817383e-01 4.998991e-01 7.159424e-02 9.975586e-01 1.361847e-02 -3.663322e-01 -2.819824e-01 7.137299e-03 9.594727e-01 -1.444981e+00 +9.521484e-01 -7.623291e-02 2.956543e-01 5.083853e-01 7.580566e-02 9.970703e-01 1.282501e-02 -3.654099e-01 -2.956543e-01 1.020813e-02 9.550781e-01 -1.449645e+00 +9.487305e-01 -7.904053e-02 3.054199e-01 5.168542e-01 7.934570e-02 9.965820e-01 1.139069e-02 -3.643804e-01 -3.051758e-01 1.342773e-02 9.521484e-01 -1.454380e+00 +9.448242e-01 -8.325195e-02 3.173828e-01 5.254235e-01 8.276367e-02 9.965820e-01 1.506042e-02 -3.633362e-01 -3.173828e-01 1.203156e-02 9.482422e-01 -1.459235e+00 +9.399414e-01 -8.770752e-02 3.293457e-01 5.341560e-01 8.642578e-02 9.960938e-01 1.847839e-02 -3.622877e-01 -3.298340e-01 1.110077e-02 9.438477e-01 -1.464262e+00 +9.375000e-01 -8.941650e-02 3.356934e-01 5.429996e-01 8.874512e-02 9.960938e-01 1.739502e-02 -3.612690e-01 -3.359375e-01 1.348114e-02 9.418945e-01 -1.469457e+00 +9.340820e-01 -9.375000e-02 3.444824e-01 5.517401e-01 9.210205e-02 9.956055e-01 2.128601e-02 -3.604049e-01 -3.449707e-01 1.184845e-02 9.384766e-01 -1.474737e+00 +9.277344e-01 -9.625244e-02 3.608398e-01 5.602215e-01 9.619141e-02 9.951172e-01 1.832581e-02 -3.597833e-01 -3.608398e-01 1.768494e-02 9.326172e-01 -1.480072e+00 +9.238281e-01 -1.035156e-01 3.681641e-01 5.684847e-01 9.997559e-02 9.946289e-01 2.873230e-02 -3.594569e-01 -3.691406e-01 1.026154e-02 9.291992e-01 -1.485506e+00 +9.179688e-01 -1.033325e-01 3.833008e-01 5.765801e-01 1.007080e-01 9.946289e-01 2.694702e-02 -3.593383e-01 -3.840332e-01 1.385498e-02 9.233398e-01 -1.491069e+00 +9.130859e-01 -1.052246e-01 3.935547e-01 5.846207e-01 1.023560e-01 9.941406e-01 2.822876e-02 -3.595274e-01 -3.942871e-01 1.450348e-02 9.189453e-01 -1.496788e+00 +9.077148e-01 -1.082764e-01 4.057617e-01 5.925629e-01 1.040649e-01 9.941406e-01 3.244019e-02 -3.601327e-01 -4.069824e-01 1.278687e-02 9.135742e-01 -1.502622e+00 +8.999023e-01 -1.160278e-01 4.199219e-01 6.002438e-01 1.074829e-01 9.931641e-01 4.403687e-02 -3.610504e-01 -4.221191e-01 5.496979e-03 9.067383e-01 -1.508449e+00 +8.930664e-01 -1.218872e-01 
4.328613e-01 6.075313e-01 1.104736e-01 9.926758e-01 5.151367e-02 -3.619239e-01 -4.360352e-01 1.817703e-03 8.999023e-01 -1.514149e+00 +8.842773e-01 -1.295166e-01 4.487305e-01 6.143624e-01 1.139526e-01 9.916992e-01 6.170654e-02 -3.624120e-01 -4.528809e-01 -3.414154e-03 8.916016e-01 -1.519632e+00 +8.750000e-01 -1.341553e-01 4.648438e-01 6.207978e-01 1.176758e-01 9.907227e-01 6.457520e-02 -3.623137e-01 -4.692383e-01 -1.815796e-03 8.833008e-01 -1.524892e+00 +8.666992e-01 -1.376953e-01 4.797363e-01 6.269678e-01 1.201172e-01 9.907227e-01 6.726074e-02 -3.616886e-01 -4.843750e-01 -6.842613e-04 8.750000e-01 -1.530004e+00 +8.598633e-01 -1.403809e-01 4.912109e-01 6.329594e-01 1.211548e-01 9.902344e-01 7.098389e-02 -3.607145e-01 -4.963379e-01 -1.530647e-03 8.681641e-01 -1.535085e+00 +8.540039e-01 -1.440430e-01 5.000000e-01 6.387826e-01 1.242676e-01 9.897461e-01 7.293701e-02 -3.595413e-01 -5.053711e-01 -1.158714e-04 8.627930e-01 -1.540226e+00 +8.471680e-01 -1.472168e-01 5.102539e-01 6.444105e-01 1.260986e-01 9.892578e-01 7.586670e-02 -3.582601e-01 -5.156250e-01 6.514788e-05 8.564453e-01 -1.545464e+00 +8.398438e-01 -1.502686e-01 5.214844e-01 6.498799e-01 1.293945e-01 9.887695e-01 7.653809e-02 -3.569227e-01 -5.268555e-01 3.175735e-03 8.496094e-01 -1.550805e+00 +8.334961e-01 -1.529541e-01 5.307617e-01 6.552650e-01 1.315918e-01 9.882812e-01 7.824707e-02 -3.555878e-01 -5.366211e-01 4.646301e-03 8.437500e-01 -1.556254e+00 +8.276367e-01 -1.562500e-01 5.390625e-01 6.605953e-01 1.335449e-01 9.877930e-01 8.123779e-02 -3.542733e-01 -5.454102e-01 4.795074e-03 8.383789e-01 -1.561781e+00 +8.212891e-01 -1.584473e-01 5.483398e-01 6.658787e-01 1.353760e-01 9.873047e-01 8.251953e-02 -3.529787e-01 -5.546875e-01 6.462097e-03 8.320312e-01 -1.567338e+00 +8.144531e-01 -1.617432e-01 5.571289e-01 6.711596e-01 1.374512e-01 9.868164e-01 8.544922e-02 -3.517327e-01 -5.634766e-01 6.958008e-03 8.261719e-01 -1.572906e+00 +8.085938e-01 -1.654053e-01 5.649414e-01 6.764931e-01 1.392822e-01 9.863281e-01 8.935547e-02 -3.505690e-01 -5.717773e-01 6.500244e-03 8.203125e-01 -1.578499e+00 +8.032227e-01 -1.707764e-01 5.703125e-01 6.818772e-01 1.428223e-01 9.853516e-01 9.387207e-02 -3.494990e-01 -5.781250e-01 6.000519e-03 8.159180e-01 -1.584106e+00 +7.978516e-01 -1.757812e-01 5.766602e-01 6.872497e-01 1.451416e-01 9.843750e-01 9.930420e-02 -3.485228e-01 -5.849609e-01 4.440308e-03 8.110352e-01 -1.589667e+00 +7.924805e-01 -1.796875e-01 5.830078e-01 6.925216e-01 1.467285e-01 9.838867e-01 1.037598e-01 -3.476026e-01 -5.922852e-01 3.345490e-03 8.056641e-01 -1.595131e+00 +7.871094e-01 -1.842041e-01 5.883789e-01 6.975990e-01 1.489258e-01 9.829102e-01 1.085205e-01 -3.466908e-01 -5.986328e-01 2.246857e-03 8.012695e-01 -1.600435e+00 +7.822266e-01 -1.881104e-01 5.942383e-01 7.024170e-01 1.496582e-01 9.819336e-01 1.140137e-01 -3.457284e-01 -6.049805e-01 -2.851486e-04 7.963867e-01 -1.605544e+00 +7.773438e-01 -1.911621e-01 5.996094e-01 7.069600e-01 1.506348e-01 9.814453e-01 1.177368e-01 -3.446680e-01 -6.108398e-01 -1.223564e-03 7.915039e-01 -1.610479e+00 +7.719727e-01 -1.932373e-01 6.054688e-01 7.112765e-01 1.517334e-01 9.809570e-01 1.196899e-01 -3.435266e-01 -6.171875e-01 -5.583763e-04 7.866211e-01 -1.615313e+00 +7.690430e-01 -1.961670e-01 6.083984e-01 7.154447e-01 1.529541e-01 9.804688e-01 1.229858e-01 -3.423614e-01 -6.210938e-01 -1.505852e-03 7.836914e-01 -1.620147e+00 +7.666016e-01 -1.990967e-01 6.108398e-01 7.194941e-01 1.541748e-01 9.799805e-01 1.260986e-01 -3.412003e-01 -6.235352e-01 -2.477646e-03 7.817383e-01 -1.625017e+00 +7.636719e-01 -2.008057e-01 
6.137695e-01 7.234516e-01 1.546631e-01 9.794922e-01 1.280518e-01 -3.400360e-01 -6.269531e-01 -2.948761e-03 7.792969e-01 -1.629934e+00 +7.612305e-01 -2.012939e-01 6.166992e-01 7.273728e-01 1.544189e-01 9.794922e-01 1.291504e-01 -3.388563e-01 -6.298828e-01 -3.141403e-03 7.768555e-01 -1.634909e+00 +7.583008e-01 -2.031250e-01 6.196289e-01 7.313158e-01 1.558838e-01 9.790039e-01 1.301270e-01 -3.376582e-01 -6.333008e-01 -2.099991e-03 7.739258e-01 -1.639952e+00 +7.548828e-01 -2.060547e-01 6.225586e-01 7.353035e-01 1.578369e-01 9.785156e-01 1.325684e-01 -3.364420e-01 -6.367188e-01 -1.753807e-03 7.714844e-01 -1.645043e+00 +7.519531e-01 -2.082520e-01 6.254883e-01 7.392854e-01 1.600342e-01 9.780273e-01 1.334229e-01 -3.351918e-01 -6.391602e-01 -2.582073e-04 7.690430e-01 -1.650123e+00 +7.480469e-01 -2.097168e-01 6.298828e-01 7.431977e-01 1.612549e-01 9.775391e-01 1.340332e-01 -3.339198e-01 -6.440430e-01 1.359940e-03 7.651367e-01 -1.655137e+00 +7.441406e-01 -2.113037e-01 6.333008e-01 7.470453e-01 1.618652e-01 9.775391e-01 1.358643e-01 -3.326300e-01 -6.479492e-01 1.398087e-03 7.617188e-01 -1.660061e+00 +7.402344e-01 -2.136230e-01 6.372070e-01 7.508677e-01 1.621094e-01 9.770508e-01 1.391602e-01 -3.313049e-01 -6.523438e-01 2.944469e-04 7.578125e-01 -1.664911e+00 +7.373047e-01 -2.171631e-01 6.396484e-01 7.546704e-01 1.639404e-01 9.760742e-01 1.424561e-01 -3.299389e-01 -6.552734e-01 -1.850128e-04 7.553711e-01 -1.669737e+00 +7.329102e-01 -2.204590e-01 6.440430e-01 7.583559e-01 1.661377e-01 9.755859e-01 1.448975e-01 -3.285813e-01 -6.601562e-01 7.715225e-04 7.514648e-01 -1.674542e+00 +7.290039e-01 -2.247314e-01 6.469727e-01 7.618434e-01 1.679688e-01 9.746094e-01 1.492920e-01 -3.272973e-01 -6.635742e-01 -1.434088e-04 7.480469e-01 -1.679298e+00 +7.246094e-01 -2.258301e-01 6.513672e-01 7.651630e-01 1.690674e-01 9.741211e-01 1.497803e-01 -3.260942e-01 -6.684570e-01 1.615524e-03 7.436523e-01 -1.684001e+00 +7.207031e-01 -2.274170e-01 6.552734e-01 7.684235e-01 1.697998e-01 9.736328e-01 1.513672e-01 -3.249877e-01 -6.723633e-01 2.126694e-03 7.402344e-01 -1.688616e+00 +7.187500e-01 -2.293701e-01 6.562500e-01 7.717330e-01 1.718750e-01 9.731445e-01 1.518555e-01 -3.239618e-01 -6.738281e-01 3.681183e-03 7.387695e-01 -1.693088e+00 +7.158203e-01 -2.307129e-01 6.591797e-01 7.751407e-01 1.738281e-01 9.731445e-01 1.519775e-01 -3.230377e-01 -6.762695e-01 5.825043e-03 7.363281e-01 -1.697363e+00 +7.124023e-01 -2.333984e-01 6.616211e-01 7.786044e-01 1.759033e-01 9.721680e-01 1.536865e-01 -3.222967e-01 -6.791992e-01 6.900787e-03 7.338867e-01 -1.701380e+00 +7.280273e-01 -2.298584e-01 6.459961e-01 7.817589e-01 1.745605e-01 9.731445e-01 1.496582e-01 -3.218182e-01 -6.630859e-01 3.870010e-03 7.485352e-01 -1.704946e+00 +7.114258e-01 -2.365723e-01 6.621094e-01 7.840236e-01 1.723633e-01 9.716797e-01 1.621094e-01 -3.215613e-01 -6.816406e-01 -1.184464e-03 7.319336e-01 -1.707810e+00 +7.207031e-01 -2.338867e-01 6.523438e-01 7.856557e-01 1.730957e-01 9.721680e-01 1.574707e-01 -3.211871e-01 -6.713867e-01 -5.326271e-04 7.412109e-01 -1.710250e+00 +7.226562e-01 -2.318115e-01 6.513672e-01 7.867243e-01 1.729736e-01 9.726562e-01 1.542969e-01 -3.207552e-01 -6.689453e-01 1.133919e-03 7.431641e-01 -1.712394e+00 +7.255859e-01 -2.336426e-01 6.469727e-01 7.873752e-01 1.732178e-01 9.721680e-01 1.568604e-01 -3.204110e-01 -6.660156e-01 -1.768112e-03 7.460938e-01 -1.714350e+00 +7.260742e-01 -2.362061e-01 6.459961e-01 7.878724e-01 1.729736e-01 9.716797e-01 1.610107e-01 -3.200325e-01 -6.655273e-01 -5.180359e-03 7.465820e-01 -1.716228e+00 +7.304688e-01 -2.312012e-01 
6.425781e-01 7.885405e-01 1.729736e-01 9.731445e-01 1.534424e-01 -3.194433e-01 -6.606445e-01 -9.088516e-04 7.504883e-01 -1.718194e+00 +7.333984e-01 -2.307129e-01 6.396484e-01 7.896112e-01 1.728516e-01 9.731445e-01 1.528320e-01 -3.187189e-01 -6.577148e-01 -1.466751e-03 7.534180e-01 -1.720372e+00 +7.358398e-01 -2.286377e-01 6.372070e-01 7.912471e-01 1.723633e-01 9.736328e-01 1.502686e-01 -3.178989e-01 -6.547852e-01 -7.486343e-04 7.558594e-01 -1.722843e+00 +7.387695e-01 -2.274170e-01 6.342773e-01 7.934423e-01 1.726074e-01 9.736328e-01 1.481934e-01 -3.170165e-01 -6.513672e-01 4.881620e-05 7.587891e-01 -1.725590e+00 +7.534180e-01 -2.220459e-01 6.186523e-01 7.957770e-01 1.715088e-01 9.750977e-01 1.409912e-01 -3.161899e-01 -6.347656e-01 -1.256466e-04 7.729492e-01 -1.728295e+00 +7.543945e-01 -2.221680e-01 6.176758e-01 7.974742e-01 1.722412e-01 9.750977e-01 1.403809e-01 -3.156310e-01 -6.333008e-01 5.607605e-04 7.739258e-01 -1.730322e+00 +7.553711e-01 -2.213135e-01 6.166992e-01 7.982354e-01 1.721191e-01 9.750977e-01 1.390381e-01 -3.154043e-01 -6.323242e-01 1.159668e-03 7.749023e-01 -1.731449e+00 +7.553711e-01 -2.214355e-01 6.166992e-01 7.981557e-01 1.721191e-01 9.750977e-01 1.394043e-01 -3.154725e-01 -6.323242e-01 7.581711e-04 7.749023e-01 -1.731816e+00 +7.534180e-01 -2.232666e-01 6.181641e-01 7.975280e-01 1.726074e-01 9.746094e-01 1.416016e-01 -3.157209e-01 -6.342773e-01 4.100800e-05 7.729492e-01 -1.731619e+00 +7.539062e-01 -2.227783e-01 6.181641e-01 7.966896e-01 1.716309e-01 9.750977e-01 1.420898e-01 -3.160023e-01 -6.342773e-01 -1.063347e-03 7.729492e-01 -1.731032e+00 +7.529297e-01 -2.225342e-01 6.196289e-01 7.958863e-01 1.711426e-01 9.750977e-01 1.423340e-01 -3.162095e-01 -6.357422e-01 -1.109123e-03 7.719727e-01 -1.730282e+00 +7.524414e-01 -2.222900e-01 6.196289e-01 7.952886e-01 1.708984e-01 9.750977e-01 1.422119e-01 -3.162981e-01 -6.357422e-01 -1.128197e-03 7.719727e-01 -1.729600e+00 +7.534180e-01 -2.220459e-01 6.186523e-01 7.949371e-01 1.706543e-01 9.750977e-01 1.419678e-01 -3.162796e-01 -6.347656e-01 -1.372337e-03 7.724609e-01 -1.729116e+00 +7.524414e-01 -2.214355e-01 6.206055e-01 7.947638e-01 1.705322e-01 9.750977e-01 1.412354e-01 -3.161990e-01 -6.362305e-01 -5.078316e-04 7.714844e-01 -1.728850e+00 +7.524414e-01 -2.204590e-01 6.206055e-01 7.946802e-01 1.700439e-01 9.755859e-01 1.402588e-01 -3.161284e-01 -6.362305e-01 7.432699e-05 7.714844e-01 -1.728760e+00 +7.529297e-01 -2.203369e-01 6.201172e-01 7.945591e-01 1.699219e-01 9.755859e-01 1.402588e-01 -3.161298e-01 -6.357422e-01 -2.399683e-04 7.714844e-01 -1.728784e+00 +7.529297e-01 -2.203369e-01 6.201172e-01 7.942251e-01 1.697998e-01 9.755859e-01 1.403809e-01 -3.162418e-01 -6.357422e-01 -3.745556e-04 7.719727e-01 -1.728854e+00 +7.534180e-01 -2.204590e-01 6.196289e-01 7.935090e-01 1.696777e-01 9.755859e-01 1.407471e-01 -3.164930e-01 -6.352539e-01 -9.050369e-04 7.724609e-01 -1.728901e+00 +7.548828e-01 -2.194824e-01 6.181641e-01 7.923944e-01 1.694336e-01 9.755859e-01 1.395264e-01 -3.168765e-01 -6.337891e-01 -5.130768e-04 7.734375e-01 -1.728888e+00 +7.387695e-01 -2.264404e-01 6.342773e-01 7.912226e-01 1.663818e-01 9.741211e-01 1.539307e-01 -3.172577e-01 -6.528320e-01 -8.186340e-03 7.573242e-01 -1.728816e+00 +7.509766e-01 -2.213135e-01 6.220703e-01 7.906259e-01 1.695557e-01 9.750977e-01 1.423340e-01 -3.173633e-01 -6.376953e-01 -1.484871e-03 7.700195e-01 -1.728741e+00 +7.431641e-01 -2.246094e-01 6.298828e-01 7.905312e-01 1.683350e-01 9.746094e-01 1.488037e-01 -3.172982e-01 -6.474609e-01 -4.554749e-03 7.622070e-01 -1.728743e+00 +7.446289e-01 
-2.236328e-01 6.289062e-01 7.909347e-01 1.690674e-01 9.746094e-01 1.463623e-01 -3.171132e-01 -6.455078e-01 -2.653122e-03 7.636719e-01 -1.728828e+00 +7.529297e-01 -2.203369e-01 6.201172e-01 7.915440e-01 1.690674e-01 9.755859e-01 1.412354e-01 -3.169365e-01 -6.362305e-01 -1.447678e-03 7.714844e-01 -1.728969e+00 +7.519531e-01 -2.198486e-01 6.210938e-01 7.918491e-01 1.685791e-01 9.755859e-01 1.411133e-01 -3.169017e-01 -6.372070e-01 -1.368523e-03 7.709961e-01 -1.729156e+00 +7.441406e-01 -2.214355e-01 6.303711e-01 7.918609e-01 1.672363e-01 9.750977e-01 1.452637e-01 -3.169642e-01 -6.464844e-01 -2.738953e-03 7.626953e-01 -1.729348e+00 +7.412109e-01 -2.227783e-01 6.328125e-01 7.919849e-01 1.666260e-01 9.750977e-01 1.478271e-01 -3.169525e-01 -6.499023e-01 -4.131317e-03 7.597656e-01 -1.729498e+00 +7.529297e-01 -2.196045e-01 6.206055e-01 7.921751e-01 1.688232e-01 9.755859e-01 1.403809e-01 -3.168614e-01 -6.362305e-01 -8.525848e-04 7.714844e-01 -1.729612e+00 +7.495117e-01 -2.197266e-01 6.240234e-01 7.917793e-01 1.688232e-01 9.755859e-01 1.407471e-01 -3.169756e-01 -6.401367e-01 -1.939535e-04 7.685547e-01 -1.729755e+00 +7.490234e-01 -2.202148e-01 6.250000e-01 7.904776e-01 1.687012e-01 9.755859e-01 1.414795e-01 -3.175207e-01 -6.406250e-01 -5.693436e-04 7.675781e-01 -1.729921e+00 +7.416992e-01 -2.227783e-01 6.323242e-01 7.883248e-01 1.672363e-01 9.750977e-01 1.472168e-01 -3.185338e-01 -6.494141e-01 -3.438950e-03 7.602539e-01 -1.730034e+00 +7.348633e-01 -2.259521e-01 6.391602e-01 7.858085e-01 1.660156e-01 9.741211e-01 1.534424e-01 -3.198221e-01 -6.572266e-01 -6.626129e-03 7.534180e-01 -1.729976e+00 +7.304688e-01 -2.286377e-01 6.435547e-01 7.835127e-01 1.666260e-01 9.736328e-01 1.567383e-01 -3.210999e-01 -6.625977e-01 -7.202148e-03 7.490234e-01 -1.729694e+00 +7.260742e-01 -2.336426e-01 6.469727e-01 7.818568e-01 1.658936e-01 9.721680e-01 1.649170e-01 -3.220970e-01 -6.674805e-01 -1.245117e-02 7.446289e-01 -1.729242e+00 +7.255859e-01 -2.309570e-01 6.479492e-01 7.810658e-01 1.658936e-01 9.731445e-01 1.610107e-01 -3.225746e-01 -6.679688e-01 -9.307861e-03 7.441406e-01 -1.728794e+00 +7.231445e-01 -2.344971e-01 6.494141e-01 7.812048e-01 1.691895e-01 9.721680e-01 1.625977e-01 -3.225199e-01 -6.694336e-01 -7.778168e-03 7.426758e-01 -1.728477e+00 +7.304688e-01 -2.314453e-01 6.425781e-01 7.821190e-01 1.682129e-01 9.726562e-01 1.590576e-01 -3.220138e-01 -6.621094e-01 -8.079529e-03 7.495117e-01 -1.728368e+00 +7.363281e-01 -2.220459e-01 6.391602e-01 7.833011e-01 1.658936e-01 9.750977e-01 1.477051e-01 -3.212577e-01 -6.562500e-01 -2.683640e-03 7.548828e-01 -1.728418e+00 +7.377930e-01 -2.205811e-01 6.381836e-01 7.843334e-01 1.661377e-01 9.755859e-01 1.450195e-01 -3.205658e-01 -6.542969e-01 -9.298325e-04 7.563477e-01 -1.728499e+00 +7.397461e-01 -2.203369e-01 6.357422e-01 7.850994e-01 1.658936e-01 9.755859e-01 1.450195e-01 -3.200827e-01 -6.518555e-01 -1.802444e-03 7.583008e-01 -1.728490e+00 +7.343750e-01 -2.207031e-01 6.416016e-01 7.856901e-01 1.656494e-01 9.755859e-01 1.457520e-01 -3.198026e-01 -6.582031e-01 -7.762909e-04 7.529297e-01 -1.728312e+00 +7.475586e-01 -2.148438e-01 6.284180e-01 7.861883e-01 1.673584e-01 9.765625e-01 1.347656e-01 -3.197635e-01 -6.425781e-01 4.402161e-03 7.661133e-01 -1.727976e+00 +7.465820e-01 -2.150879e-01 6.298828e-01 7.863009e-01 1.658936e-01 9.765625e-01 1.368408e-01 -3.201078e-01 -6.445312e-01 2.368927e-03 7.646484e-01 -1.727485e+00 +7.431641e-01 -2.163086e-01 6.328125e-01 7.860423e-01 1.657715e-01 9.765625e-01 1.389160e-01 -3.207236e-01 -6.479492e-01 1.678467e-03 7.617188e-01 -1.726874e+00 
+7.441406e-01 -2.135010e-01 6.328125e-01 7.856698e-01 1.650391e-01 9.770508e-01 1.353760e-01 -3.214004e-01 -6.474609e-01 3.768921e-03 7.622070e-01 -1.726232e+00 +7.485352e-01 -2.106934e-01 6.289062e-01 7.852785e-01 1.651611e-01 9.775391e-01 1.308594e-01 -3.220334e-01 -6.425781e-01 5.920410e-03 7.666016e-01 -1.725603e+00 +7.480469e-01 -2.103271e-01 6.293945e-01 7.848600e-01 1.641846e-01 9.775391e-01 1.314697e-01 -3.225707e-01 -6.430664e-01 5.035400e-03 7.656250e-01 -1.724956e+00 +7.490234e-01 -2.059326e-01 6.298828e-01 7.845169e-01 1.635742e-01 9.785156e-01 1.253662e-01 -3.229301e-01 -6.420898e-01 9.078979e-03 7.666016e-01 -1.724288e+00 +7.504883e-01 -2.047119e-01 6.279297e-01 7.843597e-01 1.646729e-01 9.785156e-01 1.221313e-01 -3.231337e-01 -6.396484e-01 1.177979e-02 7.685547e-01 -1.723644e+00 +7.534180e-01 -2.021484e-01 6.254883e-01 7.844458e-01 1.651611e-01 9.790039e-01 1.175537e-01 -3.232828e-01 -6.362305e-01 1.473999e-02 7.709961e-01 -1.723027e+00 +7.563477e-01 -1.986084e-01 6.230469e-01 7.847680e-01 1.630859e-01 9.799805e-01 1.142578e-01 -3.234717e-01 -6.333008e-01 1.523590e-02 7.739258e-01 -1.722378e+00 +7.602539e-01 -1.951904e-01 6.196289e-01 7.853347e-01 1.619873e-01 9.804688e-01 1.101074e-01 -3.237103e-01 -6.293945e-01 1.667786e-02 7.768555e-01 -1.721626e+00 +7.631836e-01 -1.910400e-01 6.171875e-01 7.861386e-01 1.590576e-01 9.814453e-01 1.071777e-01 -3.239537e-01 -6.264648e-01 1.638794e-02 7.792969e-01 -1.720737e+00 +7.675781e-01 -1.884766e-01 6.127930e-01 7.871651e-01 1.585693e-01 9.819336e-01 1.034546e-01 -3.241251e-01 -6.210938e-01 1.776123e-02 7.836914e-01 -1.719683e+00 +7.709961e-01 -1.862793e-01 6.088867e-01 7.883578e-01 1.572266e-01 9.824219e-01 1.015625e-01 -3.241925e-01 -6.171875e-01 1.744080e-02 7.866211e-01 -1.718508e+00 +7.763672e-01 -1.835938e-01 6.030273e-01 7.896336e-01 1.569824e-01 9.829102e-01 9.710693e-02 -3.241632e-01 -6.103516e-01 1.927185e-02 7.919922e-01 -1.717278e+00 +7.792969e-01 -1.839600e-01 5.986328e-01 7.908343e-01 1.584473e-01 9.829102e-01 9.576416e-02 -3.241166e-01 -6.059570e-01 2.018738e-02 7.954102e-01 -1.716084e+00 +7.827148e-01 -1.822510e-01 5.952148e-01 7.918941e-01 1.572266e-01 9.829102e-01 9.436035e-02 -3.241216e-01 -6.020508e-01 1.963806e-02 7.983398e-01 -1.714974e+00 +7.856445e-01 -1.817627e-01 5.913086e-01 7.928006e-01 1.577148e-01 9.829102e-01 9.265137e-02 -3.241866e-01 -5.981445e-01 2.041626e-02 8.012695e-01 -1.713954e+00 +7.885742e-01 -1.804199e-01 5.878906e-01 7.935689e-01 1.571045e-01 9.833984e-01 9.094238e-02 -3.243036e-01 -5.947266e-01 2.067566e-02 8.037109e-01 -1.713030e+00 +7.915039e-01 -1.791992e-01 5.839844e-01 7.942175e-01 1.566162e-01 9.833984e-01 8.947754e-02 -3.244482e-01 -5.908203e-01 2.064514e-02 8.066406e-01 -1.712193e+00 +7.939453e-01 -1.779785e-01 5.815430e-01 7.947373e-01 1.560059e-01 9.838867e-01 8.813477e-02 -3.245997e-01 -5.878906e-01 2.078247e-02 8.085938e-01 -1.711439e+00 +7.958984e-01 -1.765137e-01 5.791016e-01 7.951356e-01 1.551514e-01 9.838867e-01 8.673096e-02 -3.247413e-01 -5.849609e-01 2.085876e-02 8.105469e-01 -1.710778e+00 +7.978516e-01 -1.759033e-01 5.766602e-01 7.954573e-01 1.552734e-01 9.843750e-01 8.538818e-02 -3.248688e-01 -5.825195e-01 2.143860e-02 8.125000e-01 -1.710201e+00 +7.993164e-01 -1.751709e-01 5.747070e-01 7.957524e-01 1.545410e-01 9.843750e-01 8.502197e-02 -3.249899e-01 -5.805664e-01 2.082825e-02 8.139648e-01 -1.709671e+00 +8.012695e-01 -1.749268e-01 5.722656e-01 7.960588e-01 1.545410e-01 9.843750e-01 8.459473e-02 -3.251071e-01 -5.781250e-01 2.064514e-02 8.159180e-01 -1.709153e+00 
+8.027344e-01 -1.743164e-01 5.703125e-01 7.963829e-01 1.542969e-01 9.843750e-01 8.374023e-02 -3.252319e-01 -5.756836e-01 2.072144e-02 8.173828e-01 -1.708633e+00 +8.046875e-01 -1.734619e-01 5.678711e-01 7.967118e-01 1.535645e-01 9.848633e-01 8.300781e-02 -3.253692e-01 -5.737305e-01 2.043152e-02 8.188477e-01 -1.708115e+00 +8.066406e-01 -1.724854e-01 5.654297e-01 7.970041e-01 1.530762e-01 9.848633e-01 8.203125e-02 -3.255146e-01 -5.712891e-01 2.037048e-02 8.208008e-01 -1.707594e+00 +8.085938e-01 -1.719971e-01 5.629883e-01 7.971988e-01 1.528320e-01 9.848633e-01 8.142090e-02 -3.256744e-01 -5.683594e-01 2.017212e-02 8.227539e-01 -1.707085e+00 +8.100586e-01 -1.715088e-01 5.610352e-01 7.972324e-01 1.524658e-01 9.848633e-01 8.093262e-02 -3.258579e-01 -5.664062e-01 2.000427e-02 8.237305e-01 -1.706616e+00 +8.105469e-01 -1.713867e-01 5.600586e-01 7.970775e-01 1.520996e-01 9.848633e-01 8.117676e-02 -3.260629e-01 -5.654297e-01 1.940918e-02 8.242188e-01 -1.706199e+00 +8.110352e-01 -1.705322e-01 5.595703e-01 7.967728e-01 1.517334e-01 9.853516e-01 8.050537e-02 -3.262819e-01 -5.649414e-01 1.960754e-02 8.247070e-01 -1.705827e+00 +8.110352e-01 -1.705322e-01 5.595703e-01 7.963840e-01 1.513672e-01 9.853516e-01 8.099365e-02 -3.265031e-01 -5.649414e-01 1.893616e-02 8.247070e-01 -1.705474e+00 +8.115234e-01 -1.697998e-01 5.585938e-01 7.959814e-01 1.507568e-01 9.853516e-01 8.044434e-02 -3.267078e-01 -5.644531e-01 1.895142e-02 8.251953e-01 -1.705099e+00 +8.125000e-01 -1.693115e-01 5.576172e-01 7.956077e-01 1.506348e-01 9.853516e-01 7.971191e-02 -3.268926e-01 -5.629883e-01 1.921082e-02 8.261719e-01 -1.704701e+00 +8.129883e-01 -1.689453e-01 5.566406e-01 7.952699e-01 1.502686e-01 9.853516e-01 7.965088e-02 -3.270646e-01 -5.620117e-01 1.889038e-02 8.266602e-01 -1.704345e+00 +8.134766e-01 -1.688232e-01 5.566406e-01 7.949673e-01 1.502686e-01 9.853516e-01 7.928467e-02 -3.272247e-01 -5.620117e-01 1.919556e-02 8.271484e-01 -1.704108e+00 +8.139648e-01 -1.688232e-01 5.556641e-01 7.947144e-01 1.502686e-01 9.853516e-01 7.916260e-02 -3.273727e-01 -5.610352e-01 1.907349e-02 8.276367e-01 -1.704018e+00 +8.139648e-01 -1.693115e-01 5.556641e-01 7.945458e-01 1.501465e-01 9.853516e-01 8.020020e-02 -3.275002e-01 -5.610352e-01 1.818848e-02 8.276367e-01 -1.704053e+00 +8.139648e-01 -1.693115e-01 5.551758e-01 7.944735e-01 1.502686e-01 9.853516e-01 8.013916e-02 -3.275993e-01 -5.610352e-01 1.818848e-02 8.276367e-01 -1.704132e+00 +8.154297e-01 -1.702881e-01 5.537109e-01 7.944651e-01 1.507568e-01 9.853516e-01 8.105469e-02 -3.276744e-01 -5.590820e-01 1.737976e-02 8.286133e-01 -1.704170e+00 +8.144531e-01 -1.691895e-01 5.551758e-01 7.944563e-01 1.499023e-01 9.853516e-01 8.032227e-02 -3.277227e-01 -5.610352e-01 1.782227e-02 8.276367e-01 -1.704114e+00 +8.144531e-01 -1.687012e-01 5.551758e-01 7.944391e-01 1.496582e-01 9.853516e-01 7.989502e-02 -3.277359e-01 -5.605469e-01 1.799011e-02 8.281250e-01 -1.703973e+00 +8.144531e-01 -1.687012e-01 5.556641e-01 7.944377e-01 1.497803e-01 9.853516e-01 7.971191e-02 -3.277102e-01 -5.610352e-01 1.832581e-02 8.276367e-01 -1.703777e+00 +8.149414e-01 -1.678467e-01 5.546875e-01 7.944746e-01 1.495361e-01 9.858398e-01 7.855225e-02 -3.276517e-01 -5.600586e-01 1.895142e-02 8.286133e-01 -1.703551e+00 +8.149414e-01 -1.676025e-01 5.551758e-01 7.945436e-01 1.495361e-01 9.858398e-01 7.806396e-02 -3.275731e-01 -5.600586e-01 1.937866e-02 8.281250e-01 -1.703321e+00 +8.149414e-01 -1.672363e-01 5.546875e-01 7.946653e-01 1.492920e-01 9.858398e-01 7.775879e-02 -3.274796e-01 -5.595703e-01 1.939392e-02 8.286133e-01 -1.703111e+00 
+8.154297e-01 -1.673584e-01 5.541992e-01 7.948436e-01 1.500244e-01 9.858398e-01 7.696533e-02 -3.273873e-01 -5.590820e-01 2.035522e-02 8.291016e-01 -1.702939e+00 +8.159180e-01 -1.671143e-01 5.532227e-01 7.950726e-01 1.495361e-01 9.858398e-01 7.720947e-02 -3.273209e-01 -5.585938e-01 1.974487e-02 8.291016e-01 -1.702831e+00 +8.178711e-01 -1.691895e-01 5.498047e-01 7.953144e-01 1.519775e-01 9.853516e-01 7.714844e-02 -3.272986e-01 -5.551758e-01 2.046204e-02 8.315430e-01 -1.702751e+00 +8.193359e-01 -1.666260e-01 5.483398e-01 7.954760e-01 1.488037e-01 9.858398e-01 7.720947e-02 -3.273437e-01 -5.532227e-01 1.826477e-02 8.325195e-01 -1.702638e+00 +8.168945e-01 -1.667480e-01 5.517578e-01 7.955274e-01 1.492920e-01 9.858398e-01 7.690430e-02 -3.274177e-01 -5.566406e-01 1.957703e-02 8.305664e-01 -1.702500e+00 +8.173828e-01 -1.665039e-01 5.517578e-01 7.955243e-01 1.489258e-01 9.858398e-01 7.696533e-02 -3.274948e-01 -5.566406e-01 1.930237e-02 8.305664e-01 -1.702400e+00 +8.178711e-01 -1.665039e-01 5.507812e-01 7.955016e-01 1.488037e-01 9.858398e-01 7.714844e-02 -3.275601e-01 -5.561523e-01 1.890564e-02 8.310547e-01 -1.702361e+00 +8.183594e-01 -1.669922e-01 5.502930e-01 7.954719e-01 1.491699e-01 9.858398e-01 7.733154e-02 -3.276078e-01 -5.551758e-01 1.882935e-02 8.315430e-01 -1.702389e+00 +8.178711e-01 -1.671143e-01 5.502930e-01 7.954584e-01 1.491699e-01 9.858398e-01 7.751465e-02 -3.276438e-01 -5.556641e-01 1.870728e-02 8.310547e-01 -1.702465e+00 +8.183594e-01 -1.671143e-01 5.498047e-01 7.955064e-01 1.490479e-01 9.858398e-01 7.775879e-02 -3.276700e-01 -5.551758e-01 1.834106e-02 8.315430e-01 -1.702523e+00 +8.188477e-01 -1.673584e-01 5.488281e-01 7.956193e-01 1.495361e-01 9.858398e-01 7.739258e-02 -3.276909e-01 -5.541992e-01 1.876831e-02 8.320312e-01 -1.702513e+00 +8.188477e-01 -1.668701e-01 5.488281e-01 7.957642e-01 1.486816e-01 9.858398e-01 7.781982e-02 -3.277156e-01 -5.541992e-01 1.791382e-02 8.320312e-01 -1.702411e+00 +8.193359e-01 -1.665039e-01 5.488281e-01 7.959234e-01 1.484375e-01 9.858398e-01 7.751465e-02 -3.277255e-01 -5.537109e-01 1.791382e-02 8.325195e-01 -1.702232e+00 +8.198242e-01 -1.663818e-01 5.478516e-01 7.960906e-01 1.483154e-01 9.858398e-01 7.751465e-02 -3.277081e-01 -5.532227e-01 1.771545e-02 8.330078e-01 -1.702022e+00 +8.198242e-01 -1.657715e-01 5.483398e-01 7.962777e-01 1.480713e-01 9.858398e-01 7.678223e-02 -3.276619e-01 -5.532227e-01 1.824951e-02 8.325195e-01 -1.701831e+00 +8.203125e-01 -1.658936e-01 5.473633e-01 7.964958e-01 1.481934e-01 9.858398e-01 7.666016e-02 -3.276022e-01 -5.527344e-01 1.824951e-02 8.334961e-01 -1.701709e+00 +8.208008e-01 -1.656494e-01 5.468750e-01 7.967088e-01 1.483154e-01 9.858398e-01 7.623291e-02 -3.275568e-01 -5.517578e-01 1.850891e-02 8.339844e-01 -1.701633e+00 +8.208008e-01 -1.654053e-01 5.468750e-01 7.968737e-01 1.480713e-01 9.858398e-01 7.604980e-02 -3.275499e-01 -5.517578e-01 1.847839e-02 8.339844e-01 -1.701563e+00 +8.212891e-01 -1.654053e-01 5.458984e-01 7.969847e-01 1.478271e-01 9.858398e-01 7.641602e-02 -3.275801e-01 -5.512695e-01 1.795959e-02 8.339844e-01 -1.701475e+00 +8.212891e-01 -1.654053e-01 5.458984e-01 7.970615e-01 1.477051e-01 9.858398e-01 7.659912e-02 -3.276235e-01 -5.507812e-01 1.771545e-02 8.344727e-01 -1.701354e+00 +8.212891e-01 -1.651611e-01 5.458984e-01 7.971358e-01 1.474609e-01 9.863281e-01 7.653809e-02 -3.276593e-01 -5.507812e-01 1.762390e-02 8.344727e-01 -1.701204e+00 +8.222656e-01 -1.646729e-01 5.449219e-01 7.972256e-01 1.472168e-01 9.863281e-01 7.586670e-02 -3.276820e-01 -5.498047e-01 1.786804e-02 8.349609e-01 -1.701034e+00 
+8.222656e-01 -1.640625e-01 5.444336e-01 7.973271e-01 1.472168e-01 9.863281e-01 7.464600e-02 -3.277049e-01 -5.493164e-01 1.878357e-02 8.354492e-01 -1.700861e+00 +8.232422e-01 -1.640625e-01 5.434570e-01 7.974332e-01 1.468506e-01 9.863281e-01 7.525635e-02 -3.277446e-01 -5.483398e-01 1.789856e-02 8.359375e-01 -1.700709e+00 +8.237305e-01 -1.640625e-01 5.429688e-01 7.975088e-01 1.469727e-01 9.863281e-01 7.513428e-02 -3.277902e-01 -5.478516e-01 1.789856e-02 8.364258e-01 -1.700611e+00 +8.237305e-01 -1.645508e-01 5.424805e-01 7.975323e-01 1.468506e-01 9.863281e-01 7.617188e-02 -3.278284e-01 -5.473633e-01 1.693726e-02 8.364258e-01 -1.700577e+00 +8.237305e-01 -1.646729e-01 5.429688e-01 7.975044e-01 1.470947e-01 9.863281e-01 7.598877e-02 -3.278391e-01 -5.478516e-01 1.725769e-02 8.364258e-01 -1.700598e+00 +8.232422e-01 -1.651611e-01 5.429688e-01 7.974390e-01 1.475830e-01 9.863281e-01 7.617188e-02 -3.278238e-01 -5.478516e-01 1.742554e-02 8.364258e-01 -1.700642e+00 +8.232422e-01 -1.652832e-01 5.429688e-01 7.973610e-01 1.477051e-01 9.863281e-01 7.629395e-02 -3.277980e-01 -5.478516e-01 1.737976e-02 8.364258e-01 -1.700689e+00 +8.232422e-01 -1.655273e-01 5.429688e-01 7.972901e-01 1.480713e-01 9.858398e-01 7.629395e-02 -3.277799e-01 -5.478516e-01 1.756287e-02 8.364258e-01 -1.700743e+00 +8.237305e-01 -1.651611e-01 5.424805e-01 7.972376e-01 1.474609e-01 9.863281e-01 7.629395e-02 -3.277805e-01 -5.478516e-01 1.721191e-02 8.364258e-01 -1.700800e+00 +8.232422e-01 -1.652832e-01 5.429688e-01 7.971787e-01 1.477051e-01 9.858398e-01 7.623291e-02 -3.277992e-01 -5.478516e-01 1.739502e-02 8.364258e-01 -1.700822e+00 +8.232422e-01 -1.654053e-01 5.434570e-01 7.971119e-01 1.478271e-01 9.858398e-01 7.623291e-02 -3.278369e-01 -5.483398e-01 1.751709e-02 8.359375e-01 -1.700805e+00 +8.232422e-01 -1.650391e-01 5.429688e-01 7.970576e-01 1.474609e-01 9.863281e-01 7.629395e-02 -3.278854e-01 -5.483398e-01 1.728821e-02 8.359375e-01 -1.700762e+00 +8.232422e-01 -1.651611e-01 5.429688e-01 7.970169e-01 1.474609e-01 9.863281e-01 7.635498e-02 -3.279286e-01 -5.483398e-01 1.724243e-02 8.359375e-01 -1.700697e+00 +8.232422e-01 -1.650391e-01 5.434570e-01 7.969651e-01 1.470947e-01 9.863281e-01 7.666016e-02 -3.279570e-01 -5.488281e-01 1.679993e-02 8.359375e-01 -1.700615e+00 +8.232422e-01 -1.654053e-01 5.434570e-01 7.968781e-01 1.475830e-01 9.858398e-01 7.666016e-02 -3.279750e-01 -5.483398e-01 1.707458e-02 8.359375e-01 -1.700519e+00 +8.227539e-01 -1.656494e-01 5.434570e-01 7.967613e-01 1.479492e-01 9.858398e-01 7.647705e-02 -3.280029e-01 -5.483398e-01 1.745605e-02 8.359375e-01 -1.700438e+00 +8.227539e-01 -1.652832e-01 5.439453e-01 7.966541e-01 1.474609e-01 9.863281e-01 7.659912e-02 -3.280513e-01 -5.488281e-01 1.715088e-02 8.354492e-01 -1.700406e+00 +8.222656e-01 -1.651611e-01 5.444336e-01 7.965610e-01 1.472168e-01 9.863281e-01 7.684326e-02 -3.281109e-01 -5.498047e-01 1.693726e-02 8.354492e-01 -1.700418e+00 +8.222656e-01 -1.651611e-01 5.449219e-01 7.964659e-01 1.470947e-01 9.863281e-01 7.696533e-02 -3.281717e-01 -5.498047e-01 1.689148e-02 8.349609e-01 -1.700448e+00 +8.217773e-01 -1.651611e-01 5.454102e-01 7.963420e-01 1.469727e-01 9.863281e-01 7.714844e-02 -3.282283e-01 -5.502930e-01 1.673889e-02 8.349609e-01 -1.700483e+00 +8.212891e-01 -1.650391e-01 5.458984e-01 7.961791e-01 1.468506e-01 9.863281e-01 7.727051e-02 -3.282791e-01 -5.507812e-01 1.667786e-02 8.344727e-01 -1.700517e+00 +8.208008e-01 -1.654053e-01 5.463867e-01 7.959859e-01 1.470947e-01 9.863281e-01 7.745361e-02 -3.283260e-01 -5.517578e-01 1.681519e-02 8.339844e-01 -1.700551e+00 diff --git 
a/data/traj_raw/2011_F_EuMeT2wBo_00014_00001.txt b/data/traj_raw/2011_F_EuMeT2wBo_00014_00001.txt new file mode 100644 index 0000000000000000000000000000000000000000..5e710d26c5321182c4e30eefbf656c4bbeca763f --- /dev/null +++ b/data/traj_raw/2011_F_EuMeT2wBo_00014_00001.txt @@ -0,0 +1,24 @@ +9.985352e-01 2.099609e-02 -4.824829e-02 4.860556e-01 -1.538849e-02 9.931641e-01 1.138916e-01 -6.379300e-01 5.032349e-02 -1.130371e-01 9.921875e-01 5.176602e-01 +9.985352e-01 2.120972e-02 -4.913330e-02 4.880297e-01 -1.540375e-02 9.931641e-01 1.155396e-01 -6.384819e-01 5.123901e-02 -1.146240e-01 9.921875e-01 5.196654e-01 +9.985352e-01 2.154541e-02 -4.995728e-02 4.928512e-01 -1.554871e-02 9.931641e-01 1.172485e-01 -6.399131e-01 5.212402e-02 -1.163330e-01 9.916992e-01 5.244759e-01 +9.985352e-01 2.186584e-02 -5.090332e-02 4.990229e-01 -1.567078e-02 9.926758e-01 1.192017e-01 -6.418439e-01 5.316162e-02 -1.182251e-01 9.916992e-01 5.304989e-01 +9.985352e-01 2.264404e-02 -5.136108e-02 5.055194e-01 -1.626587e-02 9.926758e-01 1.212158e-01 -6.439267e-01 5.371094e-02 -1.202393e-01 9.912109e-01 5.366904e-01 +9.985352e-01 2.291870e-02 -5.224609e-02 5.120227e-01 -1.634216e-02 9.921875e-01 1.230469e-01 -6.459311e-01 5.465698e-02 -1.220093e-01 9.912109e-01 5.428628e-01 +9.985352e-01 2.351379e-02 -5.313110e-02 5.184246e-01 -1.670837e-02 9.921875e-01 1.251221e-01 -6.477982e-01 5.563354e-02 -1.239624e-01 9.907227e-01 5.490476e-01 +9.980469e-01 2.445984e-02 -5.361938e-02 5.248702e-01 -1.745605e-02 9.916992e-01 1.273193e-01 -6.495519e-01 5.627441e-02 -1.260986e-01 9.902344e-01 5.555033e-01 +9.980469e-01 2.424622e-02 -5.468750e-02 5.315782e-01 -1.701355e-02 9.916992e-01 1.291504e-01 -6.512792e-01 5.734253e-02 -1.280518e-01 9.902344e-01 5.624686e-01 +9.980469e-01 2.445984e-02 -5.596924e-02 5.383441e-01 -1.692200e-02 9.912109e-01 1.312256e-01 -6.530813e-01 5.868530e-02 -1.301270e-01 9.897461e-01 5.695681e-01 +9.980469e-01 2.520752e-02 -5.685425e-02 5.448146e-01 -1.741028e-02 9.907227e-01 1.335449e-01 -6.549842e-01 5.969238e-02 -1.323242e-01 9.892578e-01 5.762525e-01 +9.980469e-01 2.571106e-02 -5.737305e-02 5.509764e-01 -1.770020e-02 9.907227e-01 1.359863e-01 -6.569639e-01 6.033325e-02 -1.347656e-01 9.892578e-01 5.825080e-01 +9.980469e-01 2.583313e-02 -5.859375e-02 5.570124e-01 -1.750183e-02 9.902344e-01 1.385498e-01 -6.589289e-01 6.161499e-02 -1.372070e-01 9.887695e-01 5.886446e-01 +9.980469e-01 2.630615e-02 -5.957031e-02 5.628777e-01 -1.768494e-02 9.897461e-01 1.409912e-01 -6.608270e-01 6.268311e-02 -1.395264e-01 9.882812e-01 5.946071e-01 +9.980469e-01 2.691650e-02 -6.048584e-02 5.685300e-01 -1.802063e-02 9.897461e-01 1.430664e-01 -6.626642e-01 6.372070e-02 -1.417236e-01 9.877930e-01 6.003045e-01 +9.975586e-01 2.725220e-02 -6.134033e-02 5.739849e-01 -1.808167e-02 9.892578e-01 1.453857e-01 -6.644203e-01 6.463623e-02 -1.439209e-01 9.873047e-01 6.057309e-01 +9.975586e-01 2.764893e-02 -6.240845e-02 5.792323e-01 -1.817322e-02 9.887695e-01 1.478271e-01 -6.661186e-01 6.579590e-02 -1.462402e-01 9.868164e-01 6.108540e-01 +9.975586e-01 2.812195e-02 -6.335449e-02 5.841907e-01 -1.834106e-02 9.882812e-01 1.500244e-01 -6.678301e-01 6.683350e-02 -1.485596e-01 9.868164e-01 6.155736e-01 +9.975586e-01 2.885437e-02 -6.427002e-02 5.888018e-01 -1.875305e-02 9.882812e-01 1.525879e-01 -6.695808e-01 6.787109e-02 -1.510010e-01 9.863281e-01 6.197994e-01 +9.975586e-01 2.932739e-02 -6.512451e-02 5.930974e-01 -1.892090e-02 9.877930e-01 1.551514e-01 -6.713051e-01 6.884766e-02 -1.535645e-01 9.858398e-01 6.235797e-01 +9.975586e-01 3.102112e-02 -6.610107e-02 
5.971568e-01 -2.024841e-02 9.873047e-01 1.578369e-01 -6.729463e-01 7.012939e-02 -1.560059e-01 9.853516e-01 6.271073e-01 +9.970703e-01 3.140259e-02 -6.701660e-02 6.011077e-01 -2.030945e-02 9.868164e-01 1.601562e-01 -6.745193e-01 7.116699e-02 -1.583252e-01 9.848633e-01 6.305760e-01 +9.970703e-01 3.204346e-02 -6.787109e-02 6.050442e-01 -2.061462e-02 9.863281e-01 1.625977e-01 -6.760550e-01 7.214355e-02 -1.607666e-01 9.843750e-01 6.340923e-01 +9.970703e-01 3.262329e-02 -6.848145e-02 6.089856e-01 -2.091980e-02 9.858398e-01 1.650391e-01 -6.775799e-01 7.293701e-02 -1.630859e-01 9.838867e-01 6.376294e-01 diff --git a/data/traj_raw/2011_KAeAqaA0Llg_00005_00001.txt b/data/traj_raw/2011_KAeAqaA0Llg_00005_00001.txt new file mode 100644 index 0000000000000000000000000000000000000000..ab32eb9964b9053536c6d1e6a2fc8181467bae4c --- /dev/null +++ b/data/traj_raw/2011_KAeAqaA0Llg_00005_00001.txt @@ -0,0 +1,58 @@ +9.741211e-01 -1.998901e-02 2.250977e-01 -2.306393e-01 2.278137e-02 9.995117e-01 -9.826660e-03 -7.314096e-02 -2.247314e-01 1.470184e-02 9.741211e-01 2.434377e-01 +9.711914e-01 -2.186584e-02 2.373047e-01 -2.324841e-01 2.473450e-02 9.995117e-01 -9.124756e-03 -7.288791e-02 -2.370605e-01 1.472473e-02 9.711914e-01 2.444886e-01 +9.677734e-01 -2.359009e-02 2.509766e-01 -2.372425e-01 2.639771e-02 9.995117e-01 -7.827759e-03 -7.240475e-02 -2.507324e-01 1.420593e-02 9.677734e-01 2.471653e-01 +9.638672e-01 -2.511597e-02 2.644043e-01 -2.436753e-01 2.783203e-02 9.995117e-01 -6.538391e-03 -7.196380e-02 -2.641602e-01 1.366425e-02 9.643555e-01 2.515917e-01 +9.599609e-01 -2.644348e-02 2.792969e-01 -2.508582e-01 2.899170e-02 9.995117e-01 -4.985809e-03 -7.171503e-02 -2.790527e-01 1.288605e-02 9.599609e-01 2.580755e-01 +9.555664e-01 -2.764893e-02 2.939453e-01 -2.582350e-01 3.009033e-02 9.995117e-01 -3.807068e-03 -7.176545e-02 -2.937012e-01 1.248169e-02 9.560547e-01 2.666957e-01 +9.506836e-01 -2.983093e-02 3.085938e-01 -2.656547e-01 3.240967e-02 9.995117e-01 -3.246307e-03 -7.222077e-02 -3.083496e-01 1.308441e-02 9.511719e-01 2.772555e-01 +9.453125e-01 -3.056335e-02 3.244629e-01 -2.733544e-01 3.314209e-02 9.995117e-01 -2.403259e-03 -7.315494e-02 -3.242188e-01 1.302338e-02 9.458008e-01 2.890224e-01 +9.399414e-01 -3.454590e-02 3.400879e-01 -2.815711e-01 3.713989e-02 9.995117e-01 -1.118660e-03 -7.440231e-02 -3.398438e-01 1.367950e-02 9.404297e-01 3.009077e-01 +9.335938e-01 -3.387451e-02 3.569336e-01 -2.905826e-01 3.616333e-02 9.995117e-01 2.758503e-04 -7.546368e-02 -3.566895e-01 1.264954e-02 9.340820e-01 3.113580e-01 +9.272461e-01 -3.536987e-02 3.728027e-01 -3.000570e-01 3.771973e-02 9.995117e-01 1.014709e-03 -7.582711e-02 -3.725586e-01 1.311493e-02 9.277344e-01 3.196460e-01 +9.208984e-01 -3.701782e-02 3.884277e-01 -3.097096e-01 3.955078e-02 9.990234e-01 1.469612e-03 -7.540819e-02 -3.881836e-01 1.401520e-02 9.213867e-01 3.258460e-01 +9.140625e-01 -3.814697e-02 4.042969e-01 -3.192910e-01 4.089355e-02 9.990234e-01 1.882553e-03 -7.453679e-02 -4.040527e-01 1.480103e-02 9.145508e-01 3.306044e-01 +9.067383e-01 -3.942871e-02 4.201660e-01 -3.289290e-01 4.254150e-02 9.990234e-01 1.962662e-03 -7.363029e-02 -4.199219e-01 1.609802e-02 9.072266e-01 3.337579e-01 +8.989258e-01 -4.049683e-02 4.362793e-01 -3.390157e-01 4.421997e-02 9.990234e-01 1.652718e-03 -7.309227e-02 -4.357910e-01 1.780701e-02 8.999023e-01 3.344761e-01 +8.906250e-01 -4.260254e-02 4.523926e-01 -3.500350e-01 4.678345e-02 9.990234e-01 1.966476e-03 -7.320766e-02 -4.519043e-01 1.940918e-02 8.920898e-01 3.315610e-01 +8.818359e-01 -4.577637e-02 4.694824e-01 -3.621157e-01 
5.035400e-02 9.985352e-01 2.817154e-03 -7.402352e-02 -4.689941e-01 2.114868e-02 8.828125e-01 3.242716e-01 +8.725586e-01 -4.827881e-02 4.860840e-01 -3.747433e-01 5.270386e-02 9.985352e-01 4.547119e-03 -7.516275e-02 -4.855957e-01 2.165222e-02 8.740234e-01 3.136974e-01 +8.627930e-01 -5.145264e-02 5.024414e-01 -3.870174e-01 5.603027e-02 9.985352e-01 6.000519e-03 -7.568157e-02 -5.019531e-01 2.297974e-02 8.642578e-01 3.040472e-01 +8.530273e-01 -5.465698e-02 5.185547e-01 -3.977439e-01 5.947876e-02 9.980469e-01 7.328033e-03 -7.480361e-02 -5.180664e-01 2.459717e-02 8.549805e-01 2.999029e-01 +8.432617e-01 -5.596924e-02 5.346680e-01 -4.062150e-01 6.106567e-02 9.980469e-01 8.186340e-03 -7.244446e-02 -5.341797e-01 2.574158e-02 8.452148e-01 3.027921e-01 +8.334961e-01 -5.825806e-02 5.493164e-01 -4.127880e-01 6.396484e-02 9.980469e-01 8.804321e-03 -6.924504e-02 -5.488281e-01 2.780151e-02 8.354492e-01 3.098717e-01 +8.242188e-01 -5.975342e-02 5.634766e-01 -4.183272e-01 6.573486e-02 9.975586e-01 9.689331e-03 -6.614729e-02 -5.625000e-01 2.905273e-02 8.261719e-01 3.179656e-01 +8.144531e-01 -6.097412e-02 5.766602e-01 -4.235680e-01 6.713867e-02 9.975586e-01 1.065826e-02 -6.352281e-02 -5.761719e-01 3.005981e-02 8.168945e-01 3.257888e-01 +8.051758e-01 -6.222534e-02 5.893555e-01 -4.289002e-01 6.854248e-02 9.975586e-01 1.162720e-02 -6.112549e-02 -5.888672e-01 3.106689e-02 8.076172e-01 3.339079e-01 +7.963867e-01 -6.341553e-02 6.015625e-01 -4.345329e-01 6.994629e-02 9.975586e-01 1.262665e-02 -5.846767e-02 -6.010742e-01 3.201294e-02 7.988281e-01 3.432736e-01 +7.875977e-01 -6.420898e-02 6.127930e-01 -4.406801e-01 7.080078e-02 9.975586e-01 1.349640e-02 -5.517217e-02 -6.118164e-01 3.274536e-02 7.900391e-01 3.540891e-01 +7.788086e-01 -6.500244e-02 6.240234e-01 -4.475944e-01 7.189941e-02 9.970703e-01 1.423645e-02 -5.105118e-02 -6.235352e-01 3.378296e-02 7.812500e-01 3.656226e-01 +7.695312e-01 -6.542969e-02 6.352539e-01 -4.551058e-01 7.263184e-02 9.970703e-01 1.475525e-02 -4.632592e-02 -6.342773e-01 3.479004e-02 7.724609e-01 3.759905e-01 +7.617188e-01 -6.652832e-02 6.440430e-01 -4.631524e-01 7.391357e-02 9.970703e-01 1.554871e-02 -4.152131e-02 -6.435547e-01 3.576660e-02 7.646484e-01 3.833671e-01 +7.543945e-01 -6.701660e-02 6.533203e-01 -4.717507e-01 7.409668e-02 9.970703e-01 1.675415e-02 -3.720129e-02 -6.523438e-01 3.576660e-02 7.568359e-01 3.881224e-01 +7.465820e-01 -6.793213e-02 6.616211e-01 -4.807797e-01 7.470703e-02 9.970703e-01 1.808167e-02 -3.358205e-02 -6.606445e-01 3.591919e-02 7.495117e-01 3.912534e-01 +7.392578e-01 -6.896973e-02 6.694336e-01 -4.899923e-01 7.525635e-02 9.970703e-01 1.953125e-02 -3.071535e-02 -6.689453e-01 3.594971e-02 7.421875e-01 3.938011e-01 +7.314453e-01 -7.025146e-02 6.782227e-01 -4.991217e-01 7.623291e-02 9.970703e-01 2.104187e-02 -2.847644e-02 -6.772461e-01 3.628540e-02 7.348633e-01 3.963042e-01 +7.246094e-01 -7.122803e-02 6.855469e-01 -5.080580e-01 7.702637e-02 9.965820e-01 2.218628e-02 -2.689474e-02 -6.850586e-01 3.674316e-02 7.275391e-01 3.986222e-01 +7.167969e-01 -7.281494e-02 6.933594e-01 -5.168570e-01 7.836914e-02 9.965820e-01 2.359009e-02 -2.605357e-02 -6.928711e-01 3.744507e-02 7.202148e-01 4.010102e-01 +7.094727e-01 -7.397461e-02 7.011719e-01 -5.253543e-01 7.922363e-02 9.965820e-01 2.503967e-02 -2.590779e-02 -7.006836e-01 3.778076e-02 7.128906e-01 4.043064e-01 +7.021484e-01 -7.525635e-02 7.080078e-01 -5.333599e-01 7.983398e-02 9.965820e-01 2.671814e-02 -2.641907e-02 -7.075195e-01 3.778076e-02 7.055664e-01 4.092638e-01 +6.948242e-01 -7.641602e-02 7.148438e-01 -5.409893e-01 8.032227e-02 
9.965820e-01 2.845764e-02 -2.771658e-02 -7.143555e-01 3.762817e-02 6.987305e-01 4.164498e-01 +6.879883e-01 -7.788086e-02 7.216797e-01 -5.484772e-01 8.081055e-02 9.960938e-01 3.047180e-02 -2.987611e-02 -7.211914e-01 3.738403e-02 6.914062e-01 4.259856e-01 +6.796875e-01 -7.916260e-02 7.294922e-01 -5.558975e-01 7.861328e-02 9.960938e-01 3.491211e-02 -3.234961e-02 -7.294922e-01 3.359985e-02 6.831055e-01 4.368935e-01 +6.733398e-01 -8.044434e-02 7.348633e-01 -5.630521e-01 8.026123e-02 9.960938e-01 3.552246e-02 -3.384205e-02 -7.348633e-01 3.506470e-02 6.772461e-01 4.476822e-01 +6.660156e-01 -8.117676e-02 7.416992e-01 -5.697857e-01 7.983398e-02 9.960938e-01 3.735352e-02 -3.426104e-02 -7.416992e-01 3.433228e-02 6.699219e-01 4.591783e-01 +6.586914e-01 -8.227539e-02 7.480469e-01 -5.761169e-01 8.056641e-02 9.960938e-01 3.857422e-02 -3.377694e-02 -7.480469e-01 3.485107e-02 6.625977e-01 4.724047e-01 +6.508789e-01 -8.245850e-02 7.548828e-01 -5.820769e-01 7.952881e-02 9.960938e-01 4.025269e-02 -3.280895e-02 -7.548828e-01 3.381348e-02 6.547852e-01 4.875203e-01 +6.425781e-01 -8.325195e-02 7.617188e-01 -5.876975e-01 7.958984e-02 9.960938e-01 4.171753e-02 -3.167094e-02 -7.622070e-01 3.381348e-02 6.464844e-01 5.030807e-01 +6.352539e-01 -8.331299e-02 7.675781e-01 -5.931835e-01 7.818604e-02 9.960938e-01 4.336548e-02 -3.069549e-02 -7.685547e-01 3.247070e-02 6.391602e-01 5.164952e-01 +6.279297e-01 -8.465576e-02 7.739258e-01 -5.990233e-01 7.769775e-02 9.960938e-01 4.595947e-02 -3.008800e-02 -7.744141e-01 3.128052e-02 6.318359e-01 5.260742e-01 +6.191406e-01 -8.538818e-02 7.807617e-01 -6.052812e-01 7.684326e-02 9.960938e-01 4.797363e-02 -2.958863e-02 -7.817383e-01 3.030396e-02 6.230469e-01 5.313897e-01 +6.132812e-01 -8.648682e-02 7.851562e-01 -6.118478e-01 7.647705e-02 9.956055e-01 4.995728e-02 -2.880209e-02 -7.861328e-01 2.941895e-02 6.171875e-01 5.327746e-01 +6.054688e-01 -8.758545e-02 7.910156e-01 -6.188377e-01 7.623291e-02 9.956055e-01 5.191040e-02 -2.766420e-02 -7.924805e-01 2.888489e-02 6.093750e-01 5.326623e-01 +5.971680e-01 -8.892822e-02 7.973633e-01 -6.263694e-01 7.598877e-02 9.956055e-01 5.416870e-02 -2.645973e-02 -7.988281e-01 2.824402e-02 6.010742e-01 5.324435e-01 +5.893555e-01 -8.966064e-02 8.032227e-01 -6.337939e-01 7.476807e-02 9.956055e-01 5.633545e-02 -2.532964e-02 -8.046875e-01 2.687073e-02 5.932617e-01 5.333852e-01 +5.805664e-01 -9.057617e-02 8.090820e-01 -6.404802e-01 7.464600e-02 9.956055e-01 5.789185e-02 -2.425434e-02 -8.110352e-01 2.677917e-02 5.844727e-01 5.362523e-01 +5.727539e-01 -9.100342e-02 8.149414e-01 -6.462220e-01 7.305908e-02 9.956055e-01 5.984497e-02 -2.327397e-02 -8.164062e-01 2.523804e-02 5.766602e-01 5.406610e-01 +5.639648e-01 -9.204102e-02 8.208008e-01 -6.510112e-01 7.250977e-02 9.956055e-01 6.176758e-02 -2.227745e-02 -8.227539e-01 2.468872e-02 5.683594e-01 5.460566e-01 +5.556641e-01 -9.252930e-02 8.261719e-01 -6.551539e-01 7.159424e-02 9.956055e-01 6.335449e-02 -2.127001e-02 -8.286133e-01 2.397156e-02 5.595703e-01 5.517459e-01 +5.468750e-01 -9.362793e-02 8.320312e-01 -6.590358e-01 7.128906e-02 9.951172e-01 6.512451e-02 -2.026968e-02 -8.339844e-01 2.369690e-02 5.512695e-01 5.574018e-01 diff --git a/data/traj_raw/2011_MCkKihQrNA4_00014_00000.txt b/data/traj_raw/2011_MCkKihQrNA4_00014_00000.txt new file mode 100644 index 0000000000000000000000000000000000000000..24eacfed8299850abe66047cecb866d5fef60a3a --- /dev/null +++ b/data/traj_raw/2011_MCkKihQrNA4_00014_00000.txt @@ -0,0 +1,212 @@ +1.000000e+00 2.065897e-04 -4.680157e-04 1.762601e-02 -2.065897e-04 1.000000e+00 4.732609e-05 
-3.684070e-02 4.680157e-04 -4.720688e-05 1.000000e+00 -2.456987e-04 +1.000000e+00 -2.864838e-03 1.213837e-02 1.976236e-02 2.878189e-03 1.000000e+00 -1.036644e-03 -3.672050e-02 -1.213837e-02 1.070976e-03 1.000000e+00 -6.191349e-04 +9.995117e-01 -6.122589e-03 2.548218e-02 2.515601e-02 6.164551e-03 1.000000e+00 -1.481056e-03 -3.644692e-02 -2.548218e-02 1.637459e-03 9.995117e-01 -1.606273e-03 +9.990234e-01 -9.407043e-03 3.909302e-02 3.232180e-02 9.513855e-03 1.000000e+00 -2.374649e-03 -3.612937e-02 -3.906250e-02 2.744675e-03 9.990234e-01 -3.003642e-03 +9.985352e-01 -1.340485e-02 5.212402e-02 4.016349e-02 1.345825e-02 1.000000e+00 -6.346703e-04 -3.582660e-02 -5.209351e-02 1.335144e-03 9.985352e-01 -4.656448e-03 +9.980469e-01 -1.692200e-02 6.262207e-02 4.806255e-02 1.698303e-02 1.000000e+00 -4.401207e-04 -3.552272e-02 -6.262207e-02 1.502991e-03 9.980469e-01 -6.473796e-03 +9.970703e-01 -1.953125e-02 7.421875e-02 5.577128e-02 1.956177e-02 1.000000e+00 2.375841e-04 -3.519562e-02 -7.421875e-02 1.214981e-03 9.970703e-01 -8.425442e-03 +9.960938e-01 -2.238464e-02 8.361816e-02 6.330044e-02 2.233887e-02 9.995117e-01 1.630783e-03 -3.483548e-02 -8.361816e-02 2.428293e-04 9.965820e-01 -1.051363e-02 +9.951172e-01 -2.458191e-02 9.631348e-02 7.077542e-02 2.474976e-02 9.995117e-01 -4.959106e-04 -3.444926e-02 -9.625244e-02 2.876282e-03 9.951172e-01 -1.274756e-02 +9.936523e-01 -2.745056e-02 1.079102e-01 7.835003e-02 2.735901e-02 9.995117e-01 2.296448e-03 -3.406132e-02 -1.079102e-01 6.690025e-04 9.941406e-01 -1.513372e-02 +9.921875e-01 -3.085327e-02 1.203003e-01 8.610108e-02 3.063965e-02 9.995117e-01 3.650665e-03 -3.361474e-02 -1.203613e-01 6.258488e-05 9.926758e-01 -1.768627e-02 +9.907227e-01 -3.430176e-02 1.331787e-01 9.403919e-02 3.421021e-02 9.995117e-01 3.053665e-03 -3.305498e-02 -1.331787e-01 1.531601e-03 9.912109e-01 -2.041361e-02 +9.887695e-01 -3.805542e-02 1.455078e-01 1.021470e-01 3.802490e-02 9.995117e-01 2.908707e-03 -3.238676e-02 -1.455078e-01 2.656937e-03 9.892578e-01 -2.330433e-02 +9.863281e-01 -4.156494e-02 1.583252e-01 1.103952e-01 4.153442e-02 9.990234e-01 3.517151e-03 -3.165199e-02 -1.584473e-01 3.105164e-03 9.873047e-01 -2.633591e-02 +9.843750e-01 -4.434204e-02 1.699219e-01 1.187429e-01 4.455566e-02 9.990234e-01 2.580643e-03 -3.091619e-02 -1.697998e-01 5.031586e-03 9.853516e-01 -2.950127e-02 +9.819336e-01 -4.779053e-02 1.821289e-01 1.271592e-01 4.751587e-02 9.990234e-01 5.813599e-03 -3.022545e-02 -1.821289e-01 2.943039e-03 9.833984e-01 -3.280259e-02 +9.794922e-01 -5.175781e-02 1.944580e-01 1.356743e-01 5.072021e-02 9.985352e-01 1.021576e-02 -2.951527e-02 -1.947021e-01 -1.381636e-04 9.809570e-01 -3.625912e-02 +9.770508e-01 -5.453491e-02 2.067871e-01 1.443138e-01 5.368042e-02 9.985352e-01 9.658813e-03 -2.870767e-02 -2.069092e-01 1.666069e-03 9.785156e-01 -3.989611e-02 +9.741211e-01 -5.822754e-02 2.194824e-01 1.530599e-01 5.755615e-02 9.985352e-01 9.552002e-03 -2.784265e-02 -2.196045e-01 3.330231e-03 9.755859e-01 -4.373806e-02 +9.707031e-01 -6.192017e-02 2.324219e-01 1.618943e-01 6.127930e-02 9.980469e-01 1.000214e-02 -2.699588e-02 -2.325439e-01 4.535675e-03 9.726562e-01 -4.780951e-02 +9.667969e-01 -6.536865e-02 2.479248e-01 1.708180e-01 6.494141e-02 9.980469e-01 9.773254e-03 -2.622528e-02 -2.480469e-01 6.656647e-03 9.687500e-01 -5.211049e-02 +9.638672e-01 -6.762695e-02 2.585449e-01 1.798092e-01 6.683350e-02 9.975586e-01 1.190186e-02 -2.553610e-02 -2.587891e-01 5.817413e-03 9.658203e-01 -5.661865e-02 +9.614258e-01 -6.866455e-02 2.661133e-01 1.887168e-01 6.829834e-02 9.975586e-01 1.059723e-02 
-2.484871e-02 -2.663574e-01 7.987976e-03 9.638672e-01 -6.125049e-02 +9.565430e-01 -7.250977e-02 2.817383e-01 1.973809e-01 7.159424e-02 9.975586e-01 1.361847e-02 -2.408154e-02 -2.819824e-01 7.137299e-03 9.594727e-01 -6.590027e-02 +9.521484e-01 -7.623291e-02 2.956543e-01 2.058671e-01 7.580566e-02 9.970703e-01 1.282501e-02 -2.315918e-02 -2.956543e-01 1.020813e-02 9.550781e-01 -7.056488e-02 +9.487305e-01 -7.904053e-02 3.054199e-01 2.143360e-01 7.934570e-02 9.965820e-01 1.139069e-02 -2.212968e-02 -3.051758e-01 1.342773e-02 9.521484e-01 -7.529957e-02 +9.448242e-01 -8.325195e-02 3.173828e-01 2.229053e-01 8.276367e-02 9.965820e-01 1.506042e-02 -2.108549e-02 -3.173828e-01 1.203156e-02 9.482422e-01 -8.015492e-02 +9.399414e-01 -8.770752e-02 3.293457e-01 2.316378e-01 8.642578e-02 9.960938e-01 1.847839e-02 -2.003706e-02 -3.298340e-01 1.110077e-02 9.438477e-01 -8.518171e-02 +9.375000e-01 -8.941650e-02 3.356934e-01 2.404814e-01 8.874512e-02 9.960938e-01 1.739502e-02 -1.901831e-02 -3.359375e-01 1.348114e-02 9.418945e-01 -9.037683e-02 +9.340820e-01 -9.375000e-02 3.444824e-01 2.492219e-01 9.210205e-02 9.956055e-01 2.128601e-02 -1.815422e-02 -3.449707e-01 1.184845e-02 9.384766e-01 -9.565657e-02 +9.277344e-01 -9.625244e-02 3.608398e-01 2.577033e-01 9.619141e-02 9.951172e-01 1.832581e-02 -1.753260e-02 -3.608398e-01 1.768494e-02 9.326172e-01 -1.009916e-01 +9.238281e-01 -1.035156e-01 3.681641e-01 2.659665e-01 9.997559e-02 9.946289e-01 2.873230e-02 -1.720627e-02 -3.691406e-01 1.026154e-02 9.291992e-01 -1.064260e-01 +9.179688e-01 -1.033325e-01 3.833008e-01 2.740619e-01 1.007080e-01 9.946289e-01 2.694702e-02 -1.708758e-02 -3.840332e-01 1.385498e-02 9.233398e-01 -1.119884e-01 +9.130859e-01 -1.052246e-01 3.935547e-01 2.821025e-01 1.023560e-01 9.941406e-01 2.822876e-02 -1.727668e-02 -3.942871e-01 1.450348e-02 9.189453e-01 -1.177078e-01 +9.077148e-01 -1.082764e-01 4.057617e-01 2.900447e-01 1.040649e-01 9.941406e-01 3.244019e-02 -1.788202e-02 -4.069824e-01 1.278687e-02 9.135742e-01 -1.235419e-01 +8.999023e-01 -1.160278e-01 4.199219e-01 2.977256e-01 1.074829e-01 9.931641e-01 4.403687e-02 -1.879977e-02 -4.221191e-01 5.496979e-03 9.067383e-01 -1.293681e-01 +8.930664e-01 -1.218872e-01 4.328613e-01 3.050131e-01 1.104736e-01 9.926758e-01 5.151367e-02 -1.967325e-02 -4.360352e-01 1.817703e-03 8.999023e-01 -1.350687e-01 +8.842773e-01 -1.295166e-01 4.487305e-01 3.118442e-01 1.139526e-01 9.916992e-01 6.170654e-02 -2.016136e-02 -4.528809e-01 -3.414154e-03 8.916016e-01 -1.405519e-01 +8.750000e-01 -1.341553e-01 4.648438e-01 3.182796e-01 1.176758e-01 9.907227e-01 6.457520e-02 -2.006306e-02 -4.692383e-01 -1.815796e-03 8.833008e-01 -1.458112e-01 +8.666992e-01 -1.376953e-01 4.797363e-01 3.244496e-01 1.201172e-01 9.907227e-01 6.726074e-02 -1.943791e-02 -4.843750e-01 -6.842613e-04 8.750000e-01 -1.509237e-01 +8.598633e-01 -1.403809e-01 4.912109e-01 3.304412e-01 1.211548e-01 9.902344e-01 7.098389e-02 -1.846383e-02 -4.963379e-01 -1.530647e-03 8.681641e-01 -1.560050e-01 +8.540039e-01 -1.440430e-01 5.000000e-01 3.362644e-01 1.242676e-01 9.897461e-01 7.293701e-02 -1.729061e-02 -5.053711e-01 -1.158714e-04 8.627930e-01 -1.611458e-01 +8.471680e-01 -1.472168e-01 5.102539e-01 3.418923e-01 1.260986e-01 9.892578e-01 7.586670e-02 -1.600938e-02 -5.156250e-01 6.514788e-05 8.564453e-01 -1.663832e-01 +8.398438e-01 -1.502686e-01 5.214844e-01 3.473617e-01 1.293945e-01 9.887695e-01 7.653809e-02 -1.467200e-02 -5.268555e-01 3.175735e-03 8.496094e-01 -1.717250e-01 +8.334961e-01 -1.529541e-01 5.307617e-01 3.527468e-01 1.315918e-01 9.882812e-01 7.824707e-02 
-1.333710e-02 -5.366211e-01 4.646301e-03 8.437500e-01 -1.771739e-01 +8.276367e-01 -1.562500e-01 5.390625e-01 3.580771e-01 1.335449e-01 9.877930e-01 8.123779e-02 -1.202258e-02 -5.454102e-01 4.795074e-03 8.383789e-01 -1.827008e-01 +8.212891e-01 -1.584473e-01 5.483398e-01 3.633605e-01 1.353760e-01 9.873047e-01 8.251953e-02 -1.072806e-02 -5.546875e-01 6.462097e-03 8.320312e-01 -1.882579e-01 +8.144531e-01 -1.617432e-01 5.571289e-01 3.686414e-01 1.374512e-01 9.868164e-01 8.544922e-02 -9.482061e-03 -5.634766e-01 6.958008e-03 8.261719e-01 -1.938252e-01 +8.085938e-01 -1.654053e-01 5.649414e-01 3.739749e-01 1.392822e-01 9.863281e-01 8.935547e-02 -8.318331e-03 -5.717773e-01 6.500244e-03 8.203125e-01 -1.994183e-01 +8.032227e-01 -1.707764e-01 5.703125e-01 3.793590e-01 1.428223e-01 9.853516e-01 9.387207e-02 -7.248329e-03 -5.781250e-01 6.000519e-03 8.159180e-01 -2.050252e-01 +7.978516e-01 -1.757812e-01 5.766602e-01 3.847315e-01 1.451416e-01 9.843750e-01 9.930420e-02 -6.272169e-03 -5.849609e-01 4.440308e-03 8.110352e-01 -2.105864e-01 +7.924805e-01 -1.796875e-01 5.830078e-01 3.900034e-01 1.467285e-01 9.838867e-01 1.037598e-01 -5.351949e-03 -5.922852e-01 3.345490e-03 8.056641e-01 -2.160502e-01 +7.871094e-01 -1.842041e-01 5.883789e-01 3.950808e-01 1.489258e-01 9.829102e-01 1.085205e-01 -4.440089e-03 -5.986328e-01 2.246857e-03 8.012695e-01 -2.213548e-01 +7.822266e-01 -1.881104e-01 5.942383e-01 3.998988e-01 1.496582e-01 9.819336e-01 1.140137e-01 -3.477698e-03 -6.049805e-01 -2.851486e-04 7.963867e-01 -2.264637e-01 +7.773438e-01 -1.911621e-01 5.996094e-01 4.044418e-01 1.506348e-01 9.814453e-01 1.177368e-01 -2.417356e-03 -6.108398e-01 -1.223564e-03 7.915039e-01 -2.313983e-01 +7.719727e-01 -1.932373e-01 6.054688e-01 4.087583e-01 1.517334e-01 9.809570e-01 1.196899e-01 -1.275958e-03 -6.171875e-01 -5.583763e-04 7.866211e-01 -2.362327e-01 +7.690430e-01 -1.961670e-01 6.083984e-01 4.129265e-01 1.529541e-01 9.804688e-01 1.229858e-01 -1.106907e-04 -6.210938e-01 -1.505852e-03 7.836914e-01 -2.410668e-01 +7.666016e-01 -1.990967e-01 6.108398e-01 4.169759e-01 1.541748e-01 9.799805e-01 1.260986e-01 1.050353e-03 -6.235352e-01 -2.477646e-03 7.817383e-01 -2.459361e-01 +7.636719e-01 -2.008057e-01 6.137695e-01 4.209334e-01 1.546631e-01 9.794922e-01 1.280518e-01 2.214692e-03 -6.269531e-01 -2.948761e-03 7.792969e-01 -2.508535e-01 +7.612305e-01 -2.012939e-01 6.166992e-01 4.248546e-01 1.544189e-01 9.794922e-01 1.291504e-01 3.394352e-03 -6.298828e-01 -3.141403e-03 7.768555e-01 -2.558288e-01 +7.583008e-01 -2.031250e-01 6.196289e-01 4.287976e-01 1.558838e-01 9.790039e-01 1.301270e-01 4.592421e-03 -6.333008e-01 -2.099991e-03 7.739258e-01 -2.608716e-01 +7.548828e-01 -2.060547e-01 6.225586e-01 4.327853e-01 1.578369e-01 9.785156e-01 1.325684e-01 5.808662e-03 -6.367188e-01 -1.753807e-03 7.714844e-01 -2.659624e-01 +7.519531e-01 -2.082520e-01 6.254883e-01 4.367672e-01 1.600342e-01 9.780273e-01 1.334229e-01 7.058893e-03 -6.391602e-01 -2.582073e-04 7.690430e-01 -2.710426e-01 +7.480469e-01 -2.097168e-01 6.298828e-01 4.406795e-01 1.612549e-01 9.775391e-01 1.340332e-01 8.330889e-03 -6.440430e-01 1.359940e-03 7.651367e-01 -2.760566e-01 +7.441406e-01 -2.113037e-01 6.333008e-01 4.445271e-01 1.618652e-01 9.775391e-01 1.358643e-01 9.620654e-03 -6.479492e-01 1.398087e-03 7.617188e-01 -2.809807e-01 +7.402344e-01 -2.136230e-01 6.372070e-01 4.483495e-01 1.621094e-01 9.770508e-01 1.391602e-01 1.094582e-02 -6.523438e-01 2.944469e-04 7.578125e-01 -2.858305e-01 +7.373047e-01 -2.171631e-01 6.396484e-01 4.521522e-01 1.639404e-01 9.760742e-01 1.424561e-01 
1.231175e-02 -6.552734e-01 -1.850128e-04 7.553711e-01 -2.906567e-01 +7.329102e-01 -2.204590e-01 6.440430e-01 4.558377e-01 1.661377e-01 9.755859e-01 1.448975e-01 1.366942e-02 -6.601562e-01 7.715225e-04 7.514648e-01 -2.954618e-01 +7.290039e-01 -2.247314e-01 6.469727e-01 4.593252e-01 1.679688e-01 9.746094e-01 1.492920e-01 1.495337e-02 -6.635742e-01 -1.434088e-04 7.480469e-01 -3.002179e-01 +7.246094e-01 -2.258301e-01 6.513672e-01 4.626448e-01 1.690674e-01 9.741211e-01 1.497803e-01 1.615648e-02 -6.684570e-01 1.615524e-03 7.436523e-01 -3.049204e-01 +7.207031e-01 -2.274170e-01 6.552734e-01 4.659053e-01 1.697998e-01 9.736328e-01 1.513672e-01 1.726299e-02 -6.723633e-01 2.126694e-03 7.402344e-01 -3.095355e-01 +7.187500e-01 -2.293701e-01 6.562500e-01 4.692148e-01 1.718750e-01 9.731445e-01 1.518555e-01 1.828887e-02 -6.738281e-01 3.681183e-03 7.387695e-01 -3.140073e-01 +7.158203e-01 -2.307129e-01 6.591797e-01 4.726225e-01 1.738281e-01 9.731445e-01 1.519775e-01 1.921293e-02 -6.762695e-01 5.825043e-03 7.363281e-01 -3.182822e-01 +7.124023e-01 -2.333984e-01 6.616211e-01 4.760862e-01 1.759033e-01 9.721680e-01 1.536865e-01 1.995396e-02 -6.791992e-01 6.900787e-03 7.338867e-01 -3.222998e-01 +7.280273e-01 -2.298584e-01 6.459961e-01 4.792407e-01 1.745605e-01 9.731445e-01 1.496582e-01 2.043251e-02 -6.630859e-01 3.870010e-03 7.485352e-01 -3.258656e-01 +7.114258e-01 -2.365723e-01 6.621094e-01 4.815054e-01 1.723633e-01 9.716797e-01 1.621094e-01 2.068942e-02 -6.816406e-01 -1.184464e-03 7.319336e-01 -3.287297e-01 +7.207031e-01 -2.338867e-01 6.523438e-01 4.831375e-01 1.730957e-01 9.721680e-01 1.574707e-01 2.106360e-02 -6.713867e-01 -5.326271e-04 7.412109e-01 -3.311692e-01 +7.226562e-01 -2.318115e-01 6.513672e-01 4.842061e-01 1.729736e-01 9.726562e-01 1.542969e-01 2.149552e-02 -6.689453e-01 1.133919e-03 7.431641e-01 -3.333139e-01 +7.255859e-01 -2.336426e-01 6.469727e-01 4.848570e-01 1.732178e-01 9.721680e-01 1.568604e-01 2.183964e-02 -6.660156e-01 -1.768112e-03 7.460938e-01 -3.352700e-01 +7.260742e-01 -2.362061e-01 6.459961e-01 4.853542e-01 1.729736e-01 9.716797e-01 1.610107e-01 2.221819e-02 -6.655273e-01 -5.180359e-03 7.465820e-01 -3.371474e-01 +7.304688e-01 -2.312012e-01 6.425781e-01 4.860223e-01 1.729736e-01 9.731445e-01 1.534424e-01 2.280738e-02 -6.606445e-01 -9.088516e-04 7.504883e-01 -3.391138e-01 +7.333984e-01 -2.307129e-01 6.396484e-01 4.870930e-01 1.728516e-01 9.731445e-01 1.528320e-01 2.353175e-02 -6.577148e-01 -1.466751e-03 7.534180e-01 -3.412916e-01 +7.358398e-01 -2.286377e-01 6.372070e-01 4.887289e-01 1.723633e-01 9.736328e-01 1.502686e-01 2.435179e-02 -6.547852e-01 -7.486343e-04 7.558594e-01 -3.437630e-01 +7.387695e-01 -2.274170e-01 6.342773e-01 4.909241e-01 1.726074e-01 9.736328e-01 1.481934e-01 2.523415e-02 -6.513672e-01 4.881620e-05 7.587891e-01 -3.465097e-01 +7.534180e-01 -2.220459e-01 6.186523e-01 4.932588e-01 1.715088e-01 9.750977e-01 1.409912e-01 2.606074e-02 -6.347656e-01 -1.256466e-04 7.729492e-01 -3.492149e-01 +7.543945e-01 -2.221680e-01 6.176758e-01 4.949560e-01 1.722412e-01 9.750977e-01 1.403809e-01 2.661970e-02 -6.333008e-01 5.607605e-04 7.739258e-01 -3.512419e-01 +7.553711e-01 -2.213135e-01 6.166992e-01 4.957172e-01 1.721191e-01 9.750977e-01 1.390381e-01 2.684634e-02 -6.323242e-01 1.159668e-03 7.749023e-01 -3.523687e-01 +7.553711e-01 -2.214355e-01 6.166992e-01 4.956375e-01 1.721191e-01 9.750977e-01 1.394043e-01 2.677813e-02 -6.323242e-01 7.581711e-04 7.749023e-01 -3.527351e-01 +7.534180e-01 -2.232666e-01 6.181641e-01 4.950098e-01 1.726074e-01 9.746094e-01 1.416016e-01 2.652974e-02 
-6.342773e-01 4.100800e-05 7.729492e-01 -3.525390e-01 +7.539062e-01 -2.227783e-01 6.181641e-01 4.941714e-01 1.716309e-01 9.750977e-01 1.420898e-01 2.624837e-02 -6.342773e-01 -1.063347e-03 7.729492e-01 -3.519519e-01 +7.529297e-01 -2.225342e-01 6.196289e-01 4.933681e-01 1.711426e-01 9.750977e-01 1.423340e-01 2.604116e-02 -6.357422e-01 -1.109123e-03 7.719727e-01 -3.512011e-01 +7.524414e-01 -2.222900e-01 6.196289e-01 4.927704e-01 1.708984e-01 9.750977e-01 1.422119e-01 2.595259e-02 -6.357422e-01 -1.128197e-03 7.719727e-01 -3.505194e-01 +7.534180e-01 -2.220459e-01 6.186523e-01 4.924189e-01 1.706543e-01 9.750977e-01 1.419678e-01 2.597104e-02 -6.347656e-01 -1.372337e-03 7.724609e-01 -3.500356e-01 +7.524414e-01 -2.214355e-01 6.206055e-01 4.922456e-01 1.705322e-01 9.750977e-01 1.412354e-01 2.605168e-02 -6.362305e-01 -5.078316e-04 7.714844e-01 -3.497700e-01 +7.524414e-01 -2.204590e-01 6.206055e-01 4.921620e-01 1.700439e-01 9.755859e-01 1.402588e-01 2.612223e-02 -6.362305e-01 7.432699e-05 7.714844e-01 -3.496799e-01 +7.529297e-01 -2.203369e-01 6.201172e-01 4.920409e-01 1.699219e-01 9.755859e-01 1.402588e-01 2.612086e-02 -6.357422e-01 -2.399683e-04 7.714844e-01 -3.497033e-01 +7.529297e-01 -2.203369e-01 6.201172e-01 4.917069e-01 1.697998e-01 9.755859e-01 1.403809e-01 2.600883e-02 -6.357422e-01 -3.745556e-04 7.719727e-01 -3.497737e-01 +7.534180e-01 -2.204590e-01 6.196289e-01 4.909908e-01 1.696777e-01 9.755859e-01 1.407471e-01 2.575765e-02 -6.352539e-01 -9.050369e-04 7.724609e-01 -3.498209e-01 +7.548828e-01 -2.194824e-01 6.181641e-01 4.898762e-01 1.694336e-01 9.755859e-01 1.395264e-01 2.537417e-02 -6.337891e-01 -5.130768e-04 7.734375e-01 -3.498076e-01 +7.387695e-01 -2.264404e-01 6.342773e-01 4.887044e-01 1.663818e-01 9.741211e-01 1.539307e-01 2.499299e-02 -6.528320e-01 -8.186340e-03 7.573242e-01 -3.497360e-01 +7.509766e-01 -2.213135e-01 6.220703e-01 4.881077e-01 1.695557e-01 9.750977e-01 1.423340e-01 2.488735e-02 -6.376953e-01 -1.484871e-03 7.700195e-01 -3.496602e-01 +7.431641e-01 -2.246094e-01 6.298828e-01 4.880130e-01 1.683350e-01 9.746094e-01 1.488037e-01 2.495244e-02 -6.474609e-01 -4.554749e-03 7.622070e-01 -3.496630e-01 +7.446289e-01 -2.236328e-01 6.289062e-01 4.884165e-01 1.690674e-01 9.746094e-01 1.463623e-01 2.513752e-02 -6.455078e-01 -2.653122e-03 7.636719e-01 -3.497473e-01 +7.529297e-01 -2.203369e-01 6.201172e-01 4.890258e-01 1.690674e-01 9.755859e-01 1.412354e-01 2.531414e-02 -6.362305e-01 -1.447678e-03 7.714844e-01 -3.498885e-01 +7.519531e-01 -2.198486e-01 6.210938e-01 4.893309e-01 1.685791e-01 9.755859e-01 1.411133e-01 2.534895e-02 -6.372070e-01 -1.368523e-03 7.709961e-01 -3.500759e-01 +7.441406e-01 -2.214355e-01 6.303711e-01 4.893427e-01 1.672363e-01 9.750977e-01 1.452637e-01 2.528646e-02 -6.464844e-01 -2.738953e-03 7.626953e-01 -3.502671e-01 +7.412109e-01 -2.227783e-01 6.328125e-01 4.894667e-01 1.666260e-01 9.750977e-01 1.478271e-01 2.529816e-02 -6.499023e-01 -4.131317e-03 7.597656e-01 -3.504178e-01 +7.529297e-01 -2.196045e-01 6.206055e-01 4.896569e-01 1.688232e-01 9.755859e-01 1.403809e-01 2.538927e-02 -6.362305e-01 -8.525848e-04 7.714844e-01 -3.505319e-01 +7.495117e-01 -2.197266e-01 6.240234e-01 4.892611e-01 1.688232e-01 9.755859e-01 1.407471e-01 2.527503e-02 -6.401367e-01 -1.939535e-04 7.685547e-01 -3.506750e-01 +7.490234e-01 -2.202148e-01 6.250000e-01 4.879594e-01 1.687012e-01 9.755859e-01 1.414795e-01 2.472993e-02 -6.406250e-01 -5.693436e-04 7.675781e-01 -3.508408e-01 +7.416992e-01 -2.227783e-01 6.323242e-01 4.858066e-01 1.672363e-01 9.750977e-01 1.472168e-01 2.371686e-02 -6.494141e-01 
-3.438950e-03 7.602539e-01 -3.509538e-01 +7.348633e-01 -2.259521e-01 6.391602e-01 4.832903e-01 1.660156e-01 9.741211e-01 1.534424e-01 2.242854e-02 -6.572266e-01 -6.626129e-03 7.534180e-01 -3.508959e-01 +7.304688e-01 -2.286377e-01 6.435547e-01 4.809945e-01 1.666260e-01 9.736328e-01 1.567383e-01 2.115078e-02 -6.625977e-01 -7.202148e-03 7.490234e-01 -3.506138e-01 +7.260742e-01 -2.336426e-01 6.469727e-01 4.793386e-01 1.658936e-01 9.721680e-01 1.649170e-01 2.015367e-02 -6.674805e-01 -1.245117e-02 7.446289e-01 -3.501612e-01 +7.255859e-01 -2.309570e-01 6.479492e-01 4.785476e-01 1.658936e-01 9.731445e-01 1.610107e-01 1.967606e-02 -6.679688e-01 -9.307861e-03 7.441406e-01 -3.497132e-01 +7.231445e-01 -2.344971e-01 6.494141e-01 4.786866e-01 1.691895e-01 9.721680e-01 1.625977e-01 1.973077e-02 -6.694336e-01 -7.778168e-03 7.426758e-01 -3.493968e-01 +7.304688e-01 -2.314453e-01 6.425781e-01 4.796008e-01 1.682129e-01 9.726562e-01 1.590576e-01 2.023692e-02 -6.621094e-01 -8.079529e-03 7.495117e-01 -3.492872e-01 +7.363281e-01 -2.220459e-01 6.391602e-01 4.807829e-01 1.658936e-01 9.750977e-01 1.477051e-01 2.099294e-02 -6.562500e-01 -2.683640e-03 7.548828e-01 -3.493380e-01 +7.377930e-01 -2.205811e-01 6.381836e-01 4.818152e-01 1.661377e-01 9.755859e-01 1.450195e-01 2.168489e-02 -6.542969e-01 -9.298325e-04 7.563477e-01 -3.494183e-01 +7.397461e-01 -2.203369e-01 6.357422e-01 4.825812e-01 1.658936e-01 9.755859e-01 1.450195e-01 2.216799e-02 -6.518555e-01 -1.802444e-03 7.583008e-01 -3.494092e-01 +7.343750e-01 -2.207031e-01 6.416016e-01 4.831719e-01 1.656494e-01 9.755859e-01 1.457520e-01 2.244807e-02 -6.582031e-01 -7.762909e-04 7.529297e-01 -3.492314e-01 +7.475586e-01 -2.148438e-01 6.284180e-01 4.836701e-01 1.673584e-01 9.765625e-01 1.347656e-01 2.248721e-02 -6.425781e-01 4.402161e-03 7.661133e-01 -3.488959e-01 +7.465820e-01 -2.150879e-01 6.298828e-01 4.837827e-01 1.658936e-01 9.765625e-01 1.368408e-01 2.214289e-02 -6.445312e-01 2.368927e-03 7.646484e-01 -3.484042e-01 +7.431641e-01 -2.163086e-01 6.328125e-01 4.835241e-01 1.657715e-01 9.765625e-01 1.389160e-01 2.152711e-02 -6.479492e-01 1.678467e-03 7.617188e-01 -3.477936e-01 +7.441406e-01 -2.135010e-01 6.328125e-01 4.831516e-01 1.650391e-01 9.770508e-01 1.353760e-01 2.085027e-02 -6.474609e-01 3.768921e-03 7.622070e-01 -3.471519e-01 +7.485352e-01 -2.106934e-01 6.289062e-01 4.827603e-01 1.651611e-01 9.775391e-01 1.308594e-01 2.021725e-02 -6.425781e-01 5.920410e-03 7.666016e-01 -3.465223e-01 +7.480469e-01 -2.103271e-01 6.293945e-01 4.823418e-01 1.641846e-01 9.775391e-01 1.314697e-01 1.967998e-02 -6.430664e-01 5.035400e-03 7.656250e-01 -3.458756e-01 +7.490234e-01 -2.059326e-01 6.298828e-01 4.819987e-01 1.635742e-01 9.785156e-01 1.253662e-01 1.932058e-02 -6.420898e-01 9.078979e-03 7.666016e-01 -3.452079e-01 +7.504883e-01 -2.047119e-01 6.279297e-01 4.818415e-01 1.646729e-01 9.785156e-01 1.221313e-01 1.911694e-02 -6.396484e-01 1.177979e-02 7.685547e-01 -3.445635e-01 +7.534180e-01 -2.021484e-01 6.254883e-01 4.819276e-01 1.651611e-01 9.790039e-01 1.175537e-01 1.896786e-02 -6.362305e-01 1.473999e-02 7.709961e-01 -3.439468e-01 +7.563477e-01 -1.986084e-01 6.230469e-01 4.822498e-01 1.630859e-01 9.799805e-01 1.142578e-01 1.877895e-02 -6.333008e-01 1.523590e-02 7.739258e-01 -3.432977e-01 +7.602539e-01 -1.951904e-01 6.196289e-01 4.828165e-01 1.619873e-01 9.804688e-01 1.101074e-01 1.854035e-02 -6.293945e-01 1.667786e-02 7.768555e-01 -3.425455e-01 +7.631836e-01 -1.910400e-01 6.171875e-01 4.836204e-01 1.590576e-01 9.814453e-01 1.071777e-01 1.829696e-02 -6.264648e-01 1.638794e-02 
7.792969e-01 -3.416564e-01 +7.675781e-01 -1.884766e-01 6.127930e-01 4.846469e-01 1.585693e-01 9.819336e-01 1.034546e-01 1.812558e-02 -6.210938e-01 1.776123e-02 7.836914e-01 -3.406030e-01 +7.709961e-01 -1.862793e-01 6.088867e-01 4.858396e-01 1.572266e-01 9.824219e-01 1.015625e-01 1.805813e-02 -6.171875e-01 1.744080e-02 7.866211e-01 -3.394274e-01 +7.763672e-01 -1.835938e-01 6.030273e-01 4.871154e-01 1.569824e-01 9.829102e-01 9.710693e-02 1.808752e-02 -6.103516e-01 1.927185e-02 7.919922e-01 -3.381976e-01 +7.792969e-01 -1.839600e-01 5.986328e-01 4.883161e-01 1.584473e-01 9.829102e-01 9.576416e-02 1.813405e-02 -6.059570e-01 2.018738e-02 7.954102e-01 -3.370036e-01 +7.827148e-01 -1.822510e-01 5.952148e-01 4.893759e-01 1.572266e-01 9.829102e-01 9.436035e-02 1.812912e-02 -6.020508e-01 1.963806e-02 7.983398e-01 -3.358939e-01 +7.856445e-01 -1.817627e-01 5.913086e-01 4.902824e-01 1.577148e-01 9.829102e-01 9.265137e-02 1.806411e-02 -5.981445e-01 2.041626e-02 8.012695e-01 -3.348736e-01 +7.885742e-01 -1.804199e-01 5.878906e-01 4.910507e-01 1.571045e-01 9.833984e-01 9.094238e-02 1.794708e-02 -5.947266e-01 2.067566e-02 8.037109e-01 -3.339497e-01 +7.915039e-01 -1.791992e-01 5.839844e-01 4.916993e-01 1.566162e-01 9.833984e-01 8.947754e-02 1.780250e-02 -5.908203e-01 2.064514e-02 8.066406e-01 -3.331127e-01 +7.939453e-01 -1.779785e-01 5.815430e-01 4.922191e-01 1.560059e-01 9.838867e-01 8.813477e-02 1.765095e-02 -5.878906e-01 2.078247e-02 8.085938e-01 -3.323590e-01 +7.958984e-01 -1.765137e-01 5.791016e-01 4.926174e-01 1.551514e-01 9.838867e-01 8.673096e-02 1.750935e-02 -5.849609e-01 2.085876e-02 8.105469e-01 -3.316972e-01 +7.978516e-01 -1.759033e-01 5.766602e-01 4.929391e-01 1.552734e-01 9.843750e-01 8.538818e-02 1.738189e-02 -5.825195e-01 2.143860e-02 8.125000e-01 -3.311203e-01 +7.993164e-01 -1.751709e-01 5.747070e-01 4.932342e-01 1.545410e-01 9.843750e-01 8.502197e-02 1.726073e-02 -5.805664e-01 2.082825e-02 8.139648e-01 -3.305910e-01 +8.012695e-01 -1.749268e-01 5.722656e-01 4.935406e-01 1.545410e-01 9.843750e-01 8.459473e-02 1.714358e-02 -5.781250e-01 2.064514e-02 8.159180e-01 -3.300724e-01 +8.027344e-01 -1.743164e-01 5.703125e-01 4.938647e-01 1.542969e-01 9.843750e-01 8.374023e-02 1.701874e-02 -5.756836e-01 2.072144e-02 8.173828e-01 -3.295529e-01 +8.046875e-01 -1.734619e-01 5.678711e-01 4.941936e-01 1.535645e-01 9.848633e-01 8.300781e-02 1.688152e-02 -5.737305e-01 2.043152e-02 8.188477e-01 -3.290349e-01 +8.066406e-01 -1.724854e-01 5.654297e-01 4.944859e-01 1.530762e-01 9.848633e-01 8.203125e-02 1.673603e-02 -5.712891e-01 2.037048e-02 8.208008e-01 -3.285140e-01 +8.085938e-01 -1.719971e-01 5.629883e-01 4.946806e-01 1.528320e-01 9.848633e-01 8.142090e-02 1.657624e-02 -5.683594e-01 2.017212e-02 8.227539e-01 -3.280041e-01 +8.100586e-01 -1.715088e-01 5.610352e-01 4.947142e-01 1.524658e-01 9.848633e-01 8.093262e-02 1.639278e-02 -5.664062e-01 2.000427e-02 8.237305e-01 -3.275358e-01 +8.105469e-01 -1.713867e-01 5.600586e-01 4.945593e-01 1.520996e-01 9.848633e-01 8.117676e-02 1.618780e-02 -5.654297e-01 1.940918e-02 8.242188e-01 -3.271184e-01 +8.110352e-01 -1.705322e-01 5.595703e-01 4.942546e-01 1.517334e-01 9.853516e-01 8.050537e-02 1.596873e-02 -5.649414e-01 1.960754e-02 8.247070e-01 -3.267467e-01 +8.110352e-01 -1.705322e-01 5.595703e-01 4.938658e-01 1.513672e-01 9.853516e-01 8.099365e-02 1.574762e-02 -5.649414e-01 1.893616e-02 8.247070e-01 -3.263935e-01 +8.115234e-01 -1.697998e-01 5.585938e-01 4.934632e-01 1.507568e-01 9.853516e-01 8.044434e-02 1.554289e-02 -5.644531e-01 1.895142e-02 8.251953e-01 -3.260182e-01 
+8.125000e-01 -1.693115e-01 5.576172e-01 4.930895e-01 1.506348e-01 9.853516e-01 7.971191e-02 1.535807e-02 -5.629883e-01 1.921082e-02 8.261719e-01 -3.256204e-01 +8.129883e-01 -1.689453e-01 5.566406e-01 4.927517e-01 1.502686e-01 9.853516e-01 7.965088e-02 1.518611e-02 -5.620117e-01 1.889038e-02 8.266602e-01 -3.252642e-01 +8.134766e-01 -1.688232e-01 5.566406e-01 4.924491e-01 1.502686e-01 9.853516e-01 7.928467e-02 1.502597e-02 -5.620117e-01 1.919556e-02 8.271484e-01 -3.250274e-01 +8.139648e-01 -1.688232e-01 5.556641e-01 4.921962e-01 1.502686e-01 9.853516e-01 7.916260e-02 1.487794e-02 -5.610352e-01 1.907349e-02 8.276367e-01 -3.249375e-01 +8.139648e-01 -1.693115e-01 5.556641e-01 4.920276e-01 1.501465e-01 9.853516e-01 8.020020e-02 1.475044e-02 -5.610352e-01 1.818848e-02 8.276367e-01 -3.249721e-01 +8.139648e-01 -1.693115e-01 5.551758e-01 4.919553e-01 1.502686e-01 9.853516e-01 8.013916e-02 1.465135e-02 -5.610352e-01 1.818848e-02 8.276367e-01 -3.250518e-01 +8.154297e-01 -1.702881e-01 5.537109e-01 4.919469e-01 1.507568e-01 9.853516e-01 8.105469e-02 1.457627e-02 -5.590820e-01 1.737976e-02 8.286133e-01 -3.250896e-01 +8.144531e-01 -1.691895e-01 5.551758e-01 4.919381e-01 1.499023e-01 9.853516e-01 8.032227e-02 1.452796e-02 -5.610352e-01 1.782227e-02 8.276367e-01 -3.250337e-01 +8.144531e-01 -1.687012e-01 5.551758e-01 4.919209e-01 1.496582e-01 9.853516e-01 7.989502e-02 1.451479e-02 -5.605469e-01 1.799011e-02 8.281250e-01 -3.248926e-01 +8.144531e-01 -1.687012e-01 5.556641e-01 4.919195e-01 1.497803e-01 9.853516e-01 7.971191e-02 1.454044e-02 -5.610352e-01 1.832581e-02 8.276367e-01 -3.246963e-01 +8.149414e-01 -1.678467e-01 5.546875e-01 4.919564e-01 1.495361e-01 9.858398e-01 7.855225e-02 1.459898e-02 -5.600586e-01 1.895142e-02 8.286133e-01 -3.244701e-01 +8.149414e-01 -1.676025e-01 5.551758e-01 4.920254e-01 1.495361e-01 9.858398e-01 7.806396e-02 1.467753e-02 -5.600586e-01 1.937866e-02 8.281250e-01 -3.242403e-01 +8.149414e-01 -1.672363e-01 5.546875e-01 4.921471e-01 1.492920e-01 9.858398e-01 7.775879e-02 1.477103e-02 -5.595703e-01 1.939392e-02 8.286133e-01 -3.240307e-01 +8.154297e-01 -1.673584e-01 5.541992e-01 4.923254e-01 1.500244e-01 9.858398e-01 7.696533e-02 1.486341e-02 -5.590820e-01 2.035522e-02 8.291016e-01 -3.238587e-01 +8.159180e-01 -1.671143e-01 5.532227e-01 4.925544e-01 1.495361e-01 9.858398e-01 7.720947e-02 1.492975e-02 -5.585938e-01 1.974487e-02 8.291016e-01 -3.237510e-01 +8.178711e-01 -1.691895e-01 5.498047e-01 4.927962e-01 1.519775e-01 9.853516e-01 7.714844e-02 1.495209e-02 -5.551758e-01 2.046204e-02 8.315430e-01 -3.236707e-01 +8.193359e-01 -1.666260e-01 5.483398e-01 4.929578e-01 1.488037e-01 9.858398e-01 7.720947e-02 1.490700e-02 -5.532227e-01 1.826477e-02 8.325195e-01 -3.235574e-01 +8.168945e-01 -1.667480e-01 5.517578e-01 4.930092e-01 1.492920e-01 9.858398e-01 7.690430e-02 1.483301e-02 -5.566406e-01 1.957703e-02 8.305664e-01 -3.234195e-01 +8.173828e-01 -1.665039e-01 5.517578e-01 4.930061e-01 1.489258e-01 9.858398e-01 7.696533e-02 1.475583e-02 -5.566406e-01 1.930237e-02 8.305664e-01 -3.233192e-01 +8.178711e-01 -1.665039e-01 5.507812e-01 4.929834e-01 1.488037e-01 9.858398e-01 7.714844e-02 1.469062e-02 -5.561523e-01 1.890564e-02 8.310547e-01 -3.232807e-01 +8.183594e-01 -1.669922e-01 5.502930e-01 4.929537e-01 1.491699e-01 9.858398e-01 7.733154e-02 1.464290e-02 -5.551758e-01 1.882935e-02 8.315430e-01 -3.233088e-01 +8.178711e-01 -1.671143e-01 5.502930e-01 4.929402e-01 1.491699e-01 9.858398e-01 7.751465e-02 1.460690e-02 -5.556641e-01 1.870728e-02 8.310547e-01 -3.233849e-01 +8.183594e-01 -1.671143e-01 
5.498047e-01 4.929882e-01 1.490479e-01 9.858398e-01 7.775879e-02 1.458071e-02 -5.551758e-01 1.834106e-02 8.315430e-01 -3.234428e-01 +8.188477e-01 -1.673584e-01 5.488281e-01 4.931011e-01 1.495361e-01 9.858398e-01 7.739258e-02 1.455979e-02 -5.541992e-01 1.876831e-02 8.320312e-01 -3.234328e-01 +8.188477e-01 -1.668701e-01 5.488281e-01 4.932460e-01 1.486816e-01 9.858398e-01 7.781982e-02 1.453510e-02 -5.541992e-01 1.791382e-02 8.320312e-01 -3.233301e-01 +8.193359e-01 -1.665039e-01 5.488281e-01 4.934052e-01 1.484375e-01 9.858398e-01 7.751465e-02 1.452515e-02 -5.537109e-01 1.791382e-02 8.325195e-01 -3.231516e-01 +8.198242e-01 -1.663818e-01 5.478516e-01 4.935724e-01 1.483154e-01 9.858398e-01 7.751465e-02 1.454262e-02 -5.532227e-01 1.771545e-02 8.330078e-01 -3.229417e-01 +8.198242e-01 -1.657715e-01 5.483398e-01 4.937595e-01 1.480713e-01 9.858398e-01 7.678223e-02 1.458882e-02 -5.532227e-01 1.824951e-02 8.325195e-01 -3.227509e-01 +8.203125e-01 -1.658936e-01 5.473633e-01 4.939776e-01 1.481934e-01 9.858398e-01 7.666016e-02 1.464844e-02 -5.527344e-01 1.824951e-02 8.334961e-01 -3.226283e-01 +8.208008e-01 -1.656494e-01 5.468750e-01 4.941906e-01 1.483154e-01 9.858398e-01 7.623291e-02 1.469390e-02 -5.517578e-01 1.850891e-02 8.339844e-01 -3.225526e-01 +8.208008e-01 -1.654053e-01 5.468750e-01 4.943555e-01 1.480713e-01 9.858398e-01 7.604980e-02 1.470076e-02 -5.517578e-01 1.847839e-02 8.339844e-01 -3.224830e-01 +8.212891e-01 -1.654053e-01 5.458984e-01 4.944665e-01 1.478271e-01 9.858398e-01 7.641602e-02 1.467057e-02 -5.512695e-01 1.795959e-02 8.339844e-01 -3.223941e-01 +8.212891e-01 -1.654053e-01 5.458984e-01 4.945433e-01 1.477051e-01 9.858398e-01 7.659912e-02 1.462719e-02 -5.507812e-01 1.771545e-02 8.344727e-01 -3.222732e-01 +8.212891e-01 -1.651611e-01 5.458984e-01 4.946176e-01 1.474609e-01 9.863281e-01 7.653809e-02 1.459141e-02 -5.507812e-01 1.762390e-02 8.344727e-01 -3.221239e-01 +8.222656e-01 -1.646729e-01 5.449219e-01 4.947074e-01 1.472168e-01 9.863281e-01 7.586670e-02 1.456863e-02 -5.498047e-01 1.786804e-02 8.349609e-01 -3.219540e-01 +8.222656e-01 -1.640625e-01 5.444336e-01 4.948089e-01 1.472168e-01 9.863281e-01 7.464600e-02 1.454582e-02 -5.493164e-01 1.878357e-02 8.354492e-01 -3.217809e-01 +8.232422e-01 -1.640625e-01 5.434570e-01 4.949150e-01 1.468506e-01 9.863281e-01 7.525635e-02 1.450603e-02 -5.483398e-01 1.789856e-02 8.359375e-01 -3.216289e-01 +8.237305e-01 -1.640625e-01 5.429688e-01 4.949906e-01 1.469727e-01 9.863281e-01 7.513428e-02 1.446044e-02 -5.478516e-01 1.789856e-02 8.364258e-01 -3.215308e-01 +8.237305e-01 -1.645508e-01 5.424805e-01 4.950141e-01 1.468506e-01 9.863281e-01 7.617188e-02 1.442231e-02 -5.473633e-01 1.693726e-02 8.364258e-01 -3.214965e-01 +8.237305e-01 -1.646729e-01 5.429688e-01 4.949862e-01 1.470947e-01 9.863281e-01 7.598877e-02 1.441156e-02 -5.478516e-01 1.725769e-02 8.364258e-01 -3.215179e-01 +8.232422e-01 -1.651611e-01 5.429688e-01 4.949208e-01 1.475830e-01 9.863281e-01 7.617188e-02 1.442684e-02 -5.478516e-01 1.742554e-02 8.364258e-01 -3.215612e-01 +8.232422e-01 -1.652832e-01 5.429688e-01 4.948428e-01 1.477051e-01 9.863281e-01 7.629395e-02 1.445271e-02 -5.478516e-01 1.737976e-02 8.364258e-01 -3.216090e-01 +8.232422e-01 -1.655273e-01 5.429688e-01 4.947719e-01 1.480713e-01 9.858398e-01 7.629395e-02 1.447074e-02 -5.478516e-01 1.756287e-02 8.364258e-01 -3.216626e-01 +8.237305e-01 -1.651611e-01 5.424805e-01 4.947194e-01 1.474609e-01 9.863281e-01 7.629395e-02 1.447015e-02 -5.478516e-01 1.721191e-02 8.364258e-01 -3.217191e-01 +8.232422e-01 -1.652832e-01 5.429688e-01 4.946605e-01 
1.477051e-01 9.858398e-01 7.623291e-02 1.445151e-02 -5.478516e-01 1.739502e-02 8.364258e-01 -3.217413e-01 +8.232422e-01 -1.654053e-01 5.434570e-01 4.945937e-01 1.478271e-01 9.858398e-01 7.623291e-02 1.441382e-02 -5.483398e-01 1.751709e-02 8.359375e-01 -3.217247e-01 +8.232422e-01 -1.650391e-01 5.429688e-01 4.945394e-01 1.474609e-01 9.863281e-01 7.629395e-02 1.436527e-02 -5.483398e-01 1.728821e-02 8.359375e-01 -3.216814e-01 +8.232422e-01 -1.651611e-01 5.429688e-01 4.944987e-01 1.474609e-01 9.863281e-01 7.635498e-02 1.432208e-02 -5.483398e-01 1.724243e-02 8.359375e-01 -3.216166e-01 +8.232422e-01 -1.650391e-01 5.434570e-01 4.944469e-01 1.470947e-01 9.863281e-01 7.666016e-02 1.429363e-02 -5.488281e-01 1.679993e-02 8.359375e-01 -3.215344e-01 +8.232422e-01 -1.654053e-01 5.434570e-01 4.943599e-01 1.475830e-01 9.858398e-01 7.666016e-02 1.427568e-02 -5.483398e-01 1.707458e-02 8.359375e-01 -3.214384e-01 +8.227539e-01 -1.656494e-01 5.434570e-01 4.942431e-01 1.479492e-01 9.858398e-01 7.647705e-02 1.424781e-02 -5.483398e-01 1.745605e-02 8.359375e-01 -3.213580e-01 +8.227539e-01 -1.652832e-01 5.439453e-01 4.941359e-01 1.474609e-01 9.863281e-01 7.659912e-02 1.419939e-02 -5.488281e-01 1.715088e-02 8.354492e-01 -3.213257e-01 +8.222656e-01 -1.651611e-01 5.444336e-01 4.940428e-01 1.472168e-01 9.863281e-01 7.684326e-02 1.413978e-02 -5.498047e-01 1.693726e-02 8.354492e-01 -3.213380e-01 +8.222656e-01 -1.651611e-01 5.449219e-01 4.939477e-01 1.470947e-01 9.863281e-01 7.696533e-02 1.407896e-02 -5.498047e-01 1.689148e-02 8.349609e-01 -3.213671e-01 +8.217773e-01 -1.651611e-01 5.454102e-01 4.938238e-01 1.469727e-01 9.863281e-01 7.714844e-02 1.402240e-02 -5.502930e-01 1.673889e-02 8.349609e-01 -3.214022e-01 +8.212891e-01 -1.650391e-01 5.458984e-01 4.936609e-01 1.468506e-01 9.863281e-01 7.727051e-02 1.397160e-02 -5.507812e-01 1.667786e-02 8.344727e-01 -3.214367e-01 +8.208008e-01 -1.654053e-01 5.463867e-01 4.934677e-01 1.470947e-01 9.863281e-01 7.745361e-02 1.392465e-02 -5.517578e-01 1.681519e-02 8.339844e-01 -3.214701e-01 diff --git a/data/vert/2011_F_EuMeT2wBo_00014_00001.npy b/data/vert/2011_F_EuMeT2wBo_00014_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..98ef031004e45501438a42d209e36f4405b95d39 --- /dev/null +++ b/data/vert/2011_F_EuMeT2wBo_00014_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9e7891daddfdcef364f24e54b3d7b6de945fcb139a281710e888cba181042412 +size 2067459 diff --git a/data/vert/2011_KAeAqaA0Llg_00005_00001.npy b/data/vert/2011_KAeAqaA0Llg_00005_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..ead7f3049ac3e77d117cc16839354670660985a7 --- /dev/null +++ b/data/vert/2011_KAeAqaA0Llg_00005_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d57de6acea24268cfc45bafa0d552f514e834981e708a15eb313576e92d30a4a +size 4878579 diff --git a/data/vert/2011_MCkKihQrNA4_00014_00000.npy b/data/vert/2011_MCkKihQrNA4_00014_00000.npy new file mode 100644 index 0000000000000000000000000000000000000000..a05c523431f0736d173de650cff080fbefdfd605 --- /dev/null +++ b/data/vert/2011_MCkKihQrNA4_00014_00000.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9e8e2524a66166fdd0b14a585b134d7e87c16db5fa36a865bbcd15b771d80ac2 +size 17611299 diff --git a/data/vert_raw/2011_F_EuMeT2wBo_00014_00001.npy b/data/vert_raw/2011_F_EuMeT2wBo_00014_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..93df08c461d57ac8cbd84a36580e123e3ef42d4b --- /dev/null +++ 
b/data/vert_raw/2011_F_EuMeT2wBo_00014_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:36236b578dcdb16ae8e19255024be06a4593b7aed30e21bce6590e7ebf371c40 +size 2067459 diff --git a/data/vert_raw/2011_KAeAqaA0Llg_00005_00001.npy b/data/vert_raw/2011_KAeAqaA0Llg_00005_00001.npy new file mode 100644 index 0000000000000000000000000000000000000000..1977ff3879aef8ab8a69662a7edab51dbad0bac0 --- /dev/null +++ b/data/vert_raw/2011_KAeAqaA0Llg_00005_00001.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:df501785263063d4e871ca0a62a99eebc0b38cf642119ddb37d605f199305c1b +size 4878579 diff --git a/data/vert_raw/2011_MCkKihQrNA4_00014_00000.npy b/data/vert_raw/2011_MCkKihQrNA4_00014_00000.npy new file mode 100644 index 0000000000000000000000000000000000000000..d8298a9b2ca226db23628b2d4607e3a23da6f713 --- /dev/null +++ b/data/vert_raw/2011_MCkKihQrNA4_00014_00000.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:632036e12d9801b34c95686c5845b9077e5b844b55d7a6e6480d1ef3f442ad02 +size 17611299 diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..d8eae889c238ebf646cffd8dd126ddf53b8e8b1b --- /dev/null +++ b/requirements.txt @@ -0,0 +1,20 @@ +einops +ema_pytorch +evo +ftfy +fvcore +git+https://github.com/openai/CLIP.git +gradio +gradio_rerun +h5py +hydra-core +iopath +lightning +regex +rerun-sdk==0.15.1 +scikit-learn +scipy +torchtyping +tqdm +trimesh +wandb \ No newline at end of file diff --git a/src/datasets/datamodule.py b/src/datasets/datamodule.py new file mode 100644 index 0000000000000000000000000000000000000000..6fd3f637bd6d7fd23ac8d0fc3a5e45bc38a1de7a --- /dev/null +++ b/src/datasets/datamodule.py @@ -0,0 +1,68 @@ +from lightning import LightningDataModule +from torch.utils.data import Dataset, DataLoader + + +class Datamodule(LightningDataModule): + def __init__( + self, + train_dataset: Dataset, + eval_dataset: Dataset, + batch_train_size: int, + num_workers: int, + eval_batch_size: int = None, + ): + super().__init__() + + self.train_dataset = train_dataset + self.eval_dataset = eval_dataset + + self.batch_train_size = batch_train_size + self.eval_batch_size = ( + eval_batch_size if eval_batch_size is not None else batch_train_size + ) + + self.num_workers = num_workers + + def train_dataloader(self) -> DataLoader: + """Load train set loader.""" + persistent_workers = True if self.num_workers > 0 else False + + dataloader = DataLoader( + self.train_dataset, + batch_size=self.batch_train_size, + num_workers=self.num_workers, + pin_memory=True, + persistent_workers=persistent_workers, + ) + return dataloader + + def val_dataloader(self) -> DataLoader: + """Load val set loader.""" + persistent_workers = True if self.num_workers > 0 else False + + dataloader = DataLoader( + self.eval_dataset, + batch_size=self.eval_batch_size, + num_workers=self.num_workers, + pin_memory=True, + persistent_workers=persistent_workers, + ) + return dataloader + + def predict_dataloader(self) -> DataLoader: + """Load predict set loader.""" + dataloader = DataLoader( + self.eval_dataset, + batch_size=self.eval_batch_size, + num_workers=self.num_workers, + ) + return dataloader + + def test_dataloader(self) -> DataLoader: + """Load test set loader.""" + dataloader = DataLoader( + self.eval_dataset, + batch_size=self.eval_batch_size, + num_workers=self.num_workers, + ) + return dataloader diff --git a/src/datasets/modalities/caption_dataset.py 
b/src/datasets/modalities/caption_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..1275c29bc7d4e8b76ca8eb39489d3c6c8d889946 --- /dev/null +++ b/src/datasets/modalities/caption_dataset.py @@ -0,0 +1,107 @@ +from collections import Counter +from pathlib import Path + +import numpy as np +import torch +from torch.utils.data import Dataset +import torch.nn.functional as F + +from utils.file_utils import load_txt + + +class CaptionDataset(Dataset): + def __init__( + self, + name: str, + dataset_dir: str, + num_cams: int, + num_feats: int, + num_segments: int, + sequential: bool, + **kwargs, + ): + super().__init__() + self.modality = name + self.name = name + self.dataset_dir = Path(dataset_dir) + # Set data paths (segments, captions, etc...) + for name, field in kwargs.items(): + if isinstance(field, str): + field = Path(field) + if name == "feat_caption_dir": + field = field / "seq" if sequential else field / "token" + setattr(self, name, field) + + self.filenames = None + + self.clip_seq_dir = self.dataset_dir / "caption_clip" / "seq" # For CLaTrScore + self.num_cams = num_cams + self.num_feats = num_feats + self.num_segments = num_segments + self.sequential = sequential + + def __len__(self): + return len(self.filenames) + + def __getitem__(self, index): + filename = self.filenames[index] + + # Load data + if hasattr(self, "segment_dir"): + raw_segments = torch.from_numpy( + np.load((self.segment_dir / (filename + ".npy"))) + ) + padded_raw_segments = F.pad( + raw_segments, + (0, self.num_cams - len(raw_segments)), + value=self.num_segments, + ) + if hasattr(self, "raw_caption_dir"): + raw_caption = load_txt(self.raw_caption_dir / (filename + ".txt")) + if hasattr(self, "feat_caption_dir"): + feat_caption = torch.from_numpy( + np.load((self.feat_caption_dir / (filename + ".npy"))) + ) + if self.sequential: + feat_caption = F.pad( + feat_caption.to(torch.float32), + (0, 0, 0, self.max_feat_length - feat_caption.shape[0]), + ) + + if self.modality == "caption": + raw_data = {"caption": raw_caption, "segments": padded_raw_segments} + feat_data = ( + feat_caption.permute(1, 0) if feat_caption.dim() == 2 else feat_caption + ) + elif self.modality == "segments": + raw_data = {"segments": padded_raw_segments} + # Shift by one for padding + feat_data = F.one_hot( + padded_raw_segments, num_classes=self.num_segments + 1 + ).to(torch.float32) + if self.sequential: + feat_data = feat_data.permute(1, 0) + else: + feat_data = feat_data.reshape(-1) + elif self.modality == "class": + raw_data = {"segments": padded_raw_segments} + most_frequent_segment = Counter(raw_segments).most_common(1)[0][0] + feat_data = F.one_hot( + torch.tensor(most_frequent_segment), num_classes=self.num_segments + ).to(torch.float32) + else: + raise ValueError(f"Modality {self.modality} not supported") + + clip_seq_caption = torch.from_numpy( + np.load((self.clip_seq_dir / (filename + ".npy"))) + ) + padding_mask = torch.ones((self.max_feat_length)) + padding_mask[clip_seq_caption.shape[0] :] = 0 + clip_seq_caption = F.pad( + clip_seq_caption.to(torch.float32), + (0, 0, 0, self.max_feat_length - clip_seq_caption.shape[0]), + ) + raw_data["clip_seq_caption"] = clip_seq_caption + raw_data["clip_seq_mask"] = padding_mask + + return filename, feat_data, raw_data diff --git a/src/datasets/modalities/char_dataset.py b/src/datasets/modalities/char_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..345d25ab11be458852486c6fac3eaf25b7490439 --- /dev/null +++ 
b/src/datasets/modalities/char_dataset.py @@ -0,0 +1,120 @@ +from pathlib import Path + +import numpy as np +import torch +from torch.utils.data import Dataset +import torch.nn.functional as F + +# ------------------------------------------------------------------------------------- # + +num_frequencies = None + +# ------------------------------------------------------------------------------------- # + + +class CharacterDataset(Dataset): + def __init__( + self, + name: str, + dataset_dir: str, + standardize: bool, + num_feats: int, + num_cams: int, + sequential: bool, + num_frequencies: int, + min_freq: int, + max_freq: int, + load_vertices: bool, + **kwargs, + ): + super().__init__() + self.modality = "char" + self.name = name + self.dataset_dir = Path(dataset_dir) + self.traj_dir = self.dataset_dir / "traj" + self.data_dir = self.dataset_dir / self.name + self.vert_dir = self.dataset_dir / "vert_raw" + self.center_dir = self.dataset_dir / "char_raw" + + self.filenames = None + self.standardize = standardize + if self.standardize: + mean_std = kwargs["standardization"] + self.norm_mean = torch.Tensor(mean_std["norm_mean_h"])[:, None] + self.norm_std = torch.Tensor(mean_std["norm_std_h"])[:, None] + self.velocity = mean_std["velocity"] + + self.num_cams = num_cams + self.num_feats = num_feats + self.sequential = sequential + self.num_frequencies = num_frequencies + self.min_freq = min_freq + self.max_freq = max_freq + + self.load_vertices = load_vertices + + def __len__(self): + return len(self.filenames) + + def __getitem__(self, index): + filename = self.filenames[index] + + char_filename = filename + ".npy" + char_path = self.data_dir / char_filename + + raw_char_feature = torch.from_numpy(np.load((char_path))).to(torch.float32) + padding_size = self.num_cams - raw_char_feature.shape[0] + padded_raw_char_feature = F.pad( + raw_char_feature, (0, 0, 0, padding_size) + ).permute(1, 0) + + center_path = self.center_dir / char_filename # Center to offset mesh + center_offset = torch.from_numpy(np.load(center_path)[0]).to(torch.float32) + if self.load_vertices: + vert_path = self.vert_dir / char_filename + raw_verts = np.load(vert_path, allow_pickle=True)[()] + if raw_verts["vertices"] is None: + num_frames = raw_char_feature.shape[0] + verts = torch.zeros((num_frames, 6890, 3), dtype=torch.float32) + padded_verts = torch.zeros( + (self.num_cams, 6890, 3), dtype=torch.float32 + ) + faces = torch.zeros((13776, 3), dtype=torch.int16) + else: + verts = torch.from_numpy(raw_verts["vertices"]).to(torch.float32) + verts -= center_offset + padded_verts = F.pad(verts, (0, 0, 0, 0, 0, padding_size)) + faces = torch.from_numpy(raw_verts["faces"]).to(torch.int16) + + char_feature = raw_char_feature.clone() + if self.velocity: + velocity = char_feature[1:].clone() - char_feature[:-1].clone() + char_feature = torch.cat([raw_char_feature[0][None], velocity]) + + if self.standardize: + # Normalize the first frame (orgin) and the rest (velocity) separately + if len(self.norm_mean) == 6: + char_feature[0] -= self.norm_mean[:3, 0].to(raw_char_feature.device) + char_feature[0] /= self.norm_std[:3, 0].to(raw_char_feature.device) + char_feature[1:] -= self.norm_mean[3:, 0].to(raw_char_feature.device) + char_feature[1:] /= self.norm_std[3:, 0].to(raw_char_feature.device) + # Normalize all in one + else: + char_feature -= self.norm_mean[:, 0].to(raw_char_feature.device) + char_feature /= self.norm_std[:, 0].to(raw_char_feature.device) + padded_char_feature = F.pad( + char_feature, + (0, 0, 0, self.num_cams - 
char_feature.shape[0]), + ) + + if self.sequential: + padded_char_feature = padded_char_feature.permute(1, 0) + else: + padded_char_feature = padded_char_feature.reshape(-1) + + raw_feats = {"char_raw_feat": padded_raw_char_feature} + if self.load_vertices: + raw_feats["char_vertices"] = padded_verts + raw_feats["char_faces"] = faces + + return char_filename, padded_char_feature, raw_feats diff --git a/src/datasets/modalities/trajectory_dataset.py b/src/datasets/modalities/trajectory_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..6fea9a41d200ebb352527add29a27e5501115532 --- /dev/null +++ b/src/datasets/modalities/trajectory_dataset.py @@ -0,0 +1,152 @@ +from pathlib import Path + +from evo.tools.file_interface import read_kitti_poses_file +import numpy as np +import torch +from torch.utils.data import Dataset +from torchtyping import TensorType +import torch.nn.functional as F +from typing import Tuple + +from utils.file_utils import load_txt +from utils.rotation_utils import compute_rotation_matrix_from_ortho6d + +num_cams = None + + +# ------------------------------------------------------------------------------------- # + + +class TrajectoryDataset(Dataset): + def __init__( + self, + name: str, + set_name: str, + dataset_dir: str, + num_rawfeats: int, + num_feats: int, + num_cams: int, + standardize: bool, + **kwargs, + ): + super().__init__() + self.name = name + self.set_name = set_name + self.dataset_dir = Path(dataset_dir) + if name == "relative": + self.data_dir = self.dataset_dir / "traj_raw" + self.relative_dir = self.dataset_dir / "relative" + else: + self.data_dir = self.dataset_dir / "traj" + self.intrinsics_dir = self.dataset_dir / "intrinsics" + + self.num_rawfeats = num_rawfeats + self.num_feats = num_feats + self.num_cams = num_cams + + self.augmentation = None + self.standardize = standardize + if self.standardize: + mean_std = kwargs["standardization"] + self.norm_mean = torch.Tensor(mean_std["norm_mean"]) + self.norm_std = torch.Tensor(mean_std["norm_std"]) + self.shift_mean = torch.Tensor(mean_std["shift_mean"]) + self.shift_std = torch.Tensor(mean_std["shift_std"]) + self.velocity = mean_std["velocity"] + + # --------------------------------------------------------------------------------- # + + def set_split(self, split: str, train_rate: float = 1.0): + self.split = split + split_path = Path(self.dataset_dir) / f"{split}_split.txt" + split_traj = load_txt(split_path).split("\n") + self.filenames = sorted(split_traj) + + return self + + # --------------------------------------------------------------------------------- # + + def get_feature( + self, raw_matrix_trajectory: TensorType["num_cams", 4, 4] + ) -> TensorType[9, "num_cams"]: + matrix_trajectory = torch.clone(raw_matrix_trajectory) + + raw_trans = torch.clone(matrix_trajectory[:, :3, 3]) + if self.velocity: + velocity = raw_trans[1:] - raw_trans[:-1] + raw_trans = torch.cat([raw_trans[0][None], velocity]) + if self.standardize: + raw_trans[0] -= self.shift_mean + raw_trans[0] /= self.shift_std + raw_trans[1:] -= self.norm_mean + raw_trans[1:] /= self.norm_std + + # Compute the 6D continuous rotation + raw_rot = matrix_trajectory[:, :3, :3] + rot6d = raw_rot[:, :, :2].permute(0, 2, 1).reshape(-1, 6) + + # Stack rotation 6D and translation + rot6d_trajectory = torch.hstack([rot6d, raw_trans]).permute(1, 0) + + return rot6d_trajectory + + def get_matrix( + self, raw_rot6d_trajectory: TensorType[9, "num_cams"] + ) -> TensorType["num_cams", 4, 4]: + rot6d_trajectory = 
torch.clone(raw_rot6d_trajectory) + device = rot6d_trajectory.device + + num_cams = rot6d_trajectory.shape[1] + matrix_trajectory = torch.eye(4, device=device)[None].repeat(num_cams, 1, 1) + + raw_trans = rot6d_trajectory[6:].permute(1, 0) + if self.standardize: + raw_trans[0] *= self.shift_std.to(device) + raw_trans[0] += self.shift_mean.to(device) + raw_trans[1:] *= self.norm_std.to(device) + raw_trans[1:] += self.norm_mean.to(device) + if self.velocity: + raw_trans = torch.cumsum(raw_trans, dim=0) + matrix_trajectory[:, :3, 3] = raw_trans + + rot6d = rot6d_trajectory[:6].permute(1, 0) + raw_rot = compute_rotation_matrix_from_ortho6d(rot6d) + matrix_trajectory[:, :3, :3] = raw_rot + + return matrix_trajectory + + # --------------------------------------------------------------------------------- # + + def __getitem__(self, index: int) -> Tuple[str, TensorType["num_cams", 4, 4]]: + filename = self.filenames[index] + + trajectory_filename = filename + ".txt" + trajectory_path = self.data_dir / trajectory_filename + + trajectory = read_kitti_poses_file(trajectory_path) + matrix_trajectory = torch.from_numpy(np.array(trajectory.poses_se3)).to( + torch.float32 + ) + + trajectory_feature = self.get_feature(matrix_trajectory) + + padded_trajectory_feature = F.pad( + trajectory_feature, (0, self.num_cams - trajectory_feature.shape[1]) + ) + # Padding mask: 1 for valid cams, 0 for padded cams + padding_mask = torch.ones((self.num_cams)) + padding_mask[trajectory_feature.shape[1] :] = 0 + + intrinsics_filename = filename + ".npy" + intrinsics_path = self.intrinsics_dir / intrinsics_filename + intrinsics = np.load(intrinsics_path) + + return ( + trajectory_filename, + padded_trajectory_feature, + padding_mask, + intrinsics + ) + + def __len__(self): + return len(self.filenames) diff --git a/src/datasets/multimodal_dataset.py b/src/datasets/multimodal_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..a5afe81c3d0dd91a106d2950634c555372b52655 --- /dev/null +++ b/src/datasets/multimodal_dataset.py @@ -0,0 +1,88 @@ +from copy import deepcopy as dp +from pathlib import Path + +from torch.utils.data import Dataset + + +class MultimodalDataset(Dataset): + def __init__( + self, + name, + dataset_name, + dataset_dir, + trajectory, + feature_type, + num_rawfeats, + num_feats, + num_cams, + num_cond_feats, + standardization, + augmentation=None, + **modalities, + ): + self.dataset_dir = Path(dataset_dir) + self.name = name + self.dataset_name = dataset_name + self.feature_type = feature_type + self.num_rawfeats = num_rawfeats + self.num_feats = num_feats + self.num_cams = num_cams + self.trajectory_dataset = trajectory + self.standardization = standardization + self.modality_datasets = modalities + + if augmentation is not None: + self.augmentation = True + self.augmentation_rate = augmentation.rate + self.trajectory_dataset.set_augmentation(augmentation.trajectory) + if hasattr(augmentation, "modalities"): + for modality, augments in augmentation.modalities: + self.modality_datasets[modality].set_augmentation(augments) + else: + self.augmentation = False + + # --------------------------------------------------------------------------------- # + + def set_split(self, split: str, train_rate: float = 1.0): + self.split = split + + # Get trajectory split + self.trajectory_dataset = dp(self.trajectory_dataset).set_split( + split, train_rate + ) + self.root_filenames = self.trajectory_dataset.filenames + + # Get modality split + for modality_name in self.modality_datasets.keys(): + 
self.modality_datasets[modality_name].filenames = self.root_filenames + + self.get_feature = self.trajectory_dataset.get_feature + self.get_matrix = self.trajectory_dataset.get_matrix + + return self + + # --------------------------------------------------------------------------------- # + + def __getitem__(self, index): + traj_out = self.trajectory_dataset[index] + traj_filename, traj_feature, padding_mask, intrinsics = traj_out + + out = { + "traj_filename": traj_filename, + "traj_feat": traj_feature, + "padding_mask": padding_mask, + "intrinsics": intrinsics, + } + + for modality_name, modality_dataset in self.modality_datasets.items(): + modality_filename, modality_feature, modality_raw = modality_dataset[index] + assert traj_filename.split(".")[0] == modality_filename.split(".")[0] + out[f"{modality_name}_filename"] = modality_filename + out[f"{modality_name}_feat"] = modality_feature + out[f"{modality_name}_raw"] = modality_raw + out[f"{modality_name}_padding_mask"] = padding_mask + + return out + + def __len__(self): + return len(self.trajectory_dataset) diff --git a/src/diffuser.py b/src/diffuser.py new file mode 100644 index 0000000000000000000000000000000000000000..e30a5d1e91f5e21343a90af64bd10065d82837fe --- /dev/null +++ b/src/diffuser.py @@ -0,0 +1,221 @@ +from omegaconf.dictconfig import DictConfig +from typing import List, Tuple + +from ema_pytorch import EMA +import numpy as np +import torch +from torchtyping import TensorType +import torch.nn as nn +import lightning as L + +from utils.random_utils import StackedRandomGenerator + +# ------------------------------------------------------------------------------------- # + +batch_size, num_samples = None, None +num_feats, num_rawfeats, num_cams = None, None, None +RawTrajectory = TensorType["num_samples", "num_rawfeats", "num_cams"] + +# ------------------------------------------------------------------------------------- # + + +class Diffuser(L.LightningModule): + def __init__( + self, + network: nn.Module, + guidance_weight: float, + ema_kwargs: DictConfig, + sampling_kwargs: DictConfig, + edm2_normalization: bool, + **kwargs, + ): + super().__init__() + + # Network and EMA + self.net = network + self.ema = EMA(self.net, **ema_kwargs) + self.guidance_weight = guidance_weight + self.edm2_normalization = edm2_normalization + self.sigma_data = network.sigma_data + + # Sampling + self.num_steps = sampling_kwargs.num_steps + self.sigma_min = sampling_kwargs.sigma_min + self.sigma_max = sampling_kwargs.sigma_max + self.rho = sampling_kwargs.rho + self.S_churn = sampling_kwargs.S_churn + self.S_noise = sampling_kwargs.S_noise + self.S_min = sampling_kwargs.S_min + self.S_max = ( + sampling_kwargs.S_max + if isinstance(sampling_kwargs.S_max, float) + else float("inf") + ) + + # ---------------------------------------------------------------------------------- # + + def on_predict_start(self): + eval_dataset = self.trainer.datamodule.eval_dataset + self.modalities = list(eval_dataset.modality_datasets.keys()) + + self.get_matrix = self.trainer.datamodule.train_dataset.get_matrix + self.v_get_matrix = self.trainer.datamodule.eval_dataset.get_matrix + + def predict_step(self, batch, batch_idx): + ref_samples, mask = batch["traj_feat"], batch["padding_mask"] + + if len(self.modalities) > 0: + cond_k = [x for x in batch.keys() if "traj" not in x and "feat" in x] + cond_data = [batch[cond] for cond in cond_k] + conds = {} + for cond in cond_k: + cond_name = cond.replace("_feat", "") + if isinstance(batch[f"{cond_name}_raw"], dict): + 
for cond_name_, x in batch[f"{cond_name}_raw"].items(): + conds[cond_name_] = x + else: + conds[cond_name] = batch[f"{cond_name}_raw"] + batch["conds"] = conds + else: + cond_data = None + + # cf edm2 sigma_data normalization / https://arxiv.org/pdf/2312.02696.pdf + if self.edm2_normalization: + ref_samples *= self.sigma_data + _, gen_samples = self.sample(self.ema.ema_model, ref_samples, cond_data, mask) + + batch["ref_samples"] = torch.stack([self.v_get_matrix(x) for x in ref_samples]) + batch["gen_samples"] = torch.stack([self.get_matrix(x) for x in gen_samples]) + + return batch + + # --------------------------------------------------------------------------------- # + + def sample( + self, + net: torch.nn.Module, + traj_samples: RawTrajectory, + cond_samples: TensorType["num_samples", "num_feats"], + mask: TensorType["num_samples", "num_feats"], + external_seeds: List[int] = None, + ) -> Tuple[RawTrajectory, RawTrajectory]: + # Pick latents + num_samples = traj_samples.shape[0] + seeds = self.gen_seeds if hasattr(self, "gen_seeds") else range(num_samples) + rnd = StackedRandomGenerator(self.device, seeds) + + sz = [num_samples, self.net.num_feats, self.net.num_cams] + latents = rnd.randn_rn(sz, device=self.device) + # Generate trajectories. + generations = self.edm_sampler( + net, + latents, + class_labels=cond_samples, + mask=mask, + randn_like=rnd.randn_like, + guidance_weight=self.guidance_weight, + # ----------------------------------- # + num_steps=self.num_steps, + sigma_min=self.sigma_min, + sigma_max=self.sigma_max, + rho=self.rho, + S_churn=self.S_churn, + S_min=self.S_min, + S_max=self.S_max, + S_noise=self.S_noise, + ) + + return latents, generations + + @staticmethod + def edm_sampler( + net, + latents, + class_labels=None, + mask=None, + guidance_weight=2.0, + randn_like=torch.randn_like, + num_steps=18, + sigma_min=0.002, + sigma_max=80, + rho=7, + S_churn=0, + S_min=0, + S_max=float("inf"), + S_noise=1, + ): + # Time step discretization. + step_indices = torch.arange(num_steps, device=latents.device) + t_steps = ( + sigma_max ** (1 / rho) + + step_indices + / (num_steps - 1) + * (sigma_min ** (1 / rho) - sigma_max ** (1 / rho)) + ) ** rho + t_steps = torch.cat( + [torch.as_tensor(t_steps), torch.zeros_like(t_steps[:1])] + ) # t_N = 0 + + # Main sampling loop. + bool_mask = ~mask.to(bool) + x_next = latents * t_steps[0] + bs = latents.shape[0] + for i, (t_cur, t_next) in enumerate( + zip(t_steps[:-1], t_steps[1:]) + ): # 0, ..., N-1 + x_cur = x_next + + # Increase noise temporarily. + gamma = ( + min(S_churn / num_steps, np.sqrt(2) - 1) + if S_min <= t_cur <= S_max + else 0 + ) + t_hat = torch.as_tensor(t_cur + gamma * t_cur) + x_hat = x_cur + (t_hat**2 - t_cur**2).sqrt() * S_noise * randn_like(x_cur) + + # Euler step. + if class_labels is not None: + class_label_knot = [torch.zeros_like(label) for label in class_labels] + x_hat_both = torch.cat([x_hat, x_hat], dim=0) + y_label_both = [ + torch.cat([y, y_knot], dim=0) + for y, y_knot in zip(class_labels, class_label_knot) + ] + + bool_mask_both = torch.cat([bool_mask, bool_mask], dim=0) + t_hat_both = torch.cat([t_hat.expand(bs), t_hat.expand(bs)], dim=0) + cond_denoised, denoised = net( + x_hat_both, t_hat_both, y=y_label_both, mask=bool_mask_both + ).chunk(2, dim=0) + denoised = denoised + (cond_denoised - denoised) * guidance_weight + else: + denoised = net(x_hat, t_hat.expand(bs), mask=bool_mask) + d_cur = (x_hat - denoised) / t_hat + x_next = x_hat + (t_next - t_hat) * d_cur + + # Apply 2nd order correction. 
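+            # Heun (2nd-order) correction, skipped at the final step where t_next = 0: the denoiser is + # re-evaluated at (x_next, t_next), the resulting slope d_prime is averaged with the Euler + # slope d_cur, and x_next is recomputed from x_hat with the averaged slope. + # When class_labels are provided, the conditional and unconditional (zeroed-label) batches are + # run through the network together and recombined with classifier-free guidance: + # denoised = uncond + guidance_weight * (cond - uncond).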
+ if i < num_steps - 1: + if class_labels is not None: + class_label_knot = [ + torch.zeros_like(label) for label in class_labels + ] + x_next_both = torch.cat([x_next, x_next], dim=0) + y_label_both = [ + torch.cat([y, y_knot], dim=0) + for y, y_knot in zip(class_labels, class_label_knot) + ] + bool_mask_both = torch.cat([bool_mask, bool_mask], dim=0) + t_next_both = torch.cat( + [t_next.expand(bs), t_next.expand(bs)], dim=0 + ) + cond_denoised, denoised = net( + x_next_both, t_next_both, y=y_label_both, mask=bool_mask_both + ).chunk(2, dim=0) + denoised = denoised + (cond_denoised - denoised) * guidance_weight + else: + denoised = net(x_next, t_next.expand(bs), mask=bool_mask) + d_prime = (x_next - denoised) / t_next + x_next = x_hat + (t_next - t_hat) * (0.5 * d_cur + 0.5 * d_prime) + + return x_next diff --git a/src/models/modules/director.py b/src/models/modules/director.py new file mode 100644 index 0000000000000000000000000000000000000000..310c2e6edf81f48a01eb9cc08bb6d1f27126d1b5 --- /dev/null +++ b/src/models/modules/director.py @@ -0,0 +1,1154 @@ +import torch +import torch.nn as nn +from torch import Tensor +import numpy as np +from einops import rearrange + +from typing import Optional, List +from torchtyping import TensorType +from einops._torch_specific import allow_ops_in_compiled_graph # requires einops>=0.6.1 + +allow_ops_in_compiled_graph() + +batch_size, num_cond_feats = None, None + + +class FusedMLP(nn.Sequential): + def __init__( + self, + dim_model: int, + dropout: float, + activation: nn.Module, + hidden_layer_multiplier: int = 4, + bias: bool = True, + ): + super().__init__( + nn.Linear(dim_model, dim_model * hidden_layer_multiplier, bias=bias), + activation(), + nn.Dropout(dropout), + nn.Linear(dim_model * hidden_layer_multiplier, dim_model, bias=bias), + ) + + +def _cast_if_autocast_enabled(tensor): + if torch.is_autocast_enabled(): + if tensor.device.type == "cuda": + dtype = torch.get_autocast_gpu_dtype() + elif tensor.device.type == "cpu": + dtype = torch.get_autocast_cpu_dtype() + else: + raise NotImplementedError() + return tensor.to(dtype=dtype) + return tensor + + +class LayerNorm16Bits(torch.nn.LayerNorm): + """ + 16-bit friendly version of torch.nn.LayerNorm + """ + + def __init__( + self, + normalized_shape, + eps=1e-06, + elementwise_affine=True, + device=None, + dtype=None, + ): + super().__init__( + normalized_shape=normalized_shape, + eps=eps, + elementwise_affine=elementwise_affine, + device=device, + dtype=dtype, + ) + + def forward(self, x): + module_device = x.device + downcast_x = _cast_if_autocast_enabled(x) + downcast_weight = ( + _cast_if_autocast_enabled(self.weight) + if self.weight is not None + else self.weight + ) + downcast_bias = ( + _cast_if_autocast_enabled(self.bias) if self.bias is not None else self.bias + ) + with torch.autocast(enabled=False, device_type=module_device.type): + return nn.functional.layer_norm( + downcast_x, + self.normalized_shape, + downcast_weight, + downcast_bias, + self.eps, + ) + + +class StochatichDepth(nn.Module): + def __init__(self, p: float): + super().__init__() + self.survival_prob = 1.0 - p + + def forward(self, x: Tensor) -> Tensor: + if self.training and self.survival_prob < 1: + mask = ( + torch.empty(x.shape[0], 1, 1, device=x.device).uniform_() + + self.survival_prob + ) + mask = mask.floor() + if self.survival_prob > 0: + mask = mask / self.survival_prob + return x * mask + else: + return x + + +class CrossAttentionOp(nn.Module): + def __init__( + self, attention_dim, num_heads, dim_q, dim_kv, 
use_biases=True, is_sa=False + ): + super().__init__() + self.dim_q = dim_q + self.dim_kv = dim_kv + self.attention_dim = attention_dim + self.num_heads = num_heads + self.use_biases = use_biases + self.is_sa = is_sa + if self.is_sa: + self.qkv = nn.Linear(dim_q, attention_dim * 3, bias=use_biases) + else: + self.q = nn.Linear(dim_q, attention_dim, bias=use_biases) + self.kv = nn.Linear(dim_kv, attention_dim * 2, bias=use_biases) + self.out = nn.Linear(attention_dim, dim_q, bias=use_biases) + + def forward(self, x_to, x_from=None, attention_mask=None): + if x_from is None: + x_from = x_to + if self.is_sa: + q, k, v = self.qkv(x_to).chunk(3, dim=-1) + else: + q = self.q(x_to) + k, v = self.kv(x_from).chunk(2, dim=-1) + q = rearrange(q, "b n (h d) -> b h n d", h=self.num_heads) + k = rearrange(k, "b n (h d) -> b h n d", h=self.num_heads) + v = rearrange(v, "b n (h d) -> b h n d", h=self.num_heads) + if attention_mask is not None: + attention_mask = attention_mask.unsqueeze(1) + x = torch.nn.functional.scaled_dot_product_attention( + q, k, v, attn_mask=attention_mask + ) + x = rearrange(x, "b h n d -> b n (h d)") + x = self.out(x) + return x + + +class CrossAttentionBlock(nn.Module): + def __init__( + self, + dim_q: int, + dim_kv: int, + num_heads: int, + attention_dim: int = 0, + mlp_multiplier: int = 4, + dropout: float = 0.0, + stochastic_depth: float = 0.0, + use_biases: bool = True, + retrieve_attention_scores: bool = False, + use_layernorm16: bool = True, + ): + super().__init__() + layer_norm = ( + nn.LayerNorm + if not use_layernorm16 or retrieve_attention_scores + else LayerNorm16Bits + ) + self.retrieve_attention_scores = retrieve_attention_scores + self.initial_to_ln = layer_norm(dim_q, eps=1e-6) + attention_dim = min(dim_q, dim_kv) if attention_dim == 0 else attention_dim + self.ca = CrossAttentionOp( + attention_dim, num_heads, dim_q, dim_kv, is_sa=False, use_biases=use_biases + ) + self.ca_stochastic_depth = StochatichDepth(stochastic_depth) + self.middle_ln = layer_norm(dim_q, eps=1e-6) + self.ffn = FusedMLP( + dim_model=dim_q, + dropout=dropout, + activation=nn.GELU, + hidden_layer_multiplier=mlp_multiplier, + bias=use_biases, + ) + self.ffn_stochastic_depth = StochatichDepth(stochastic_depth) + + def forward( + self, + to_tokens: Tensor, + from_tokens: Tensor, + to_token_mask: Optional[Tensor] = None, + from_token_mask: Optional[Tensor] = None, + ) -> Tensor: + if to_token_mask is None and from_token_mask is None: + attention_mask = None + else: + if to_token_mask is None: + to_token_mask = torch.ones( + to_tokens.shape[0], + to_tokens.shape[1], + dtype=torch.bool, + device=to_tokens.device, + ) + if from_token_mask is None: + from_token_mask = torch.ones( + from_tokens.shape[0], + from_tokens.shape[1], + dtype=torch.bool, + device=from_tokens.device, + ) + attention_mask = from_token_mask.unsqueeze(1) * to_token_mask.unsqueeze(2) + attention_output = self.ca( + self.initial_to_ln(to_tokens), + from_tokens, + attention_mask=attention_mask, + ) + to_tokens = to_tokens + self.ca_stochastic_depth(attention_output) + to_tokens = to_tokens + self.ffn_stochastic_depth( + self.ffn(self.middle_ln(to_tokens)) + ) + return to_tokens + + +class SelfAttentionBlock(nn.Module): + def __init__( + self, + dim_qkv: int, + num_heads: int, + attention_dim: int = 0, + mlp_multiplier: int = 4, + dropout: float = 0.0, + stochastic_depth: float = 0.0, + use_biases: bool = True, + use_layer_scale: bool = False, + layer_scale_value: float = 0.0, + use_layernorm16: bool = True, + ): + 
super().__init__() + layer_norm = LayerNorm16Bits if use_layernorm16 else nn.LayerNorm + self.initial_ln = layer_norm(dim_qkv, eps=1e-6) + attention_dim = dim_qkv if attention_dim == 0 else attention_dim + self.sa = CrossAttentionOp( + attention_dim, + num_heads, + dim_qkv, + dim_qkv, + is_sa=True, + use_biases=use_biases, + ) + self.sa_stochastic_depth = StochatichDepth(stochastic_depth) + self.middle_ln = layer_norm(dim_qkv, eps=1e-6) + self.ffn = FusedMLP( + dim_model=dim_qkv, + dropout=dropout, + activation=nn.GELU, + hidden_layer_multiplier=mlp_multiplier, + bias=use_biases, + ) + self.ffn_stochastic_depth = StochatichDepth(stochastic_depth) + self.use_layer_scale = use_layer_scale + if use_layer_scale: + self.layer_scale_1 = nn.Parameter( + torch.ones(dim_qkv) * layer_scale_value, requires_grad=True + ) + self.layer_scale_2 = nn.Parameter( + torch.ones(dim_qkv) * layer_scale_value, requires_grad=True + ) + + def forward( + self, + tokens: torch.Tensor, + token_mask: Optional[torch.Tensor] = None, + ): + if token_mask is None: + attention_mask = None + else: + attention_mask = token_mask.unsqueeze(1) * torch.ones( + tokens.shape[0], + tokens.shape[1], + 1, + dtype=torch.bool, + device=tokens.device, + ) + attention_output = self.sa( + self.initial_ln(tokens), + attention_mask=attention_mask, + ) + if self.use_layer_scale: + tokens = tokens + self.sa_stochastic_depth( + self.layer_scale_1 * attention_output + ) + tokens = tokens + self.ffn_stochastic_depth( + self.layer_scale_2 * self.ffn(self.middle_ln(tokens)) + ) + else: + tokens = tokens + self.sa_stochastic_depth(attention_output) + tokens = tokens + self.ffn_stochastic_depth( + self.ffn(self.middle_ln(tokens)) + ) + return tokens + + +class AdaLNSABlock(nn.Module): + def __init__( + self, + dim_qkv: int, + dim_cond: int, + num_heads: int, + attention_dim: int = 0, + mlp_multiplier: int = 4, + dropout: float = 0.0, + stochastic_depth: float = 0.0, + use_biases: bool = True, + use_layer_scale: bool = False, + layer_scale_value: float = 0.1, + use_layernorm16: bool = True, + ): + super().__init__() + layer_norm = LayerNorm16Bits if use_layernorm16 else nn.LayerNorm + self.initial_ln = layer_norm(dim_qkv, eps=1e-6, elementwise_affine=False) + attention_dim = dim_qkv if attention_dim == 0 else attention_dim + self.adaln_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(dim_cond, dim_qkv * 6, bias=use_biases), + ) + # Zero init + nn.init.zeros_(self.adaln_modulation[1].weight) + nn.init.zeros_(self.adaln_modulation[1].bias) + + self.sa = CrossAttentionOp( + attention_dim, + num_heads, + dim_qkv, + dim_qkv, + is_sa=True, + use_biases=use_biases, + ) + self.sa_stochastic_depth = StochatichDepth(stochastic_depth) + self.middle_ln = layer_norm(dim_qkv, eps=1e-6, elementwise_affine=False) + self.ffn = FusedMLP( + dim_model=dim_qkv, + dropout=dropout, + activation=nn.GELU, + hidden_layer_multiplier=mlp_multiplier, + bias=use_biases, + ) + self.ffn_stochastic_depth = StochatichDepth(stochastic_depth) + self.use_layer_scale = use_layer_scale + if use_layer_scale: + self.layer_scale_1 = nn.Parameter( + torch.ones(dim_qkv) * layer_scale_value, requires_grad=True + ) + self.layer_scale_2 = nn.Parameter( + torch.ones(dim_qkv) * layer_scale_value, requires_grad=True + ) + + def forward( + self, + tokens: torch.Tensor, + cond: torch.Tensor, + token_mask: Optional[torch.Tensor] = None, + ): + if token_mask is None: + attention_mask = None + else: + attention_mask = token_mask.unsqueeze(1) * torch.ones( + tokens.shape[0], + tokens.shape[1], + 1, + 
dtype=torch.bool, + device=tokens.device, + ) + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = ( + self.adaln_modulation(cond).chunk(6, dim=-1) + ) + attention_output = self.sa( + modulate_shift_and_scale(self.initial_ln(tokens), shift_msa, scale_msa), + attention_mask=attention_mask, + ) + if self.use_layer_scale: + tokens = tokens + self.sa_stochastic_depth( + gate_msa.unsqueeze(1) * self.layer_scale_1 * attention_output + ) + tokens = tokens + self.ffn_stochastic_depth( + gate_mlp.unsqueeze(1) + * self.layer_scale_2 + * self.ffn( + modulate_shift_and_scale( + self.middle_ln(tokens), shift_mlp, scale_mlp + ) + ) + ) + else: + tokens = tokens + gate_msa.unsqueeze(1) * self.sa_stochastic_depth( + attention_output + ) + tokens = tokens + self.ffn_stochastic_depth( + gate_mlp.unsqueeze(1) + * self.ffn( + modulate_shift_and_scale( + self.middle_ln(tokens), shift_mlp, scale_mlp + ) + ) + ) + return tokens + + +class CrossAttentionSABlock(nn.Module): + def __init__( + self, + dim_qkv: int, + dim_cond: int, + num_heads: int, + attention_dim: int = 0, + mlp_multiplier: int = 4, + dropout: float = 0.0, + stochastic_depth: float = 0.0, + use_biases: bool = True, + use_layer_scale: bool = False, + layer_scale_value: float = 0.0, + use_layernorm16: bool = True, + ): + super().__init__() + layer_norm = LayerNorm16Bits if use_layernorm16 else nn.LayerNorm + attention_dim = dim_qkv if attention_dim == 0 else attention_dim + self.ca = CrossAttentionOp( + attention_dim, + num_heads, + dim_qkv, + dim_cond, + is_sa=False, + use_biases=use_biases, + ) + self.ca_stochastic_depth = StochatichDepth(stochastic_depth) + self.ca_ln = layer_norm(dim_qkv, eps=1e-6) + + self.initial_ln = layer_norm(dim_qkv, eps=1e-6) + attention_dim = dim_qkv if attention_dim == 0 else attention_dim + + self.sa = CrossAttentionOp( + attention_dim, + num_heads, + dim_qkv, + dim_qkv, + is_sa=True, + use_biases=use_biases, + ) + self.sa_stochastic_depth = StochatichDepth(stochastic_depth) + self.middle_ln = layer_norm(dim_qkv, eps=1e-6) + self.ffn = FusedMLP( + dim_model=dim_qkv, + dropout=dropout, + activation=nn.GELU, + hidden_layer_multiplier=mlp_multiplier, + bias=use_biases, + ) + self.ffn_stochastic_depth = StochatichDepth(stochastic_depth) + self.use_layer_scale = use_layer_scale + if use_layer_scale: + self.layer_scale_1 = nn.Parameter( + torch.ones(dim_qkv) * layer_scale_value, requires_grad=True + ) + self.layer_scale_2 = nn.Parameter( + torch.ones(dim_qkv) * layer_scale_value, requires_grad=True + ) + + def forward( + self, + tokens: torch.Tensor, + cond: torch.Tensor, + token_mask: Optional[torch.Tensor] = None, + cond_mask: Optional[torch.Tensor] = None, + ): + if cond_mask is None: + cond_attention_mask = None + else: + cond_attention_mask = torch.ones( + cond.shape[0], + 1, + cond.shape[1], + dtype=torch.bool, + device=tokens.device, + ) * token_mask.unsqueeze(2) + if token_mask is None: + attention_mask = None + else: + attention_mask = token_mask.unsqueeze(1) * torch.ones( + tokens.shape[0], + tokens.shape[1], + 1, + dtype=torch.bool, + device=tokens.device, + ) + ca_output = self.ca( + self.ca_ln(tokens), + cond, + attention_mask=cond_attention_mask, + ) + ca_output = torch.nan_to_num( + ca_output, nan=0.0, posinf=0.0, neginf=0.0 + ) # Needed as some tokens get attention from no token so Nan + tokens = tokens + self.ca_stochastic_depth(ca_output) + attention_output = self.sa( + self.initial_ln(tokens), + attention_mask=attention_mask, + ) + if self.use_layer_scale: + tokens = tokens + 
self.sa_stochastic_depth( + self.layer_scale_1 * attention_output + ) + tokens = tokens + self.ffn_stochastic_depth( + self.layer_scale_2 * self.ffn(self.middle_ln(tokens)) + ) + else: + tokens = tokens + self.sa_stochastic_depth(attention_output) + tokens = tokens + self.ffn_stochastic_depth( + self.ffn(self.middle_ln(tokens)) + ) + return tokens + + +class CAAdaLNSABlock(nn.Module): + def __init__( + self, + dim_qkv: int, + dim_cond: int, + num_heads: int, + attention_dim: int = 0, + mlp_multiplier: int = 4, + dropout: float = 0.0, + stochastic_depth: float = 0.0, + use_biases: bool = True, + use_layer_scale: bool = False, + layer_scale_value: float = 0.1, + use_layernorm16: bool = True, + ): + super().__init__() + layer_norm = LayerNorm16Bits if use_layernorm16 else nn.LayerNorm + self.ca = CrossAttentionOp( + attention_dim, + num_heads, + dim_qkv, + dim_cond, + is_sa=False, + use_biases=use_biases, + ) + self.ca_stochastic_depth = StochatichDepth(stochastic_depth) + self.ca_ln = layer_norm(dim_qkv, eps=1e-6) + self.initial_ln = layer_norm(dim_qkv, eps=1e-6) + attention_dim = dim_qkv if attention_dim == 0 else attention_dim + self.adaln_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(dim_cond, dim_qkv * 6, bias=use_biases), + ) + # Zero init + nn.init.zeros_(self.adaln_modulation[1].weight) + nn.init.zeros_(self.adaln_modulation[1].bias) + + self.sa = CrossAttentionOp( + attention_dim, + num_heads, + dim_qkv, + dim_qkv, + is_sa=True, + use_biases=use_biases, + ) + self.sa_stochastic_depth = StochatichDepth(stochastic_depth) + self.middle_ln = layer_norm(dim_qkv, eps=1e-6) + self.ffn = FusedMLP( + dim_model=dim_qkv, + dropout=dropout, + activation=nn.GELU, + hidden_layer_multiplier=mlp_multiplier, + bias=use_biases, + ) + self.ffn_stochastic_depth = StochatichDepth(stochastic_depth) + self.use_layer_scale = use_layer_scale + if use_layer_scale: + self.layer_scale_1 = nn.Parameter( + torch.ones(dim_qkv) * layer_scale_value, requires_grad=True + ) + self.layer_scale_2 = nn.Parameter( + torch.ones(dim_qkv) * layer_scale_value, requires_grad=True + ) + + def forward( + self, + tokens: torch.Tensor, + cond_1: torch.Tensor, + cond_2: torch.Tensor, + cond_1_mask: Optional[torch.Tensor] = None, + token_mask: Optional[torch.Tensor] = None, + ): + if token_mask is None and cond_1_mask is None: + cond_attention_mask = None + elif token_mask is None: + cond_attention_mask = cond_1_mask.unsqueeze(1) * torch.ones( + cond_1.shape[0], + cond_1.shape[1], + 1, + dtype=torch.bool, + device=cond_1.device, + ) + elif cond_1_mask is None: + cond_attention_mask = torch.ones( + tokens.shape[0], + 1, + tokens.shape[1], + dtype=torch.bool, + device=tokens.device, + ) * token_mask.unsqueeze(2) + else: + cond_attention_mask = cond_1_mask.unsqueeze(1) * token_mask.unsqueeze(2) + if token_mask is None: + attention_mask = None + else: + attention_mask = token_mask.unsqueeze(1) * torch.ones( + tokens.shape[0], + tokens.shape[1], + 1, + dtype=torch.bool, + device=tokens.device, + ) + ca_output = self.ca( + self.ca_ln(tokens), + cond_1, + attention_mask=cond_attention_mask, + ) + ca_output = torch.nan_to_num(ca_output, nan=0.0, posinf=0.0, neginf=0.0) + tokens = tokens + self.ca_stochastic_depth(ca_output) + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = ( + self.adaln_modulation(cond_2).chunk(6, dim=-1) + ) + attention_output = self.sa( + modulate_shift_and_scale(self.initial_ln(tokens), shift_msa, scale_msa), + attention_mask=attention_mask, + ) + if self.use_layer_scale: + tokens = tokens + 
self.sa_stochastic_depth( + gate_msa.unsqueeze(1) * self.layer_scale_1 * attention_output + ) + tokens = tokens + self.ffn_stochastic_depth( + gate_mlp.unsqueeze(1) + * self.layer_scale_2 + * self.ffn( + modulate_shift_and_scale( + self.middle_ln(tokens), shift_mlp, scale_mlp + ) + ) + ) + else: + tokens = tokens + gate_msa.unsqueeze(1) * self.sa_stochastic_depth( + attention_output + ) + tokens = tokens + self.ffn_stochastic_depth( + gate_mlp.unsqueeze(1) + * self.ffn( + modulate_shift_and_scale( + self.middle_ln(tokens), shift_mlp, scale_mlp + ) + ) + ) + return tokens + + +class PositionalEmbedding(nn.Module): + """ + Taken from https://github.com/NVlabs/edm + """ + + def __init__(self, num_channels, max_positions=10000, endpoint=False): + super().__init__() + self.num_channels = num_channels + self.max_positions = max_positions + self.endpoint = endpoint + freqs = torch.arange(start=0, end=self.num_channels // 2, dtype=torch.float32) + freqs = 2 * freqs / self.num_channels + freqs = (1 / self.max_positions) ** freqs + self.register_buffer("freqs", freqs) + + def forward(self, x): + x = torch.outer(x, self.freqs) + out = torch.cat([x.cos(), x.sin()], dim=1) + return out.to(x.dtype) + + +class PositionalEncoding(nn.Module): + def __init__(self, d_model, dropout=0.0, max_len=10000): + super().__init__() + self.dropout = nn.Dropout(p=dropout) + + pe = torch.zeros(max_len, d_model) + position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1) + div_term = torch.exp( + torch.arange(0, d_model, 2).float() * (-np.log(10000.0) / d_model) + ) + pe[:, 0::2] = torch.sin(position * div_term) + pe[:, 1::2] = torch.cos(position * div_term) + pe = pe.unsqueeze(0) + + self.register_buffer("pe", pe) + + def forward(self, x): + # not used in the final model + x = x + self.pe[:, : x.shape[1], :] + return self.dropout(x) + + +class TimeEmbedder(nn.Module): + def __init__( + self, + dim: int, + time_scaling: float, + expansion: int = 4, + ): + super().__init__() + self.encode_time = PositionalEmbedding(num_channels=dim, endpoint=True) + + self.time_scaling = time_scaling + self.map_time = nn.Sequential( + nn.Linear(dim, dim * expansion), + nn.SiLU(), + nn.Linear(dim * expansion, dim * expansion), + ) + + def forward(self, t: Tensor) -> Tensor: + time = self.encode_time(t * self.time_scaling) + time_mean = time.mean(dim=-1, keepdim=True) + time_std = time.std(dim=-1, keepdim=True) + time = (time - time_mean) / time_std + return self.map_time(time) + + +def modulate_shift_and_scale(x: Tensor, shift: Tensor, scale: Tensor) -> Tensor: + return x * (1 + scale).unsqueeze(1) + shift.unsqueeze(1) + + +# ------------------------------------------------------------------------------------- # + + +class BaseDirector(nn.Module): + def __init__( + self, + name: str, + num_feats: int, + num_cond_feats: int, + num_cams: int, + latent_dim: int, + mlp_multiplier: int, + num_layers: int, + num_heads: int, + dropout: float, + stochastic_depth: float, + label_dropout: float, + num_rawfeats: int, + clip_sequential: bool = False, + cond_sequential: bool = False, + device: str = "cuda", + **kwargs, + ): + super().__init__() + self.name = name + self.label_dropout = label_dropout + self.num_rawfeats = num_rawfeats + self.num_feats = num_feats + self.num_cams = num_cams + self.clip_sequential = clip_sequential + self.cond_sequential = cond_sequential + self.use_layernorm16 = device == "cuda" + + self.input_projection = nn.Sequential( + nn.Linear(num_feats, latent_dim), + PositionalEncoding(latent_dim), + ) + 
self.time_embedding = TimeEmbedder(latent_dim // 4, time_scaling=1000) + self.init_conds_mappings(num_cond_feats, latent_dim) + self.init_backbone( + num_layers, latent_dim, mlp_multiplier, num_heads, dropout, stochastic_depth + ) + self.init_output_projection(num_feats, latent_dim) + + def forward( + self, + x: Tensor, + timesteps: Tensor, + y: List[Tensor] = None, + mask: Tensor = None, + ) -> Tensor: + mask = mask.logical_not() if mask is not None else None + x = rearrange(x, "b c n -> b n c") + x = self.input_projection(x) + t = self.time_embedding(timesteps) + if y is not None: + y = self.mask_cond(y) + y = self.cond_mapping(y, mask, t) + + x = self.backbone(x, y, mask) + x = self.output_projection(x, y) + return rearrange(x, "b n c -> b c n") + + def init_conds_mappings(self, num_cond_feats, latent_dim): + raise NotImplementedError( + "This method should be implemented in the derived class" + ) + + def init_backbone(self): + raise NotImplementedError( + "This method should be implemented in the derived class" + ) + + def cond_mapping(self, cond: List[Tensor], mask: Tensor, t: Tensor) -> Tensor: + raise NotImplementedError( + "This method should be implemented in the derived class" + ) + + def backbone(self, x: Tensor, y: Tensor, mask: Tensor) -> Tensor: + raise NotImplementedError( + "This method should be implemented in the derived class" + ) + + def mask_cond( + self, cond: List[TensorType["batch_size", "num_cond_feats"]] + ) -> TensorType["batch_size", "num_cond_feats"]: + bs = cond[0].shape[0] + if self.training and self.label_dropout > 0.0: + # 1-> use null_cond, 0-> use real cond + prob = torch.ones(bs, device=cond[0].device) * self.label_dropout + masked_cond = [] + common_mask = torch.bernoulli(prob) # Common to all modalities + for _cond in cond: + modality_mask = torch.bernoulli(prob) # Modality only + mask = torch.clip(common_mask + modality_mask, 0, 1) + mask = mask.view(bs, 1, 1) if _cond.dim() == 3 else mask.view(bs, 1) + masked_cond.append(_cond * (1.0 - mask)) + return masked_cond + else: + return cond + + def init_output_projection(self, num_feats, latent_dim): + raise NotImplementedError( + "This method should be implemented in the derived class" + ) + + def output_projection(self, x: Tensor, y: Tensor) -> Tensor: + raise NotImplementedError( + "This method should be implemented in the derived class" + ) + + +class AdaLNDirector(BaseDirector): + def __init__( + self, + name: str, + num_feats: int, + num_cond_feats: int, + num_cams: int, + latent_dim: int, + mlp_multiplier: int, + num_layers: int, + num_heads: int, + dropout: float, + stochastic_depth: float, + label_dropout: float, + num_rawfeats: int, + clip_sequential: bool = False, + cond_sequential: bool = False, + device: str = "cuda", + **kwargs, + ): + super().__init__( + name=name, + num_feats=num_feats, + num_cond_feats=num_cond_feats, + num_cams=num_cams, + latent_dim=latent_dim, + mlp_multiplier=mlp_multiplier, + num_layers=num_layers, + num_heads=num_heads, + dropout=dropout, + stochastic_depth=stochastic_depth, + label_dropout=label_dropout, + num_rawfeats=num_rawfeats, + clip_sequential=clip_sequential, + cond_sequential=cond_sequential, + device=device, + ) + assert not (clip_sequential and cond_sequential) + + def init_conds_mappings(self, num_cond_feats, latent_dim): + self.joint_cond_projection = nn.Linear(sum(num_cond_feats), latent_dim) + + def cond_mapping(self, cond: List[Tensor], mask: Tensor, t: Tensor) -> Tensor: + c_emb = torch.cat(cond, dim=-1) + return self.joint_cond_projection(c_emb) + 
t + + def init_backbone( + self, + num_layers, + latent_dim, + mlp_multiplier, + num_heads, + dropout, + stochastic_depth, + ): + self.backbone_module = nn.ModuleList( + [ + AdaLNSABlock( + dim_qkv=latent_dim, + dim_cond=latent_dim, + num_heads=num_heads, + mlp_multiplier=mlp_multiplier, + dropout=dropout, + stochastic_depth=stochastic_depth, + use_layernorm16=self.use_layernorm16, + ) + for _ in range(num_layers) + ] + ) + + def backbone(self, x: Tensor, y: Tensor, mask: Tensor) -> Tensor: + for block in self.backbone_module: + x = block(x, y, mask) + return x + + def init_output_projection(self, num_feats, latent_dim): + layer_norm = LayerNorm16Bits if self.use_layernorm16 else nn.LayerNorm + + self.final_norm = layer_norm(latent_dim, eps=1e-6, elementwise_affine=False) + self.final_linear = nn.Linear(latent_dim, num_feats, bias=True) + self.final_adaln = nn.Sequential( + nn.SiLU(), + nn.Linear(latent_dim, latent_dim * 2, bias=True), + ) + # Zero init + nn.init.zeros_(self.final_adaln[1].weight) + nn.init.zeros_(self.final_adaln[1].bias) + + def output_projection(self, x: Tensor, y: Tensor) -> Tensor: + shift, scale = self.final_adaln(y).chunk(2, dim=-1) + x = modulate_shift_and_scale(self.final_norm(x), shift, scale) + return self.final_linear(x) + + +class CrossAttentionDirector(BaseDirector): + def __init__( + self, + name: str, + num_feats: int, + num_cond_feats: int, + num_cams: int, + latent_dim: int, + mlp_multiplier: int, + num_layers: int, + num_heads: int, + dropout: float, + stochastic_depth: float, + label_dropout: float, + num_rawfeats: int, + num_text_registers: int, + clip_sequential: bool = True, + cond_sequential: bool = True, + device: str = "cuda", + **kwargs, + ): + self.num_text_registers = num_text_registers + self.num_heads = num_heads + self.dropout = dropout + self.mlp_multiplier = mlp_multiplier + self.stochastic_depth = stochastic_depth + super().__init__( + name=name, + num_feats=num_feats, + num_cond_feats=num_cond_feats, + num_cams=num_cams, + latent_dim=latent_dim, + mlp_multiplier=mlp_multiplier, + num_layers=num_layers, + num_heads=num_heads, + dropout=dropout, + stochastic_depth=stochastic_depth, + label_dropout=label_dropout, + num_rawfeats=num_rawfeats, + clip_sequential=clip_sequential, + cond_sequential=cond_sequential, + device=device, + ) + assert clip_sequential and cond_sequential + + def init_conds_mappings(self, num_cond_feats, latent_dim): + self.cond_projection = nn.ModuleList( + [nn.Linear(num_cond_feat, latent_dim) for num_cond_feat in num_cond_feats] + ) + self.cond_registers = nn.Parameter( + torch.randn(self.num_text_registers, latent_dim), requires_grad=True + ) + nn.init.trunc_normal_(self.cond_registers, std=0.02, a=-2 * 0.02, b=2 * 0.02) + self.cond_sa = nn.ModuleList( + [ + SelfAttentionBlock( + dim_qkv=latent_dim, + num_heads=self.num_heads, + mlp_multiplier=self.mlp_multiplier, + dropout=self.dropout, + stochastic_depth=self.stochastic_depth, + use_layernorm16=self.use_layernorm16, + ) + for _ in range(2) + ] + ) + self.cond_positional_embedding = PositionalEncoding(latent_dim, max_len=10000) + + def cond_mapping(self, cond: List[Tensor], mask: Tensor, t: Tensor) -> Tensor: + batch_size = cond[0].shape[0] + cond_emb = [ + cond_proj(rearrange(c, "b c n -> b n c")) + for cond_proj, c in zip(self.cond_projection, cond) + ] + cond_emb = [ + self.cond_registers.unsqueeze(0).expand(batch_size, -1, -1), + t.unsqueeze(1), + ] + cond_emb + cond_emb = torch.cat(cond_emb, dim=1) + cond_emb = self.cond_positional_embedding(cond_emb) + for 
block in self.cond_sa: + cond_emb = block(cond_emb) + return cond_emb + + def init_backbone( + self, + num_layers, + latent_dim, + mlp_multiplier, + num_heads, + dropout, + stochastic_depth, + ): + self.backbone_module = nn.ModuleList( + [ + CrossAttentionSABlock( + dim_qkv=latent_dim, + dim_cond=latent_dim, + num_heads=num_heads, + mlp_multiplier=mlp_multiplier, + dropout=dropout, + stochastic_depth=stochastic_depth, + use_layernorm16=self.use_layernorm16, + ) + for _ in range(num_layers) + ] + ) + + def backbone(self, x: Tensor, y: Tensor, mask: Tensor) -> Tensor: + for block in self.backbone_module: + x = block(x, y, mask, None) + return x + + def init_output_projection(self, num_feats, latent_dim): + layer_norm = LayerNorm16Bits if self.use_layernorm16 else nn.LayerNorm + + self.final_norm = layer_norm(latent_dim, eps=1e-6) + self.final_linear = nn.Linear(latent_dim, num_feats, bias=True) + + def output_projection(self, x: Tensor, y: Tensor) -> Tensor: + return self.final_linear(self.final_norm(x)) + + +class InContextDirector(BaseDirector): + def __init__( + self, + name: str, + num_feats: int, + num_cond_feats: int, + num_cams: int, + latent_dim: int, + mlp_multiplier: int, + num_layers: int, + num_heads: int, + dropout: float, + stochastic_depth: float, + label_dropout: float, + num_rawfeats: int, + clip_sequential: bool = False, + cond_sequential: bool = False, + device: str = "cuda", + **kwargs, + ): + super().__init__( + name=name, + num_feats=num_feats, + num_cond_feats=num_cond_feats, + num_cams=num_cams, + latent_dim=latent_dim, + mlp_multiplier=mlp_multiplier, + num_layers=num_layers, + num_heads=num_heads, + dropout=dropout, + stochastic_depth=stochastic_depth, + label_dropout=label_dropout, + num_rawfeats=num_rawfeats, + clip_sequential=clip_sequential, + cond_sequential=cond_sequential, + device=device, + ) + + def init_conds_mappings(self, num_cond_feats, latent_dim): + self.cond_projection = nn.ModuleList( + [nn.Linear(num_cond_feat, latent_dim) for num_cond_feat in num_cond_feats] + ) + + def cond_mapping(self, cond: List[Tensor], mask: Tensor, t: Tensor) -> Tensor: + for i in range(len(cond)): + if cond[i].dim() == 3: + cond[i] = rearrange(cond[i], "b c n -> b n c") + cond_emb = [cond_proj(c) for cond_proj, c in zip(self.cond_projection, cond)] + cond_emb = [c.unsqueeze(1) if c.dim() == 2 else c for c in cond_emb] + cond_emb = torch.cat([t.unsqueeze(1)] + cond_emb, dim=1) + return cond_emb + + def init_backbone( + self, + num_layers, + latent_dim, + mlp_multiplier, + num_heads, + dropout, + stochastic_depth, + ): + self.backbone_module = nn.ModuleList( + [ + SelfAttentionBlock( + dim_qkv=latent_dim, + num_heads=num_heads, + mlp_multiplier=mlp_multiplier, + dropout=dropout, + stochastic_depth=stochastic_depth, + use_layernorm16=self.use_layernorm16, + ) + for _ in range(num_layers) + ] + ) + + def backbone(self, x: Tensor, y: Tensor, mask: Tensor) -> Tensor: + bs, n_y, _ = y.shape + mask = torch.cat([torch.ones(bs, n_y, device=y.device), mask], dim=1) + x = torch.cat([y, x], dim=1) + for block in self.backbone_module: + x = block(x, mask) + return x + + def init_output_projection(self, num_feats, latent_dim): + layer_norm = LayerNorm16Bits if self.use_layernorm16 else nn.LayerNorm + + self.final_norm = layer_norm(latent_dim, eps=1e-6) + self.final_linear = nn.Linear(latent_dim, num_feats, bias=True) + + def output_projection(self, x: Tensor, y: Tensor) -> Tensor: + num_y = y.shape[1] + x = x[:, num_y:] + return self.final_linear(self.final_norm(x)) diff --git 
a/src/models/networks.py b/src/models/networks.py new file mode 100644 index 0000000000000000000000000000000000000000..e154566a87a12b2a255b7ebc76205ab24062a815 --- /dev/null +++ b/src/models/networks.py @@ -0,0 +1,33 @@ +import torch.nn as nn + + +# ---------------------------------------------------------------------------- +# Improved preconditioning proposed in the paper "Elucidating the Design +# Space of Diffusion-Based Generative Models" (EDM). + + +class RnEDMPrecond(nn.Module): + def __init__(self, sigma_data: float = 0.5, module: nn.Module = None, **kwargs): + super().__init__() + self.sigma_data = sigma_data + + self.model = module + self.num_rawfeats = module.num_rawfeats + self.num_feats = module.num_feats + self.num_cams = module.num_cams + + def forward(self, x, sigma, y=None, mask=None): + """ + x: [batch_size, num_feats, max_frames], denoted x_t in the paper + sigma: [batch_size] (int) + """ + sigma = sigma.reshape(-1, 1, 1) + c_skip = self.sigma_data**2 / (sigma**2 + self.sigma_data**2) + c_out = sigma * self.sigma_data / (sigma**2 + self.sigma_data**2).sqrt() + c_in = 1 / (self.sigma_data**2 + sigma**2).sqrt() + c_noise = sigma.log() / 4 + + F_x = self.model(c_in * x, c_noise.flatten(), y=y, mask=mask) + D_x = c_skip * x + c_out * F_x + + return D_x diff --git a/utils/common_viz.py b/utils/common_viz.py new file mode 100644 index 0000000000000000000000000000000000000000..a8bbc04844f52fa67e3cf9632c4f4b1d852b6957 --- /dev/null +++ b/utils/common_viz.py @@ -0,0 +1,137 @@ +from typing import Any, Dict, List, Tuple + +import clip +from hydra import compose, initialize +from hydra.utils import instantiate +from omegaconf import OmegaConf +import torch +from torchtyping import TensorType +from torch.utils.data import DataLoader +import torch.nn.functional as F + +from src.diffuser import Diffuser +from src.datasets.multimodal_dataset import MultimodalDataset + +# ------------------------------------------------------------------------------------- # + +batch_size, context_length = None, None +collate_fn = DataLoader([]).collate_fn + +# ------------------------------------------------------------------------------------- # + + +def to_device(batch: Dict[str, Any], device: torch.device) -> Dict[str, Any]: + for key, value in batch.items(): + if isinstance(value, torch.Tensor): + batch[key] = value.to(device) + return batch + + +def load_clip_model(version: str, device: str) -> clip.model.CLIP: + model, _ = clip.load(version, device=device, jit=False) + model.eval() + for p in model.parameters(): + p.requires_grad = False + return model + + +def encode_text( + caption_raws: List[str], # batch_size + clip_model: clip.model.CLIP, + max_token_length: int, + device: str, +) -> TensorType["batch_size", "context_length"]: + if max_token_length is not None: + default_context_length = 77 + context_length = max_token_length + 2 # start_token + 20 + end_token + assert context_length < default_context_length + # [bs, context_length] # if n_tokens > context_length -> will truncate + texts = clip.tokenize( + caption_raws, context_length=context_length, truncate=True + ) + zero_pad = torch.zeros( + [texts.shape[0], default_context_length - context_length], + dtype=texts.dtype, + device=texts.device, + ) + texts = torch.cat([texts, zero_pad], dim=1) + else: + # [bs, context_length] # if n_tokens > 77 -> will truncate + texts = clip.tokenize(caption_raws, truncate=True) + + # [batch_size, n_ctx, d_model] + x = clip_model.token_embedding(texts.to(device)).type(clip_model.dtype) + x = x + 
clip_model.positional_embedding.type(clip_model.dtype) + x = x.permute(1, 0, 2) # NLD -> LND + x = clip_model.transformer(x) + x = x.permute(1, 0, 2) # LND -> NLD + x = clip_model.ln_final(x).type(clip_model.dtype) + # x.shape = [batch_size, n_ctx, transformer.width] + # take features from the eot embedding (eot_token is the highest in each sequence) + x_tokens = x[torch.arange(x.shape[0]), texts.argmax(dim=-1)].float() + x_seq = [x[k, : (m + 1)].float() for k, m in enumerate(texts.argmax(dim=-1))] + + return x_seq, x_tokens + + +def get_batch( + prompt: str, + sample_id: str, + clip_model: clip.model.CLIP, + dataset: MultimodalDataset, + seq_feat: bool, + device: torch.device, +) -> Dict[str, Any]: + # Get base batch + sample_index = dataset.root_filenames.index(sample_id) + raw_batch = dataset[sample_index] + batch = collate_fn([to_device(raw_batch, device)]) + + # Encode text + caption_seq, caption_tokens = encode_text([prompt], clip_model, None, device) + print(caption_seq[0].device) + + if seq_feat: + caption_feat = caption_seq[0] + caption_feat = F.pad(caption_feat, (0, 0, 0, 77 - caption_feat.shape[0])) + caption_feat = caption_feat.unsqueeze(0).permute(0, 2, 1) + else: + caption_feat = caption_tokens + + # Update batch + batch["caption_raw"] = [prompt] + batch["caption_feat"] = caption_feat + + return batch + + +def init( + config_name: str, +) -> Tuple[Diffuser, clip.model.CLIP, MultimodalDataset]: + with initialize(version_base="1.3", config_path="../configs"): + config = compose(config_name=config_name) + + OmegaConf.register_new_resolver("eval", eval) + + # Initialize model + # device = torch.device(config.compnode.device) + diffuser = instantiate(config.diffuser) + state_dict = torch.load(config.checkpoint_path, map_location="cpu")["state_dict"] + state_dict["ema.initted"] = diffuser.ema.initted + state_dict["ema.step"] = diffuser.ema.step + diffuser.load_state_dict(state_dict, strict=False) + diffuser.to("cpu").eval() + + # Initialize CLIP model + clip_model = load_clip_model("ViT-B/32", "cpu") + + # Initialize dataset + config.dataset.char.load_vertices = True + config.batch_size = 1 + dataset = instantiate(config.dataset) + dataset.set_split("demo") + diffuser.modalities = list(dataset.modality_datasets.keys()) + diffuser.get_matrix = dataset.get_matrix + diffuser.v_get_matrix = dataset.get_matrix + + return diffuser, clip_model, dataset, config.compnode.device diff --git a/utils/file_utils.py b/utils/file_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..146501a33eb01954892795d3eb4a8fd67e35cd12 --- /dev/null +++ b/utils/file_utils.py @@ -0,0 +1,71 @@ +import os +import os.path as osp +import pickle +import subprocess +from typing import Any + +import h5py +import numpy as np +import torch + +num_channels, num_frames, height, width = None, None, None, None + + +def create_dir(dir_name: str): + """Create a directory if it does not exist yet.""" + if not osp.exists(dir_name): + os.makedirs(dir_name) + + +def move_files(source_path: str, destpath: str): + """Move files from `source_path` to `dest_path`.""" + subprocess.call(["mv", source_path, destpath]) + + +def load_pickle(pickle_path: str) -> Any: + """Load a pickle file.""" + with open(pickle_path, "rb") as f: + data = pickle.load(f) + return data + + +def load_hdf5(hdf5_path: str) -> Any: + with h5py.File(hdf5_path, "r") as h5file: + data = {key: np.array(value) for key, value in h5file.items()} + return data + + +def save_hdf5(data: Any, hdf5_path: str): + with h5py.File(hdf5_path, "w") as 
h5file: + for key, value in data.items(): + h5file.create_dataset(key, data=value) + + +def save_pickle(data: Any, pickle_path: str): + """Save data in a pickle file.""" + with open(pickle_path, "wb") as f: + pickle.dump(data, f, protocol=4) + + +def load_txt(txt_path: str): + """Load a txt file.""" + with open(txt_path, "r") as f: + data = f.read() + return data + + +def save_txt(data: str, txt_path: str): + """Save data in a txt file.""" + with open(txt_path, "w") as f: + f.write(data) + + +def load_pth(pth_path: str) -> Any: + """Load a pth (PyTorch) file.""" + data = torch.load(pth_path) + return data + + +def save_pth(data: Any, pth_path: str): + """Save data in a pth (PyTorch) file.""" + torch.save(data, pth_path) diff --git a/utils/random_utils.py b/utils/random_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..6e285463ed5f9dcc8127e4b2dc2458ce8d850d25 --- /dev/null +++ b/utils/random_utils.py @@ -0,0 +1,46 @@ +import numpy as np +import random +import torch + + +def set_random_seed(seed: int): + torch.manual_seed((seed) % (1 << 31)) + torch.cuda.manual_seed((seed) % (1 << 31)) + torch.cuda.manual_seed_all((seed) % (1 << 31)) + np.random.seed((seed) % (1 << 31)) + random.seed((seed) % (1 << 31)) + torch.backends.cudnn.benchmark = False + torch.backends.cudnn.deterministic = True + + +class StackedRandomGenerator: + """ + Wrapper for torch.Generator that allows specifying a different random seed for each + sample in a minibatch. + """ + + def __init__(self, device, seeds): + super().__init__() + self.generators = [ + torch.Generator(device).manual_seed(int(seed) % (1 << 31)) for seed in seeds + ] + + def randn_rn(self, size, **kwargs): + assert size[0] == len(self.generators) + return torch.stack( + [torch.randn(size[1:], generator=gen, **kwargs) for gen in self.generators] + ) + + def randn_like(self, input): + return self.randn_rn( + input.shape, dtype=input.dtype, layout=input.layout, device=input.device + ) + + def randint(self, *args, size, **kwargs): + assert size[0] == len(self.generators) + return torch.stack( + [ + torch.randint(*args, size=size[1:], generator=gen, **kwargs) + for gen in self.generators + ] + ) diff --git a/utils/rerun.py b/utils/rerun.py new file mode 100644 index 0000000000000000000000000000000000000000..b1091cab23516fd9bbec6c8d7165a2fcbf71962c --- /dev/null +++ b/utils/rerun.py @@ -0,0 +1,66 @@ +import numpy as np +from matplotlib import colormaps +import rerun as rr +from rerun.components import Material +from scipy.spatial import transform + + +def color_fn(x, cmap="tab10"): + return colormaps[cmap](x % colormaps[cmap].N) + + +def log_sample( + root_name: str, + traj: np.ndarray, + char_traj: np.ndarray, + K: np.ndarray, + vertices: np.ndarray, + faces: np.ndarray, + normals: np.ndarray, + caption: str, + mesh_masks: np.ndarray, +): + num_cameras = traj.shape[0] + + rr.log(root_name, rr.ViewCoordinates.RIGHT_HAND_Y_DOWN, timeless=True) + rr.log( + f"{root_name}/trajectory/points", + rr.Points3D(traj[:, :3, 3]), + timeless=True, + ) + rr.log( + f"{root_name}/trajectory/line", + rr.LineStrips3D( + np.stack((traj[:, :3, 3][:-1], traj[:, :3, 3][1:]), axis=1), + colors=[(1.0, 0.0, 1.0, 1.0)], + ), + timeless=True, + ) + for k in range(num_cameras): + rr.set_time_sequence("frame_idx", k) + + translation = traj[k][:3, 3] + rotation_q = transform.Rotation.from_matrix(traj[k][:3, :3]).as_quat() + rr.log( + f"{root_name}/camera/image", + rr.Pinhole( + image_from_camera=K, + width=K[0, -1] * 2, + height=K[1, -1] * 2, + ), + ) + rr.log( + 
f"{root_name}/camera", + rr.Transform3D( + translation=translation, + rotation=rr.Quaternion(xyzw=rotation_q), + ), + ) + rr.set_time_sequence("image", k) + + # Log character trajectory points + rr.log( + f"{root_name}/char_traj/points", + rr.Points3D(char_traj.T, colors=[(1.0, 0.0, 0.0, 1.0)]), + timeless=True, + ) diff --git a/utils/rotation_utils.py b/utils/rotation_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..47d16761e5ae1ab4cc3d408eaa31d0afe7f0ff65 --- /dev/null +++ b/utils/rotation_utils.py @@ -0,0 +1,178 @@ +import numpy as np +from scipy.spatial.transform import Rotation as R +import torch +from torchtyping import TensorType +from itertools import product + +num_samples, num_cams = None, None + + +def rotvec_to_matrix(rotvec): + return R.from_rotvec(rotvec).as_matrix() + + +def matrix_to_rotvec(mat): + return R.from_matrix(mat).as_rotvec() + + +def compose_rotvec(r1, r2): + """ + #TODO: adapt to torch + Compose two rotation euler vectors. + """ + r1 = r1.cpu().numpy() if isinstance(r1, torch.Tensor) else r1 + r2 = r2.cpu().numpy() if isinstance(r2, torch.Tensor) else r2 + + R1 = rotvec_to_matrix(r1) + R2 = rotvec_to_matrix(r2) + cR = np.einsum("...ij,...jk->...ik", R1, R2) + return torch.from_numpy(matrix_to_rotvec(cR)) + + +def quat_to_rotvec(quat, eps=1e-6): + # w > 0 to ensure 0 <= angle <= pi + flip = (quat[..., :1] < 0).float() + quat = (-1 * quat) * flip + (1 - flip) * quat + + angle = 2 * torch.atan2(torch.linalg.norm(quat[..., 1:], dim=-1), quat[..., 0]) + + angle2 = angle * angle + small_angle_scales = 2 + angle2 / 12 + 7 * angle2 * angle2 / 2880 + large_angle_scales = angle / torch.sin(angle / 2 + eps) + + small_angles = (angle <= 1e-3).float() + rot_vec_scale = ( + small_angle_scales * small_angles + (1 - small_angles) * large_angle_scales + ) + rot_vec = rot_vec_scale[..., None] * quat[..., 1:] + return rot_vec + + +# batch*n +def normalize_vector(v, return_mag=False): + batch = v.shape[0] + v_mag = torch.sqrt(v.pow(2).sum(1)) # batch + v_mag = torch.max( + v_mag, torch.autograd.Variable(torch.FloatTensor([1e-8])).to(v.device) + ) + v_mag = v_mag.view(batch, 1).expand(batch, v.shape[1]) + v = v / v_mag + if return_mag is True: + return v, v_mag[:, 0] + else: + return v + + +# u, v batch*n +def cross_product(u, v): + batch = u.shape[0] + i = u[:, 1] * v[:, 2] - u[:, 2] * v[:, 1] + j = u[:, 2] * v[:, 0] - u[:, 0] * v[:, 2] + k = u[:, 0] * v[:, 1] - u[:, 1] * v[:, 0] + + out = torch.cat( + (i.view(batch, 1), j.view(batch, 1), k.view(batch, 1)), 1 + ) # [batch, 6] + + return out + + +def compute_rotation_matrix_from_ortho6d(ortho6d): + x_raw = ortho6d[:, 0:3] # [batch, 6] + y_raw = ortho6d[:, 3:6] # [batch, 6] + + x = normalize_vector(x_raw) # [batch, 6] + z = cross_product(x, y_raw) # [batch, 6] + z = normalize_vector(z) # [batch, 6] + y = cross_product(z, x) # [batch, 6] + + x = x.view(-1, 3, 1) + y = y.view(-1, 3, 1) + z = z.view(-1, 3, 1) + matrix = torch.cat((x, y, z), 2) # [batch, 3, 3] + return matrix + + +def invert_rotvec(rotvec: TensorType["num_samples", 3]): + angle = torch.norm(rotvec, dim=-1) + axis = rotvec / (angle.unsqueeze(-1) + 1e-6) + inverted_rotvec = -angle.unsqueeze(-1) * axis + return inverted_rotvec + + +def are_rotations(matrix: TensorType["num_samples", 3, 3]) -> TensorType["num_samples"]: + """Check if a matrix is a rotation matrix.""" + # Check if the matrix is orthogonal + identity = torch.eye(3, device=matrix.device) + is_orthogonal = ( + torch.isclose(torch.bmm(matrix, matrix.transpose(1, 2)), identity, 
atol=1e-6) + .all(dim=1) + .all(dim=1) + ) + + # Check if the determinant is 1 + determinant = torch.det(matrix) + is_determinant_one = torch.isclose( + determinant, torch.tensor(1.0, device=matrix.device), atol=1e-6 + ) + + return torch.logical_and(is_orthogonal, is_determinant_one) + + +def project_so3( + matrix: TensorType["num_samples", 4, 4] +) -> TensorType["num_samples", 4, 4]: + # Project rotation matrix to SO(3) + # TODO: use torch + rot = R.from_matrix(matrix[:, :3, :3].cpu().numpy()).as_matrix() + + projection = torch.eye(4).unsqueeze(0).repeat(matrix.shape[0], 1, 1).to(matrix) + projection[:, :3, :3] = torch.from_numpy(rot).to(matrix) + projection[:, :3, 3] = matrix[:, :3, 3] + + return projection + + +def pairwise_geodesic( + R_x: TensorType["num_samples", "num_cams", 3, 3], + R_y: TensorType["num_samples", "num_cams", 3, 3], + reduction: str = "mean", + block_size: int = 200, +): + def arange(start, stop, step, endpoint=True): + arr = torch.arange(start, stop, step) + if endpoint and arr[-1] != stop - 1: + arr = torch.cat((arr, torch.tensor([stop - 1], dtype=arr.dtype))) + return arr + + # Geodesic distance + # https://math.stackexchange.com/questions/2113634/comparing-two-rotation-matrices + num_samples, num_cams, _, _ = R_x.shape + + C = torch.zeros(num_samples, num_samples, device=R_x.device) + chunk_indices = arange(0, num_samples + 1, block_size, endpoint=True) + for i, j in product( + range(chunk_indices.shape[0] - 1), range(chunk_indices.shape[0] - 1) + ): + start_x, stop_x = chunk_indices[i], chunk_indices[i + 1] + start_y, stop_y = chunk_indices[j], chunk_indices[j + 1] + r_x, r_y = R_x[start_x:stop_x], R_y[start_y:stop_y] + + # Compute rotations between each pair of cameras of each sample + r_xy = torch.einsum("anjk,bnlk->abnjl", r_x, r_y) # b, b, N, 3, 3 + + # Compute axis-angle representations: angle is the geodesic distance + traces = r_xy.diagonal(dim1=-2, dim2=-1).sum(-1) + c = torch.acos(torch.clamp((traces - 1) / 2, -1, 1)) / torch.pi + + # Average distance between cameras over samples + if reduction == "mean": + C[start_x:stop_x, start_y:stop_y] = c.mean(-1) + elif reduction == "sum": + C[start_x:stop_x, start_y:stop_y] = c.sum(-1) + + # Check for NaN values in traces + if torch.isnan(c).any(): + raise ValueError("NaN values detected in traces") + + return C
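A few usage notes on the utilities introduced above. `RnEDMPrecond` follows the EDM preconditioning of Karras et al. (2022): the denoiser is evaluated as D(x; σ) = c_skip(σ)·x + c_out(σ)·F(c_in(σ)·x, c_noise(σ)), with c_skip = σ_d² / (σ² + σ_d²), c_out = σ·σ_d / sqrt(σ² + σ_d²), c_in = 1 / sqrt(σ_d² + σ²), and c_noise = ln(σ)/4, exactly as computed in `forward`. The sketch below only illustrates the expected tensor shapes; `DummyNet` is a hypothetical stand-in for a DIRECTOR backbone and is not part of this repository, and the feature/frame sizes are illustrative.

```python
import torch
import torch.nn as nn

from src.models.networks import RnEDMPrecond


class DummyNet(nn.Module):
    """Hypothetical stand-in backbone: returns its input unchanged."""

    # Attributes that RnEDMPrecond copies from the wrapped module.
    num_rawfeats, num_feats, num_cams = 12, 9, 1

    def forward(self, x, c_noise, y=None, mask=None):
        return x  # [batch_size, num_feats, max_frames]


precond = RnEDMPrecond(sigma_data=0.5, module=DummyNet())
x = torch.randn(2, 9, 300)          # noisy trajectories x_t
sigma = torch.tensor([1.0, 80.0])   # one noise level per sample
d_x = precond(x, sigma)             # denoised estimate, same shape as x
print(d_x.shape)                    # torch.Size([2, 9, 300])
```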
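`encode_text` returns two views of the same CLIP encoding: `x_seq`, the per-token features up to and including the EOT token (used when `clip_sequential` is enabled), and `x_tokens`, the pooled EOT feature. `get_batch` then zero-pads the sequential variant to CLIP's 77-token context and transposes it to `[1, width, 77]`. A minimal sketch, assuming a CPU ViT-B/32 download is acceptable and that its transformer width is 512:

```python
import clip
import torch.nn.functional as F

from utils.common_viz import encode_text, load_clip_model

clip_model = load_clip_model("ViT-B/32", "cpu")
x_seq, x_tokens = encode_text(
    ["While the character moves right, the camera trucks right."],
    clip_model,
    max_token_length=None,
    device="cpu",
)
print(x_tokens.shape)  # [1, 512] pooled EOT feature
print(x_seq[0].shape)  # [n_tokens, 512] per-token features (prompt dependent)

# Same padding and layout as get_batch uses for sequential conditioning.
caption_feat = F.pad(x_seq[0], (0, 0, 0, 77 - x_seq[0].shape[0]))
caption_feat = caption_feat.unsqueeze(0).permute(0, 2, 1)  # [1, 512, 77]
```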
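`StackedRandomGenerator` mirrors the per-sample seeding trick from the EDM reference code: each element of a batch gets its own `torch.Generator`, so a sample's noise depends only on its seed, not on which other samples happen to share the batch. A minimal sketch on CPU:

```python
import torch

from utils.random_utils import StackedRandomGenerator

# Two batches that share seed 7 in different positions.
rng_a = StackedRandomGenerator("cpu", seeds=[7, 8])
rng_b = StackedRandomGenerator("cpu", seeds=[3, 7])

noise_a = rng_a.randn_rn((2, 9, 300))
noise_b = rng_b.randn_rn((2, 9, 300))

# The sample seeded with 7 receives identical noise in both batches.
assert torch.equal(noise_a[0], noise_b[1])
```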
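`pairwise_geodesic` returns a `num_samples x num_samples` matrix of rotation distances, averaged (or summed) over the cameras of each trajectory and normalised by π, computed block-wise to bound memory. A small self-check, assuming SciPy (already imported by `utils/rotation_utils.py`) is used to draw random rotations; the sample and camera counts are illustrative:

```python
import torch
from scipy.spatial.transform import Rotation as R

from utils.rotation_utils import pairwise_geodesic

num_samples, num_cams = 4, 30
rots = R.random(num_samples * num_cams, random_state=0).as_matrix()
R_x = torch.from_numpy(rots).reshape(num_samples, num_cams, 3, 3).float()

# The distance of every trajectory to itself is zero, up to numerical precision.
C = pairwise_geodesic(R_x, R_x, reduction="mean")
print(C.shape)                         # torch.Size([4, 4])
print(torch.diagonal(C).abs().max())   # ~0
```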