diff --git a/moe_9L_loop_seed42.log b/moe_9L_loop_seed42.log new file mode 100644 index 0000000000..1efa32609a --- /dev/null +++ b/moe_9L_loop_seed42.log @@ -0,0 +1,90 @@ +logs/9188ac40-ddbd-42b3-8bd8-47538890494d.txt +val_bpb:enabled tokenizer_kind=sentencepiece tokenizer_path=./data/tokenizers/fineweb_8192_bpe.model +train_loader:dataset:fineweb10B_sp8192 train_shards:5 +val_loader:shards pattern=./data/datasets/fineweb10B_sp8192/fineweb_val_*.bin tokens:40540160 +model_params:30500020 +world_size:1 grad_accum_steps:8 +sdp_backends:cudnn=False flash=True mem_efficient=False math=False +attention_mode:gqa num_heads:8 num_kv_heads:4 +tie_embeddings:True embed_lr:0.03 head_lr:0.0 matrix_lr:0.022 scalar_lr:0.02 +train_batch_tokens:524288 train_seq_len:2048 iterations:5000 warmup_steps:20 max_wallclock_seconds:0.000 +seed:42 +warmup_step:1/20 +warmup_step:2/20 +warmup_step:3/20 +warmup_step:4/20 +warmup_step:5/20 +warmup_step:6/20 +warmup_step:7/20 +warmup_step:8/20 +warmup_step:9/20 +warmup_step:10/20 +warmup_step:11/20 +warmup_step:12/20 +warmup_step:13/20 +warmup_step:14/20 +warmup_step:15/20 +warmup_step:16/20 +warmup_step:17/20 +warmup_step:18/20 +warmup_step:19/20 +warmup_step:20/20 +bigram_blend:enabled lambda=0.03 +step:0/5000 val_loss:7.9749 val_bpb:3.0873 train_time:0ms step_avg:0.02ms +step:1/5000 train_loss:9.0055 train_time:2227ms step_avg:2227.42ms +step:2/5000 train_loss:12.0595 train_time:2698ms step_avg:1348.95ms +step:3/5000 train_loss:8.9047 train_time:3175ms step_avg:1058.38ms +step:4/5000 train_loss:8.2363 train_time:3652ms step_avg:912.91ms +step:5/5000 train_loss:8.3311 train_time:4127ms step_avg:825.33ms +step:6/5000 train_loss:8.5817 train_time:4602ms step_avg:767.06ms +step:7/5000 train_loss:8.3936 train_time:5081ms step_avg:725.83ms +step:8/5000 train_loss:7.8929 train_time:5559ms step_avg:694.90ms +step:9/5000 train_loss:7.4203 train_time:6037ms step_avg:670.74ms +step:10/5000 train_loss:7.1254 train_time:6514ms step_avg:651.41ms +step:500/5000 train_loss:3.4080 train_time:240909ms step_avg:481.82ms +step:1000/5000 train_loss:3.3411 train_time:480513ms step_avg:480.51ms +step:1500/5000 train_loss:3.3344 train_time:719894ms step_avg:479.93ms +moe:upcycled frac:0.300 layers:[4, 5] experts:2 +/data/users/maxiv25/parameter-golf/.venv/lib/python3.11/site-packages/torch/_inductor/lowering.py:7836: UserWarning: +Online softmax is disabled on the fly since Inductor decides to +split the reduction. Cut an issue to PyTorch if this is an +important use case and you want to speed it up with online +softmax. + + warnings.warn( +loop:activated frac:0.350 enc:[0, 1, 2, 3, 4, 5, 3] dec:[4, 5, 3, 4, 5, 6, 7, 8] +/data/users/maxiv25/parameter-golf/.venv/lib/python3.11/site-packages/torch/_inductor/lowering.py:7836: UserWarning: +Online softmax is disabled on the fly since Inductor decides to +split the reduction. Cut an issue to PyTorch if this is an +important use case and you want to speed it up with online +softmax. 
+ + warnings.warn( +step:2000/5000 train_loss:3.2514 train_time:1148214ms step_avg:574.11ms +step:2500/5000 train_loss:3.1740 train_time:1601844ms step_avg:640.74ms +step:3000/5000 train_loss:3.1046 train_time:2054922ms step_avg:684.97ms +step:3500/5000 train_loss:3.0186 train_time:2508431ms step_avg:716.69ms +step:4000/5000 train_loss:2.9809 train_time:2961344ms step_avg:740.34ms +/data/users/maxiv25/parameter-golf/.venv/lib/python3.11/site-packages/torch/_inductor/lowering.py:7836: UserWarning: +Online softmax is disabled on the fly since Inductor decides to +split the reduction. Cut an issue to PyTorch if this is an +important use case and you want to speed it up with online +softmax. + + warnings.warn( +step:4500/5000 train_loss:3.0072 train_time:3471175ms step_avg:771.37ms +step:5000/5000 train_loss:2.9481 train_time:3924179ms step_avg:784.84ms +bigram_blend:enabled lambda=0.03 +step:5000/5000 val_loss:2.8651 val_bpb:1.1092 train_time:3924180ms step_avg:784.84ms +peak memory allocated: 28135 MiB reserved: 34924 MiB +Serialized model: 129901107 bytes +Code size: 75236 bytes +Total submission size: 129976343 bytes +Applying EMA weights. +gptq:collecting Hessians from 16 calibration batches... +gptq:collected 58 Hessians in 2.9s +Serialized model int6+lzma: 14893968 bytes (payload:34938360 raw_torch:34990635 payload_ratio:3.72x) +Total submission size int6+lzma: 14969204 bytes +bigram_blend:enabled lambda=0.03 +final_int6_lzma_roundtrip val_loss:3.4529 val_bpb:1.3367 eval_time:70248ms +final_int6_lzma_roundtrip_exact val_loss:3.45288030 val_bpb:1.33671757 diff --git a/moe_9L_seed0.log b/moe_9L_seed0.log new file mode 100644 index 0000000000..b25e022f35 --- /dev/null +++ b/moe_9L_seed0.log @@ -0,0 +1,69 @@ +logs/f65c3dc4-14fa-4afd-a1b1-d814ecf3e9b2.txt +val_bpb:enabled tokenizer_kind=sentencepiece tokenizer_path=./data/tokenizers/fineweb_8192_bpe.model +train_loader:dataset:fineweb10B_sp8192 train_shards:5 +val_loader:shards pattern=./data/datasets/fineweb10B_sp8192/fineweb_val_*.bin tokens:40540160 +model_params:30500020 +world_size:1 grad_accum_steps:8 +sdp_backends:cudnn=False flash=True mem_efficient=False math=False +attention_mode:gqa num_heads:8 num_kv_heads:4 +tie_embeddings:True embed_lr:0.03 head_lr:0.0 matrix_lr:0.022 scalar_lr:0.02 +train_batch_tokens:524288 train_seq_len:2048 iterations:5000 warmup_steps:20 max_wallclock_seconds:0.000 +seed:0 +warmup_step:1/20 +warmup_step:2/20 +warmup_step:3/20 +warmup_step:4/20 +warmup_step:5/20 +warmup_step:6/20 +warmup_step:7/20 +warmup_step:8/20 +warmup_step:9/20 +warmup_step:10/20 +warmup_step:11/20 +warmup_step:12/20 +warmup_step:13/20 +warmup_step:14/20 +warmup_step:15/20 +warmup_step:16/20 +warmup_step:17/20 +warmup_step:18/20 +warmup_step:19/20 +warmup_step:20/20 +bigram_blend:enabled lambda=0.03 +step:0/5000 val_loss:7.9755 val_bpb:3.0876 train_time:0ms step_avg:0.02ms +step:1/5000 train_loss:9.0071 train_time:1765ms step_avg:1764.93ms +step:2/5000 train_loss:12.0765 train_time:2242ms step_avg:1120.82ms +step:3/5000 train_loss:8.8985 train_time:2722ms step_avg:907.29ms +step:4/5000 train_loss:8.2204 train_time:3203ms step_avg:800.63ms +step:5/5000 train_loss:8.3057 train_time:3684ms step_avg:736.89ms +step:6/5000 train_loss:8.5536 train_time:4163ms step_avg:693.83ms +step:7/5000 train_loss:8.3750 train_time:4649ms step_avg:664.09ms +step:8/5000 train_loss:7.9176 train_time:5128ms step_avg:641.03ms +step:9/5000 train_loss:7.5210 train_time:5607ms step_avg:623.01ms +step:10/5000 train_loss:7.1839 train_time:6090ms step_avg:608.99ms 
+step:500/5000 train_loss:3.4036 train_time:243114ms step_avg:486.23ms +step:1000/5000 train_loss:3.3408 train_time:485017ms step_avg:485.02ms +step:1500/5000 train_loss:3.3339 train_time:727264ms step_avg:484.84ms +moe:upcycled frac:0.300 layers:[4, 5] experts:2 +loop:activated frac:0.350 enc:[0, 1, 2, 3, 4, 5, 3] dec:[4, 5, 3, 4, 5, 6, 7, 8] +step:2000/5000 train_loss:3.2493 train_time:1093182ms step_avg:546.59ms +step:2500/5000 train_loss:3.1728 train_time:1550737ms step_avg:620.29ms +step:3000/5000 train_loss:3.1053 train_time:2007804ms step_avg:669.27ms +step:3500/5000 train_loss:3.0198 train_time:2465082ms step_avg:704.31ms +step:4000/5000 train_loss:2.9812 train_time:2921789ms step_avg:730.45ms +step:4500/5000 train_loss:3.0061 train_time:3397563ms step_avg:755.01ms +step:5000/5000 train_loss:2.9486 train_time:3854587ms step_avg:770.92ms +bigram_blend:enabled lambda=0.03 +step:5000/5000 val_loss:2.8643 val_bpb:1.1089 train_time:3854587ms step_avg:770.92ms +peak memory allocated: 28135 MiB reserved: 34888 MiB +Serialized model: 129901107 bytes +Code size: 75236 bytes +Total submission size: 129976343 bytes +Applying EMA weights. +gptq:collecting Hessians from 16 calibration batches... +gptq:collected 58 Hessians in 4.5s +Serialized model int6+lzma: 14952076 bytes (payload:34938360 raw_torch:34990635 payload_ratio:3.72x) +Total submission size int6+lzma: 15027312 bytes +bigram_blend:enabled lambda=0.03 +final_int6_lzma_roundtrip val_loss:3.4163 val_bpb:1.3226 eval_time:70410ms +final_int6_lzma_roundtrip_exact val_loss:3.41628675 val_bpb:1.32255106 diff --git a/moe_9L_seed314.log b/moe_9L_seed314.log new file mode 100644 index 0000000000..8a62e89ffe --- /dev/null +++ b/moe_9L_seed314.log @@ -0,0 +1,76 @@ +logs/073d0695-e326-4a44-8839-d3e4ffb69098.txt +val_bpb:enabled tokenizer_kind=sentencepiece tokenizer_path=./data/tokenizers/fineweb_8192_bpe.model +train_loader:dataset:fineweb10B_sp8192 train_shards:5 +val_loader:shards pattern=./data/datasets/fineweb10B_sp8192/fineweb_val_*.bin tokens:40540160 +model_params:30500020 +world_size:1 grad_accum_steps:8 +sdp_backends:cudnn=False flash=True mem_efficient=False math=False +attention_mode:gqa num_heads:8 num_kv_heads:4 +tie_embeddings:True embed_lr:0.03 head_lr:0.0 matrix_lr:0.022 scalar_lr:0.02 +train_batch_tokens:524288 train_seq_len:2048 iterations:5000 warmup_steps:20 max_wallclock_seconds:0.000 +seed:314 +warmup_step:1/20 +warmup_step:2/20 +warmup_step:3/20 +warmup_step:4/20 +warmup_step:5/20 +warmup_step:6/20 +warmup_step:7/20 +warmup_step:8/20 +warmup_step:9/20 +warmup_step:10/20 +warmup_step:11/20 +warmup_step:12/20 +warmup_step:13/20 +warmup_step:14/20 +warmup_step:15/20 +warmup_step:16/20 +warmup_step:17/20 +warmup_step:18/20 +warmup_step:19/20 +warmup_step:20/20 +bigram_blend:enabled lambda=0.03 +step:0/5000 val_loss:7.9752 val_bpb:3.0875 train_time:0ms step_avg:0.02ms +step:1/5000 train_loss:9.0068 train_time:1834ms step_avg:1834.21ms +step:2/5000 train_loss:12.1178 train_time:2308ms step_avg:1153.99ms +step:3/5000 train_loss:8.8683 train_time:2785ms step_avg:928.42ms +step:4/5000 train_loss:8.2249 train_time:3269ms step_avg:817.16ms +step:5/5000 train_loss:8.2860 train_time:3760ms step_avg:751.91ms +step:6/5000 train_loss:8.4848 train_time:4237ms step_avg:706.15ms +step:7/5000 train_loss:8.2269 train_time:4714ms step_avg:673.49ms +step:8/5000 train_loss:7.7811 train_time:5192ms step_avg:649.05ms +step:9/5000 train_loss:7.3688 train_time:5670ms step_avg:630.03ms +step:10/5000 train_loss:7.1103 train_time:6149ms 
step_avg:614.86ms +step:500/5000 train_loss:3.4147 train_time:242442ms step_avg:484.88ms +step:1000/5000 train_loss:3.3501 train_time:483833ms step_avg:483.83ms +step:1500/5000 train_loss:3.3359 train_time:725289ms step_avg:483.53ms +moe:upcycled frac:0.300 layers:[4, 5] experts:2 +loop:activated frac:0.350 enc:[0, 1, 2, 3, 4, 5, 3] dec:[4, 5, 3, 4, 5, 6, 7, 8] +step:2000/5000 train_loss:3.2514 train_time:1100487ms step_avg:550.24ms +step:2500/5000 train_loss:3.1749 train_time:1555082ms step_avg:622.03ms +step:3000/5000 train_loss:3.1061 train_time:2008848ms step_avg:669.62ms +step:3500/5000 train_loss:3.0205 train_time:2463146ms step_avg:703.76ms +step:4000/5000 train_loss:2.9836 train_time:2916879ms step_avg:729.22ms +/data/users/maxiv25/parameter-golf/.venv/lib/python3.11/site-packages/torch/_inductor/lowering.py:7836: UserWarning: +Online softmax is disabled on the fly since Inductor decides to +split the reduction. Cut an issue to PyTorch if this is an +important use case and you want to speed it up with online +softmax. + + warnings.warn( +step:4500/5000 train_loss:3.0111 train_time:3427400ms step_avg:761.64ms +step:5000/5000 train_loss:2.9506 train_time:3881539ms step_avg:776.31ms +bigram_blend:enabled lambda=0.03 +step:5000/5000 val_loss:2.8660 val_bpb:1.1095 train_time:3881539ms step_avg:776.31ms +peak memory allocated: 28135 MiB reserved: 34888 MiB +Serialized model: 129901107 bytes +Code size: 75236 bytes +Total submission size: 129976343 bytes +Applying EMA weights. +gptq:collecting Hessians from 16 calibration batches... +gptq:collected 58 Hessians in 3.4s +Serialized model int6+lzma: 14982264 bytes (payload:34938360 raw_torch:34990635 payload_ratio:3.72x) +Total submission size int6+lzma: 15057500 bytes +bigram_blend:enabled lambda=0.03 +final_int6_lzma_roundtrip val_loss:3.5358 val_bpb:1.3688 eval_time:70282ms +final_int6_lzma_roundtrip_exact val_loss:3.53581238 val_bpb:1.36882316 diff --git a/train_gpt_exp4_moe.py b/train_gpt_exp4_moe.py new file mode 100644 index 0000000000..ac6164935d --- /dev/null +++ b/train_gpt_exp4_moe.py @@ -0,0 +1,1505 @@ +from __future__ import annotations + +import copy +import glob +import io +import math +import os +import random +import subprocess +import sys +import time +import uuid +import zlib +import lzma +from pathlib import Path + +import numpy as np +import sentencepiece as spm +import torch +import torch.distributed as dist +import torch.nn.functional as F +from torch import Tensor, nn +from torch.nn.parallel import DistributedDataParallel as DDP + +class Hyperparameters: + data_path = os.environ.get("DATA_PATH", "./data/datasets/fineweb10B_sp8192") + train_files = os.path.join(data_path, "fineweb_train_*.bin") + val_files = os.path.join(data_path, "fineweb_val_*.bin") + tokenizer_path = os.environ.get("TOKENIZER_PATH", "./data/tokenizers/fineweb_8192_bpe.model") + run_id = os.environ.get("RUN_ID", str(uuid.uuid4())) + seed = int(os.environ.get("SEED", 1337)) + + val_batch_size = int(os.environ.get("VAL_BATCH_SIZE", 524_288)) + val_loss_every = int(os.environ.get("VAL_LOSS_EVERY", 1000)) + train_log_every = int(os.environ.get("TRAIN_LOG_EVERY", 200)) + + iterations = int(os.environ.get("ITERATIONS", 20000)) + warmup_steps = int(os.environ.get("WARMUP_STEPS", 20)) + train_batch_tokens = int(os.environ.get("TRAIN_BATCH_TOKENS", 524_288)) + train_seq_len = int(os.environ.get("TRAIN_SEQ_LEN", 2048)) + eval_stride = int(os.environ.get("EVAL_STRIDE", train_seq_len)) + max_wallclock_seconds = float(os.environ.get("MAX_WALLCLOCK_SECONDS", 
600.0)) + qk_gain_init = float(os.environ.get("QK_GAIN_INIT", 5.25)) + + vocab_size = int(os.environ.get("VOCAB_SIZE", 8192)) + num_layers = int(os.environ.get("NUM_LAYERS", 11)) + num_kv_heads = int(os.environ.get("NUM_KV_HEADS", 4)) + model_dim = int(os.environ.get("MODEL_DIM", 512)) + num_heads = int(os.environ.get("NUM_HEADS", 8)) + mlp_mult = int(os.environ.get("MLP_MULT", 4)) + num_loops = int(os.environ.get("NUM_LOOPS", 2)) + loop_start = int(os.environ.get("LOOP_START", 3)) + loop_end = int(os.environ.get("LOOP_END", 5)) + enable_looping_at = float(os.environ.get("ENABLE_LOOPING_AT", 0.35)) + parallel_start_layer = int(os.environ.get("PARALLEL_START_LAYER", 7)) + tie_embeddings = bool(int(os.environ.get("TIE_EMBEDDINGS", "1"))) + rope_base = float(os.environ.get("ROPE_BASE", 10000.0)) + logit_softcap = float(os.environ.get("LOGIT_SOFTCAP", 30.0)) + + embed_lr = float(os.environ.get("EMBED_LR", 0.6)) + head_lr = float(os.environ.get("HEAD_LR", 0.008)) + tied_embed_lr = float(os.environ.get("TIED_EMBED_LR", 0.03)) + tied_embed_init_std = float(os.environ.get("TIED_EMBED_INIT_STD", 0.005)) + matrix_lr = float(os.environ.get("MATRIX_LR", 0.022)) + scalar_lr = float(os.environ.get("SCALAR_LR", 0.02)) + muon_momentum = float(os.environ.get("MUON_MOMENTUM", 0.99)) + muon_backend_steps = int(os.environ.get("MUON_BACKEND_STEPS", 5)) + muon_momentum_warmup_start = float(os.environ.get("MUON_MOMENTUM_WARMUP_START", 0.92)) + muon_momentum_warmup_steps = int(os.environ.get("MUON_MOMENTUM_WARMUP_STEPS", 1500)) + beta1 = float(os.environ.get("BETA1", 0.9)) + beta2 = float(os.environ.get("BETA2", 0.95)) + adam_eps = float(os.environ.get("ADAM_EPS", 1e-8)) + grad_clip_norm = float(os.environ.get("GRAD_CLIP_NORM", 0.3)) + warmdown_frac = float(os.environ.get("WARMDOWN_FRAC", 0.72)) + ema_decay = float(os.environ.get("EMA_DECAY", 0.9965)) + muon_wd = float(os.environ.get("MUON_WD", 0.095)) + embed_wd = float(os.environ.get("EMBED_WD", 0.085)) + min_lr = float(os.environ.get("MIN_LR", 0.10)) + bigram_buckets = int(os.environ.get("BIGRAM_BUCKETS", 2048)) + bigram_dim = int(os.environ.get("BIGRAM_DIM", 128)) + gate_window = int(os.environ.get("GATE_WINDOW", 12)) + bigram_blend_enabled = bool(int(os.environ.get("BIGRAM_BLEND_ENABLED", "0"))) + bigram_blend_lambda = float(os.environ.get("BIGRAM_BLEND_LAMBDA", 0.03)) + moe_start = int(os.environ.get("MOE_START", 5)) + moe_end = int(os.environ.get("MOE_END", 7)) + moe_num_experts = int(os.environ.get("MOE_NUM_EXPERTS", 2)) + moe_aux_weight = float(os.environ.get("MOE_AUX_WEIGHT", 0.01)) + enable_moe_at = float(os.environ.get("ENABLE_MOE_AT", 0.30)) + gptq_calib_batches = int(os.environ.get("GPTQ_CALIB_BATCHES", 64)) + +def zeropower_via_newtonschulz5(G: Tensor, steps: int = 10, eps: float = 1e-7) -> Tensor: + a, b, c = (3.4445, -4.7750, 2.0315) + X = G.bfloat16() + X /= X.norm() + eps + transposed = G.size(0) > G.size(1) + if transposed: + X = X.T + for _ in range(steps): + A = X @ X.T + B = b * A + c * A @ A + X = a * X + B @ X + return X.T if transposed else X + +class Muon(torch.optim.Optimizer): + def __init__(self, params, lr: float, momentum: float, backend_steps: int, + nesterov: bool = True, weight_decay: float = 0.0, row_normalize: bool = False): + super().__init__( + params, + dict(lr=lr, momentum=momentum, backend_steps=backend_steps, + nesterov=nesterov, weight_decay=weight_decay, row_normalize=row_normalize), + ) + + @torch.no_grad() + def step(self, closure=None): + loss = None + if closure is not None: + with torch.enable_grad(): + loss = 
closure() + + distributed = dist.is_available() and dist.is_initialized() + world_size = dist.get_world_size() if distributed else 1 + rank = dist.get_rank() if distributed else 0 + + for group in self.param_groups: + params = group["params"] + if not params: + continue + lr = group["lr"] + momentum = group["momentum"] + backend_steps = group["backend_steps"] + nesterov = group["nesterov"] + + total_params = sum(int(p.numel()) for p in params) + updates_flat = torch.zeros(total_params, device=params[0].device, dtype=torch.bfloat16) + + curr = 0 + for i, p in enumerate(params): + if i % world_size == rank and p.grad is not None: + g = p.grad + state = self.state[p] + if "momentum_buffer" not in state: + state["momentum_buffer"] = torch.zeros_like(g) + buf = state["momentum_buffer"] + buf.mul_(momentum).add_(g) + if nesterov: + g = g.add(buf, alpha=momentum) + if group.get("row_normalize", False): + row_norms = g.float().norm(dim=-1, keepdim=True).clamp_min(1e-7) + g = g / row_norms.to(g.dtype) + g = zeropower_via_newtonschulz5(g, steps=backend_steps) + g *= max(1, g.size(0) / g.size(1)) ** 0.5 + updates_flat[curr : curr + p.numel()] = g.reshape(-1) + curr += p.numel() + + if distributed: + dist.all_reduce(updates_flat, op=dist.ReduceOp.SUM) + + wd = group.get("weight_decay", 0.0) + curr = 0 + for p in params: + if wd > 0.0: + p.data.mul_(1.0 - lr * wd) + g = updates_flat[curr : curr + p.numel()].view_as(p).to(dtype=p.dtype) + p.add_(g, alpha=-lr) + curr += p.numel() + + return loss + +def build_sentencepiece_luts( + sp: spm.SentencePieceProcessor, vocab_size: int, device: torch.device +) -> tuple[Tensor, Tensor, Tensor]: + sp_vocab_size = int(sp.vocab_size()) + table_size = max(sp_vocab_size, vocab_size) + base_bytes_np = np.zeros((table_size,), dtype=np.int16) + has_leading_space_np = np.zeros((table_size,), dtype=np.bool_) + is_boundary_token_np = np.ones((table_size,), dtype=np.bool_) + for token_id in range(sp_vocab_size): + if sp.is_control(token_id) or sp.is_unknown(token_id) or sp.is_unused(token_id): + continue + is_boundary_token_np[token_id] = False + if sp.is_byte(token_id): + base_bytes_np[token_id] = 1 + continue + piece = sp.id_to_piece(token_id) + if piece.startswith("▁"): + has_leading_space_np[token_id] = True + piece = piece[1:] + base_bytes_np[token_id] = len(piece.encode("utf-8")) + return ( + torch.tensor(base_bytes_np, dtype=torch.int16, device=device), + torch.tensor(has_leading_space_np, dtype=torch.bool, device=device), + torch.tensor(is_boundary_token_np, dtype=torch.bool, device=device), + ) + +def load_validation_tokens(pattern: str, seq_len: int) -> Tensor: + files = [Path(p) for p in sorted(glob.glob(pattern))] + if not files: + raise FileNotFoundError(f"No files found for pattern: {pattern}") + # The export pipeline writes the fixed first-50k-doc validation set to fineweb_val_*. 
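+    # One extra token beyond the last full seq_len window is kept below (the "usable + 1"
+    # slice) so the shifted (input, target) pairs still line up at evaluation time.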
+ tokens = torch.cat([load_data_shard(file) for file in files]).contiguous() + usable = ((tokens.numel() - 1) // seq_len) * seq_len + if usable <= 0: + raise ValueError(f"Validation split is too short for TRAIN_SEQ_LEN={seq_len}") + return tokens[: usable + 1] + +def _forward_logits(model: nn.Module, x: Tensor, x0: Tensor = None) -> Tensor: + m = model.module if hasattr(model, 'module') else model + xe = m.tok_emb(x) + if m.bigram_emb is not None: + prev_ids = F.pad(x[:, :-1], (1, 0), value=0) + bigram_hash = ((prev_ids * 31 + x) % m.bigram_buckets).long() + xe = xe + m.bigram_proj(m.bigram_emb(bigram_hash)) + xe = F.rms_norm(xe, (xe.size(-1),)) + x0 = xe + h = xe + skips = [] + if m.looping_active: + enc_list = m.encoder_indices + dec_list = m.decoder_indices + else: + enc_list = list(range(m.num_encoder_layers)) + dec_list = list(range(m.num_encoder_layers, m.num_encoder_layers + m.num_decoder_layers)) + for idx in range(len(enc_list)): + i = enc_list[idx] + h = m.blocks[i](h, x0) + skips.append(h) + for skip_idx in range(len(dec_list)): + i = dec_list[skip_idx] + if skip_idx < m.num_skip_weights and len(skips) > 0: + scaled_skip = m.skip_weights[skip_idx].to(dtype=h.dtype)[None, None, :] * skips.pop() + g = torch.sigmoid(m.skip_gates[skip_idx].to(dtype=h.dtype))[None, None, :] + h = torch.lerp(scaled_skip, h, g) + h = m.blocks[i](h, x0) + h = m.final_norm(h) + if m.tie_embeddings: + logits = F.linear(h, m.tok_emb.weight) + else: + logits = m.lm_head(h) + logits = m.logit_softcap * torch.tanh(logits / m.logit_softcap) + return logits + +def eval_val( + args: Hyperparameters, + model: nn.Module, + rank: int, + world_size: int, + device: torch.device, + grad_accum_steps: int, + val_tokens: Tensor, + base_bytes_lut: Tensor, + has_leading_space_lut: Tensor, + is_boundary_token_lut: Tensor, + is_final_eval: bool = False, +) -> tuple[float, float]: + stride = getattr(args, 'eval_stride', args.train_seq_len) if is_final_eval else args.train_seq_len + if stride <= 0 or stride > args.train_seq_len: + stride = args.train_seq_len + + local_batch_tokens = args.val_batch_size // (world_size * grad_accum_steps) + if local_batch_tokens < args.train_seq_len: + raise ValueError( + "VAL_BATCH_SIZE must provide at least one sequence per rank; " + f"got VAL_BATCH_SIZE={args.val_batch_size}, WORLD_SIZE={world_size}, " + f"GRAD_ACCUM_STEPS={grad_accum_steps}, TRAIN_SEQ_LEN={args.train_seq_len}" + ) + local_batch_seqs = max(1, local_batch_tokens // args.train_seq_len) + total_tokens = val_tokens.numel() - 1 + total_starts = max(0, total_tokens - args.train_seq_len + stride) // stride + + seq_start = (total_starts * rank) // world_size + seq_end = (total_starts * (rank + 1)) // world_size + + val_loss_sum = torch.zeros((), device=device, dtype=torch.float64) + val_token_count = torch.zeros((), device=device, dtype=torch.float64) + val_byte_count = torch.zeros((), device=device, dtype=torch.float64) + + use_bigram = args.bigram_blend_enabled + if use_bigram: + V = args.vocab_size + bigram_counts = torch.zeros(V, V, device=device, dtype=torch.float32) + bigram_totals = torch.zeros(V, 1, device=device, dtype=torch.float32) + blend_lambda = args.bigram_blend_lambda + log0 = lambda msg: print(msg) if rank == 0 else None + log0(f"bigram_blend:enabled lambda={blend_lambda}") + + model.eval() + with torch.inference_mode(): + for batch_seq_start in range(seq_start, seq_end, local_batch_seqs): + batch_seq_end = min(batch_seq_start + local_batch_seqs, seq_end) + bsz = batch_seq_end - batch_seq_start + + x = 
torch.zeros((bsz, args.train_seq_len), dtype=torch.int64, device=device) + y = torch.zeros((bsz, args.train_seq_len), dtype=torch.int64, device=device) + for i in range(bsz): + raw_start = (batch_seq_start + i) * stride + raw_end = raw_start + args.train_seq_len + 1 + local = val_tokens[raw_start:raw_end].to(device=device, dtype=torch.int64, non_blocking=True) + actual_len = local.numel() - 1 + if actual_len < args.train_seq_len: + x[i, :actual_len] = local[:-1] + y[i, :actual_len] = local[1:] + else: + x[i] = local[:-1] + y[i] = local[1:] + + if use_bigram: + with torch.autocast(device_type="cuda", dtype=torch.bfloat16, enabled=True): + logits = _forward_logits(model, x).detach() + flat_logits = logits.reshape(-1, logits.size(-1)).float() + log_probs = F.log_softmax(flat_logits, dim=-1) + prev_flat = x.reshape(-1) + bg_rows = bigram_counts[prev_flat] + bg_tots = bigram_totals[prev_flat] + bg_log_probs = torch.log((bg_rows + 1.0) / (bg_tots + V)) + confidence = (bg_tots / (bg_tots + 10.0)).squeeze(-1) + lam_c = (blend_lambda * confidence).unsqueeze(-1) + blended = torch.logaddexp( + torch.log1p(-lam_c) + log_probs, + torch.log(lam_c + 1e-10) + bg_log_probs + ) + per_token_loss = F.nll_loss(blended, y.reshape(-1), reduction="none") + batch_losses = per_token_loss.view(bsz, args.train_seq_len) + bigram_counts[prev_flat, y.reshape(-1)] += 1.0 + bigram_totals[prev_flat] += 1.0 + else: + with torch.autocast(device_type="cuda", dtype=torch.bfloat16, enabled=True): + if hasattr(model, "module"): + batch_losses = model.module(x, y, reduction="none") + else: + batch_losses = model(x, y, reduction="none") + + for i in range(bsz): + seq_idx = batch_seq_start + i + eval_start = 0 if seq_idx == 0 else args.train_seq_len - stride + + loss_slice = batch_losses[i, eval_start:] + tgt_slice = y[i, eval_start:] + prev_slice = x[i, eval_start:] + + batch_token_count = float(tgt_slice.numel()) + val_loss_sum += loss_slice.sum().to(torch.float64) + val_token_count += batch_token_count + + token_bytes = base_bytes_lut[tgt_slice].to(dtype=torch.int16) + token_bytes += (has_leading_space_lut[tgt_slice] & ~is_boundary_token_lut[prev_slice]).to(dtype=torch.int16) + val_byte_count += token_bytes.to(torch.float64).sum() + + if dist.is_available() and dist.is_initialized(): + dist.all_reduce(val_loss_sum, op=dist.ReduceOp.SUM) + dist.all_reduce(val_token_count, op=dist.ReduceOp.SUM) + dist.all_reduce(val_byte_count, op=dist.ReduceOp.SUM) + + val_loss = val_loss_sum / val_token_count + bits_per_token = val_loss.item() / math.log(2.0) + tokens_per_byte = val_token_count.item() / val_byte_count.item() + model.train() + return float(val_loss.item()), float(bits_per_token * tokens_per_byte) + +CONTROL_TENSOR_NAME_PATTERNS = tuple( + pattern + for pattern in os.environ.get( + "CONTROL_TENSOR_NAME_PATTERNS", + "attn_scale,attn_scales,mlp_scale,mlp_scales,resid_mix,resid_mixes,q_gain,skip_weight,skip_weights", + ).split(",") + if pattern +) +INT8_KEEP_FLOAT_FP32_NAME_PATTERNS = tuple( + pattern + for pattern in os.environ.get( + "INT8_KEEP_FLOAT_FP32_NAME_PATTERNS", + ",".join(CONTROL_TENSOR_NAME_PATTERNS), + ).split(",") + if pattern +) +INT8_KEEP_FLOAT_MAX_NUMEL = 65_536 +INT8_KEEP_FLOAT_STORE_DTYPE = torch.float16 +INT8_PER_ROW_SCALE_DTYPE = torch.float16 +INT8_CLIP_PERCENTILE = 99.99984 +INT8_CLIP_Q = INT8_CLIP_PERCENTILE / 100.0 + +def tensor_nbytes(t: Tensor) -> int: + return int(t.numel()) * int(t.element_size()) + +def keep_float_tensor(name: str, t: Tensor, passthrough_orig_dtypes: dict[str, str]) -> Tensor: + if 
any(pattern in name for pattern in INT8_KEEP_FLOAT_FP32_NAME_PATTERNS): + return t.float().contiguous() + if t.dtype in {torch.float32, torch.bfloat16}: + passthrough_orig_dtypes[name] = str(t.dtype).removeprefix("torch.") + return t.to(dtype=INT8_KEEP_FLOAT_STORE_DTYPE).contiguous() + return t + +_GPTQ_CLIP_PERCENTILES = [0.9990, 0.9995, 0.9999, 0.99995, 1.0] + +def quantize_float_tensor(t: Tensor) -> tuple[Tensor, Tensor]: + t32 = t.float() + if t32.ndim == 2 and t32.numel() > 0: + best_q = None + best_scale = None + best_mse = None + for pct in _GPTQ_CLIP_PERCENTILES: + clip_abs = torch.quantile(t32.abs(), pct, dim=1) + clipped = torch.maximum(torch.minimum(t32, clip_abs[:, None]), -clip_abs[:, None]) + scale = (clip_abs / 31.0).clamp_min(1.0 / 31.0) + q = torch.clamp(torch.round(clipped / scale[:, None]), -31, 31) + # Reconstruction error per row + recon = q * scale[:, None] + mse = (recon - t32).pow(2).mean(dim=1) + if best_mse is None: + best_mse = mse + best_q = q + best_scale = scale + else: + improve = mse < best_mse + best_mse = torch.where(improve, mse, best_mse) + best_q = torch.where(improve[:, None], q, best_q) + best_scale = torch.where(improve, scale, best_scale) + return best_q.to(torch.int8).contiguous(), best_scale.to(dtype=INT8_PER_ROW_SCALE_DTYPE).contiguous() + + # Vectors / scalars use a simpler per-tensor scale. + clip_abs = float(torch.quantile(t32.abs().flatten(), INT8_CLIP_Q).item()) if t32.numel() else 0.0 + scale = torch.tensor(clip_abs / 31.0 if clip_abs > 0 else 1.0, dtype=torch.float32) + q = torch.clamp(torch.round(torch.clamp(t32, -clip_abs, clip_abs) / scale), -31, 31).to(torch.int8).contiguous() + return q, scale + +def collect_hessians( + model: nn.Module, + train_loader: "DistributedTokenLoader", + args: Hyperparameters, + device: torch.device, + grad_accum_steps: int, + n_batches: int = 64, +) -> dict[str, Tensor]: + hessians: dict[str, Tensor] = {} + hooks: list = [] + + def make_hook(name: str): + def hook_fn(module, inp, out): + x = inp[0].detach().float() + if x.ndim == 3: + x = x.reshape(-1, x.shape[-1]) + if name not in hessians: + hessians[name] = torch.zeros( + x.shape[1], x.shape[1], dtype=torch.float32, device=device + ) + hessians[name].addmm_(x.T, x) + return hook_fn + + # Register hooks on large CastedLinear layers (mlp + attn) + for name, module in model.named_modules(): + if isinstance(module, CastedLinear) and module.weight.numel() > INT8_KEEP_FLOAT_MAX_NUMEL: + hooks.append(module.register_forward_hook(make_hook(name + ".weight"))) + + model.eval() + with torch.no_grad(): + for _ in range(n_batches): + x, y = train_loader.next_batch( + args.train_batch_tokens, args.train_seq_len, grad_accum_steps + ) + with torch.autocast(device_type="cuda", dtype=torch.bfloat16, enabled=True): + model(x, y) + + for h in hooks: + h.remove() + + # Average and move to CPU + for name in hessians: + hessians[name] = hessians[name].cpu() / n_batches + + model.train() + return hessians + +def gptq_quantize_weight( + w: Tensor, + H: Tensor, + clip_range: int = 31, + block_size: int = 128, +) -> tuple[Tensor, Tensor]: + W_orig = w.float().clone() + rows, cols = W_orig.shape + H = H.float().clone() + + # Zero out dead columns and add damping + dead = torch.diag(H) == 0 + H[dead, dead] = 1 + damp = 0.01 * H.diag().mean() + H.diagonal().add_(damp) + + # Column reordering by descending Hessian diagonal (actorder) + perm = torch.argsort(H.diag(), descending=True) + invperm = torch.argsort(perm) + W_perm = W_orig[:, perm].clone() + W_perm[:, dead[perm]] = 0 + H = 
H[perm][:, perm] + + # Upper Cholesky of the inverse + try: + Hinv = torch.cholesky_inverse(torch.linalg.cholesky(H)) + Hinv = torch.linalg.cholesky(Hinv, upper=True) + except torch.linalg.LinAlgError: + return quantize_float_tensor(W_orig) + + best_q, best_scale, best_err = None, None, float('inf') + for pct in _GPTQ_CLIP_PERCENTILES: + if pct < 1.0: + row_clip = torch.quantile(W_orig.abs(), pct, dim=1) + else: + row_clip = W_orig.abs().amax(dim=1) + s = (row_clip / clip_range).clamp_min(1.0 / clip_range).to(torch.float16) + sf = s.float() + + Q = torch.zeros(rows, cols, dtype=torch.int8) + W_work = W_perm.clone() + for i1 in range(0, cols, block_size): + i2 = min(i1 + block_size, cols) + W_block = W_work[:, i1:i2].clone() + Hinv_block = Hinv[i1:i2, i1:i2] + Err = torch.zeros(rows, i2 - i1) + for j in range(i2 - i1): + w_col = W_block[:, j] + d = Hinv_block[j, j] + q_col = torch.clamp(torch.round(w_col / sf), -clip_range, clip_range) + Q[:, i1 + j] = q_col.to(torch.int8) + err = (w_col - q_col.float() * sf) / d + Err[:, j] = err + W_block[:, j:] -= err.unsqueeze(1) * Hinv_block[j, j:].unsqueeze(0) + if i2 < cols: + W_work[:, i2:] -= Err @ Hinv[i1:i2, i2:] + + recon = Q.float() * sf[:, None] + mse = (W_perm - recon).pow(2).mean().item() + if mse < best_err: + best_q, best_scale, best_err = Q, s, mse + + return best_q[:, invperm].contiguous(), best_scale.contiguous() + +def fake_quantize_int6(w: Tensor) -> Tensor: + with torch.no_grad(): + if w.ndim == 2 and w.numel() > 0: + clip_abs = torch.quantile(w.abs().float(), INT8_CLIP_Q, dim=1) + scale = (clip_abs / 31.0).clamp_min(1.0 / 31.0) + q = torch.clamp(torch.round(w.float() / scale[:, None]), -31, 31) + w_q = (q * scale[:, None]).to(w.dtype) + else: + clip_abs = torch.quantile(w.abs().float().flatten(), INT8_CLIP_Q) if w.numel() else torch.tensor(0.0) + scale = (clip_abs / 31.0).clamp_min(1.0 / 31.0) + q = torch.clamp(torch.round(w.float() / scale), -31, 31) + w_q = (q * scale).to(w.dtype) + # STE: gradient flows through as if no quantization happened + return w + (w_q - w).detach() + +def quantize_state_dict_int8(state_dict: dict[str, Tensor], hessians: dict[str, Tensor] | None = None): + # Single supported clean-script export format: + # - per-row int8 for 2D float tensors + # - per-tensor int8 for other float tensors + # - exact passthrough for non-floats + # - passthrough for small float tensors, stored as fp16 to save bytes + quantized: dict[str, Tensor] = {} + scales: dict[str, Tensor] = {} + dtypes: dict[str, str] = {} + passthrough: dict[str, Tensor] = {} + passthrough_orig_dtypes: dict[str, str] = {} + qmeta: dict[str, dict[str, object]] = {} + stats = dict.fromkeys( + ("param_count", "num_tensors", "num_float_tensors", "num_nonfloat_tensors", "baseline_tensor_bytes", "int8_payload_bytes"), + 0, + ) + + for name, tensor in state_dict.items(): + t = tensor.detach().to("cpu").contiguous() + stats["param_count"] += int(t.numel()) + stats["num_tensors"] += 1 + stats["baseline_tensor_bytes"] += tensor_nbytes(t) + + if not t.is_floating_point(): + stats["num_nonfloat_tensors"] += 1 + passthrough[name] = t + stats["int8_payload_bytes"] += tensor_nbytes(t) + continue + + # Small float tensors are cheap enough to keep directly. We still downcast + # fp32/bf16 passthrough tensors to fp16 so metadata does not dominate size. 
+ if t.numel() <= INT8_KEEP_FLOAT_MAX_NUMEL: + kept = keep_float_tensor(name, t, passthrough_orig_dtypes) + passthrough[name] = kept + stats["int8_payload_bytes"] += tensor_nbytes(kept) + continue + + stats["num_float_tensors"] += 1 + if hessians is not None and name in hessians and t.ndim == 2: + q, s = gptq_quantize_weight(t, hessians[name]) + else: + q, s = quantize_float_tensor(t) + if s.ndim > 0: + qmeta[name] = {"scheme": "per_row", "axis": 0} + quantized[name] = q + scales[name] = s + dtypes[name] = str(t.dtype).removeprefix("torch.") + stats["int8_payload_bytes"] += tensor_nbytes(q) + tensor_nbytes(s) + + obj: dict[str, object] = { + "__quant_format__": "int6_clean_per_row_v1", + "quantized": quantized, + "scales": scales, + "dtypes": dtypes, + "passthrough": passthrough, + } + if qmeta: + obj["qmeta"] = qmeta + if passthrough_orig_dtypes: + obj["passthrough_orig_dtypes"] = passthrough_orig_dtypes + return obj, stats + +def dequantize_state_dict_int8(obj: dict[str, object]) -> dict[str, Tensor]: + out: dict[str, Tensor] = {} + qmeta = obj.get("qmeta", {}) + passthrough_orig_dtypes = obj.get("passthrough_orig_dtypes", {}) + for name, q in obj["quantized"].items(): + dtype = getattr(torch, obj["dtypes"][name]) + s = obj["scales"][name] + if qmeta.get(name, {}).get("scheme") == "per_row" or s.ndim > 0: + s = s.to(dtype=torch.float32) + # Broadcast the saved row scale back across trailing dimensions. + out[name] = (q.float() * s.view(q.shape[0], *([1] * (q.ndim - 1)))).to(dtype=dtype).contiguous() + else: + scale = float(s.item()) + out[name] = (q.float() * scale).to(dtype=dtype).contiguous() + for name, t in obj["passthrough"].items(): + # Restore small tensors, undoing the temporary fp16 storage cast if needed. + out_t = t.detach().to("cpu").contiguous() + orig_dtype = passthrough_orig_dtypes.get(name) + if isinstance(orig_dtype, str): + out_t = out_t.to(dtype=getattr(torch, orig_dtype)).contiguous() + out[name] = out_t + return out + +def load_data_shard(file: Path) -> Tensor: + header_bytes = 256 * np.dtype(" None: + self.file_idx = (self.file_idx + 1) % len(self.files) + self.tokens = load_data_shard(self.files[self.file_idx]) + self.pos = 0 + + def take(self, n: int) -> Tensor: + chunks: list[Tensor] = [] + remaining = n + while remaining > 0: + avail = self.tokens.numel() - self.pos + if avail <= 0: + self._advance_file() + continue + k = min(remaining, avail) + chunks.append(self.tokens[self.pos : self.pos + k]) + self.pos += k + remaining -= k + return chunks[0] if len(chunks) == 1 else torch.cat(chunks) + +class DistributedTokenLoader: + # Each call consumes a contiguous chunk from the shared token stream, then slices out + # one disjoint span per rank. The extra "+1" token lets us build (x, y) by shifting. 
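+    # Worked example: with world_size=2, grad_accum_steps=1 and global_tokens=8,
+    # local_tokens=4 and per_rank_span=5, so a 10-token chunk is consumed; rank 0 takes
+    # chunk[0:5], rank 1 takes chunk[5:10], and x/y are that span offset by one token.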
+ def __init__(self, pattern: str, rank: int, world_size: int, device: torch.device): + self.rank = rank + self.world_size = world_size + self.device = device + self.stream = TokenStream(pattern) + + def next_batch(self, global_tokens: int, seq_len: int, grad_accum_steps: int) -> tuple[Tensor, Tensor]: + local_tokens = global_tokens // (self.world_size * grad_accum_steps) + per_rank_span = local_tokens + 1 + chunk = self.stream.take(per_rank_span * self.world_size) + start = self.rank * per_rank_span + local = chunk[start : start + per_rank_span].to(dtype=torch.int64) + x = local[:-1].reshape(-1, seq_len) + y = local[1:].reshape(-1, seq_len) + return x.to(self.device, non_blocking=True), y.to(self.device, non_blocking=True) + +class RMSNorm(nn.Module): + def __init__(self, eps: float | None = None): + super().__init__() + self.eps = eps + + def forward(self, x: Tensor) -> Tensor: + return F.rms_norm(x, (x.size(-1),), eps=self.eps) + +class CastedLinear(nn.Linear): + # Keep weights in fp32 for optimizer/state quality, cast at matmul time for bf16 compute. + _qat_enabled: bool = False + + def forward(self, x: Tensor) -> Tensor: + w = self.weight.to(dtype=x.dtype) + if self._qat_enabled and self.training and self.weight.numel() > INT8_KEEP_FLOAT_MAX_NUMEL: + w = fake_quantize_int6(w) + bias = self.bias.to(x.dtype) if self.bias is not None else None + return F.linear(x, w, bias) + +def restore_low_dim_params_to_fp32(module: nn.Module) -> None: + # Keep small/control parameters in fp32 even when the model body runs in bf16. + with torch.no_grad(): + for name, param in module.named_parameters(): + if (param.ndim < 2 or any(pattern in name for pattern in CONTROL_TENSOR_NAME_PATTERNS)) and param.dtype != torch.float32: + param.data = param.data.float() + +class Rotary(nn.Module): + # Caches cos/sin tables per sequence length on the current device. 
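+    # The cached tables are recomputed only when seq_len or the device changes;
+    # they are cast to the requested dtype on every call.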
+ def __init__(self, dim: int, base: float = 10000.0): + super().__init__() + inv_freq = 1.0 / (base ** (torch.arange(0, dim, 2, dtype=torch.float32) / dim)) + self.register_buffer("inv_freq", inv_freq, persistent=False) + self._seq_len_cached = 0 + self._cos_cached: Tensor | None = None + self._sin_cached: Tensor | None = None + + def forward(self, seq_len: int, device: torch.device, dtype: torch.dtype) -> tuple[Tensor, Tensor]: + if ( + self._cos_cached is None + or self._sin_cached is None + or self._seq_len_cached != seq_len + or self._cos_cached.device != device + ): + t = torch.arange(seq_len, device=device, dtype=self.inv_freq.dtype) + freqs = torch.outer(t, self.inv_freq.to(device)) + self._cos_cached = freqs.cos()[None, None, :, :] + self._sin_cached = freqs.sin()[None, None, :, :] + self._seq_len_cached = seq_len + return self._cos_cached.to(dtype=dtype), self._sin_cached.to(dtype=dtype) + +def apply_rotary_emb(x: Tensor, cos: Tensor, sin: Tensor, rope_dims: int = 0) -> Tensor: + if rope_dims > 0 and rope_dims < x.size(-1): + # Partial RoPE: only rotate first rope_dims dimensions + x_rope = x[..., :rope_dims] + x_pass = x[..., rope_dims:] + half = rope_dims // 2 + x1, x2 = x_rope[..., :half], x_rope[..., half:] + x_rope = torch.cat((x1 * cos + x2 * sin, x1 * (-sin) + x2 * cos), dim=-1) + return torch.cat((x_rope, x_pass), dim=-1) + half = x.size(-1) // 2 + x1, x2 = x[..., :half], x[..., half:] + return torch.cat((x1 * cos + x2 * sin, x1 * (-sin) + x2 * cos), dim=-1) + +class CausalSelfAttention(nn.Module): + def __init__( + self, + dim: int, + num_heads: int, + num_kv_heads: int, + rope_base: float, + qk_gain_init: float, + ): + super().__init__() + if dim % num_heads != 0: + raise ValueError("model_dim must be divisible by num_heads") + if num_heads % num_kv_heads != 0: + raise ValueError("num_heads must be divisible by num_kv_heads") + self.num_heads = num_heads + self.num_kv_heads = num_kv_heads + self.head_dim = dim // num_heads + if self.head_dim % 2 != 0: + raise ValueError("head_dim must be even for RoPE") + # Partial RoPE: only first rope_dims dimensions get positional encoding + self.rope_dims = min(16, self.head_dim) + kv_dim = self.num_kv_heads * self.head_dim + self.c_q = CastedLinear(dim, dim, bias=False) + self.c_k = CastedLinear(dim, kv_dim, bias=False) + self.c_v = CastedLinear(dim, kv_dim, bias=False) + self.proj = CastedLinear(dim, dim, bias=False) + self.proj._zero_init = True + self.q_gain = nn.Parameter(torch.full((num_heads,), qk_gain_init, dtype=torch.float32)) + self.rotary = Rotary(self.rope_dims, base=rope_base) + + def forward(self, x: Tensor) -> Tensor: + bsz, seqlen, dim = x.shape + q = self.c_q(x).reshape(bsz, seqlen, self.num_heads, self.head_dim).transpose(1, 2) + k = self.c_k(x).reshape(bsz, seqlen, self.num_kv_heads, self.head_dim).transpose(1, 2) + v = self.c_v(x).reshape(bsz, seqlen, self.num_kv_heads, self.head_dim).transpose(1, 2) + q = F.rms_norm(q, (q.size(-1),)) + k = F.rms_norm(k, (k.size(-1),)) + cos, sin = self.rotary(seqlen, x.device, q.dtype) + q = apply_rotary_emb(q, cos, sin, rope_dims=self.rope_dims) + k = apply_rotary_emb(k, cos, sin, rope_dims=self.rope_dims) + q = q * self.q_gain.to(dtype=q.dtype)[None, :, None, None] + y = F.scaled_dot_product_attention( + q, + k, + v, + attn_mask=None, + is_causal=True, + enable_gqa=(self.num_kv_heads != self.num_heads), + ) + if self.num_kv_heads != self.num_heads: + repeats = self.num_heads // self.num_kv_heads + v_expanded = v.repeat_interleave(repeats, dim=1) + else: + v_expanded = v + vn 
= F.normalize(v_expanded, dim=-1) + y = y - (y * vn).sum(dim=-1, keepdim=True) * vn + y = y.transpose(1, 2).contiguous().reshape(bsz, seqlen, dim) + return self.proj(y) + +class MLP(nn.Module): + def __init__(self, dim: int, mlp_mult: int): + super().__init__() + hidden = mlp_mult * dim + self.fc = CastedLinear(dim, hidden, bias=False) + self.proj = CastedLinear(hidden, dim, bias=False) + self.proj._zero_init = True + + def forward(self, x: Tensor) -> Tensor: + x = F.leaky_relu(self.fc(x), negative_slope=0.5) + return self.proj(x.square()) + +class MoEMLP(nn.Module): + """Mixture of 2 Experts MLP with top-1 routing. + Same total params as dense MLP but with specialization.""" + def __init__(self, dim: int, mlp_mult: int, num_experts: int = 2): + super().__init__() + self.num_experts = num_experts + hidden = mlp_mult * dim # each expert is full-size for maximum capacity + self.router = nn.Linear(dim, num_experts, bias=False) + self.experts_fc = nn.ModuleList([CastedLinear(dim, hidden, bias=False) for _ in range(num_experts)]) + self.experts_proj = nn.ModuleList([CastedLinear(hidden, dim, bias=False) for _ in range(num_experts)]) + for p in self.experts_proj: + p._zero_init = True + self._aux_loss = torch.tensor(0.0) + + def forward(self, x: Tensor) -> Tensor: + bsz, seq, dim = x.shape + x_flat = x.reshape(-1, dim) # [N, dim] + logits = self.router(x_flat) # [N, num_experts] + probs = F.softmax(logits, dim=-1) + top_idx = probs.argmax(dim=-1) # [N] + top_weight = probs.gather(1, top_idx.unsqueeze(1)).squeeze(1) # [N] + + f_i = torch.zeros(self.num_experts, device=x.device) + for e in range(self.num_experts): + f_i[e] = (top_idx == e).float().mean() + p_i = probs.mean(dim=0) + self._aux_loss = self.num_experts * (f_i * p_i).sum() + + out = torch.zeros_like(x_flat) + for e in range(self.num_experts): + mask = (top_idx == e).unsqueeze(1) # [N, 1] + h = F.leaky_relu(self.experts_fc[e](x_flat), negative_slope=0.5).square() + expert_out = self.experts_proj[e](h) + out = out + mask.float() * expert_out * top_weight.unsqueeze(1) + return out.reshape(bsz, seq, dim) + +class Block(nn.Module): + def __init__( + self, dim: int, num_heads: int, num_kv_heads: int, mlp_mult: int, + rope_base: float, qk_gain_init: float, layer_idx: int = 0, + ln_scale: bool = True, gate_window: int = 12, + ): + super().__init__() + self.attn_norm = RMSNorm() + self.mlp_norm = RMSNorm() + self.attn = CausalSelfAttention(dim, num_heads, num_kv_heads, rope_base, qk_gain_init) + self.mlp = MLP(dim, mlp_mult) # replaced by MoEMLP externally when needed + self.attn_scale = nn.Parameter(torch.ones(dim, dtype=torch.float32)) + self.mlp_scale = nn.Parameter(torch.ones(dim, dtype=torch.float32)) + self.resid_mix = nn.Parameter(torch.stack((torch.ones(dim), torch.zeros(dim))).float()) + self.ln_scale_factor = 1.0 / math.sqrt(layer_idx + 1) if ln_scale else 1.0 + self.parallel = False + self.smear_w = nn.Parameter(torch.zeros(gate_window, dtype=torch.float32)) + self.gate_window = gate_window + + def forward(self, x: Tensor, x0: Tensor) -> Tensor: + mix = self.resid_mix.to(dtype=x.dtype) + x_in = mix[0][None, None, :] * x + mix[1][None, None, :] * x0 + g_smear = torch.sigmoid(self.smear_w.to(dtype=x_in.dtype)) + prev = F.pad(x_in[:, :-1, :self.gate_window], (0, 0, 1, 0)) + x_in = torch.cat([x_in[..., :self.gate_window] + g_smear * (prev - x_in[..., :self.gate_window]), + x_in[..., self.gate_window:]], dim=-1) + attn_out = self.attn(self.attn_norm(x_in) * self.ln_scale_factor) + if self.parallel: + mlp_out = self.mlp(self.mlp_norm(x_in) 
* self.ln_scale_factor) + x_out = x_in + self.attn_scale.to(dtype=x_in.dtype)[None, None, :] * attn_out \ + + self.mlp_scale.to(dtype=x_in.dtype)[None, None, :] * mlp_out + else: + x_out = x_in + self.attn_scale.to(dtype=x_in.dtype)[None, None, :] * attn_out + x_out = x_out + self.mlp_scale.to(dtype=x_out.dtype)[None, None, :] * self.mlp(self.mlp_norm(x_out) * self.ln_scale_factor) + return x_out + +class GPT(nn.Module): + def __init__( + self, vocab_size: int, num_layers: int, model_dim: int, + num_heads: int, num_kv_heads: int, mlp_mult: int, + tie_embeddings: bool, tied_embed_init_std: float, + logit_softcap: float, rope_base: float, qk_gain_init: float, + num_loops: int = 2, loop_start: int = 3, loop_end: int = 5, + parallel_start_layer: int = 7, xsa_last_n: int = 11, + bigram_buckets: int = 2048, bigram_dim: int = 128, gate_window: int = 12, + **kwargs, + ): + super().__init__() + if logit_softcap <= 0.0: + raise ValueError(f"logit_softcap must be positive, got {logit_softcap}") + self._moe_aux_weight = kwargs.get('moe_aux_weight', 0.01) + self.tie_embeddings = tie_embeddings + self.tied_embed_init_std = tied_embed_init_std + self.logit_softcap = logit_softcap + self.tok_emb = nn.Embedding(vocab_size, model_dim) + + self.bigram_buckets = bigram_buckets + if bigram_buckets > 0: + self.bigram_emb = nn.Embedding(bigram_buckets, bigram_dim) + self.bigram_proj = CastedLinear(bigram_dim, model_dim, bias=False) + self.bigram_proj._zero_init = True + else: + self.bigram_emb = None + self.bigram_proj = None + + self.blocks = nn.ModuleList([ + Block(model_dim, num_heads, num_kv_heads, mlp_mult, rope_base, + qk_gain_init, layer_idx=i, ln_scale=True, gate_window=gate_window) + for i in range(num_layers) + ]) + + for i in range(parallel_start_layer, num_layers): + self.blocks[i].parallel = True + + self._moe_start = kwargs.get('moe_start', 5) + self._moe_end = kwargs.get('moe_end', 7) + self._moe_num_experts = kwargs.get('moe_num_experts', 2) + self._moe_mlp_mult = mlp_mult + self._moe_model_dim = model_dim + self._moe_blocks: list[int] = [] + self._moe_active = False + + self.looping_active = False + self.num_loops = num_loops + if num_loops > 0: + loop_seg = list(range(loop_start, loop_end + 1)) + all_indices = list(range(loop_start)) + for _ in range(num_loops + 1): + all_indices.extend(loop_seg) + all_indices.extend(range(loop_end + 1, num_layers)) + num_enc = len(all_indices) // 2 + self.encoder_indices = all_indices[:num_enc] + self.decoder_indices = all_indices[num_enc:] + else: + num_enc = num_layers // 2 + self.encoder_indices = list(range(num_enc)) + self.decoder_indices = list(range(num_enc, num_layers)) + + self.num_encoder_layers = num_layers // 2 + self.num_decoder_layers = num_layers - self.num_encoder_layers + + self.num_skip_weights = min(len(self.encoder_indices), len(self.decoder_indices)) + self.skip_weights = nn.Parameter(torch.ones(self.num_skip_weights, model_dim, dtype=torch.float32)) + self.skip_gates = nn.Parameter(torch.zeros(self.num_skip_weights, model_dim, dtype=torch.float32)) + + self.final_norm = RMSNorm() + self.lm_head = None if tie_embeddings else CastedLinear(model_dim, vocab_size, bias=False) + if self.lm_head is not None: + self.lm_head._zero_init = True + self._init_weights() + + def _init_weights(self) -> None: + if self.tie_embeddings: + nn.init.normal_(self.tok_emb.weight, mean=0.0, std=self.tied_embed_init_std) + for name, module in self.named_modules(): + if isinstance(module, nn.Linear): + if getattr(module, "_zero_init", False): + 
nn.init.zeros_(module.weight) + elif module.weight.ndim == 2 and module.weight.shape[0] >= 64 and module.weight.shape[1] >= 64: + nn.init.orthogonal_(module.weight, gain=1.0) + + def forward(self, input_ids: Tensor, target_ids: Tensor, reduction: str = "mean") -> Tensor: + x = self.tok_emb(input_ids) + if self.bigram_emb is not None: + prev_ids = F.pad(input_ids[:, :-1], (1, 0), value=0) + bigram_hash = ((prev_ids * 31 + input_ids) % self.bigram_buckets).long() + x = x + self.bigram_proj(self.bigram_emb(bigram_hash)) + x = F.rms_norm(x, (x.size(-1),)) + x0 = x + skips: list[Tensor] = [] + + if self.looping_active: + enc_list = self.encoder_indices + dec_list = self.decoder_indices + else: + enc_list = list(range(self.num_encoder_layers)) + dec_list = list(range(self.num_encoder_layers, self.num_encoder_layers + self.num_decoder_layers)) + + # Encoder pass: store skip activations + for idx in range(len(enc_list)): + i = enc_list[idx] + x = self.blocks[i](x, x0) + skips.append(x) + + # Decoder pass: consume skips with sigmoid-gated connections + for skip_idx in range(len(dec_list)): + i = dec_list[skip_idx] + if skip_idx < self.num_skip_weights and len(skips) > 0: + scaled_skip = self.skip_weights[skip_idx].to(dtype=x.dtype)[None, None, :] * skips.pop() + g = torch.sigmoid(self.skip_gates[skip_idx].to(dtype=x.dtype))[None, None, :] + x = torch.lerp(scaled_skip, x, g) + x = self.blocks[i](x, x0) + + x = self.final_norm(x).reshape(-1, x.size(-1)) + targets = target_ids.reshape(-1) + if self.tie_embeddings: + logits_proj = F.linear(x, self.tok_emb.weight) + else: + if self.lm_head is None: + raise RuntimeError("lm_head is required when tie_embeddings=False") + logits_proj = self.lm_head(x) + logits = self.logit_softcap * torch.tanh(logits_proj / self.logit_softcap) + ce = F.cross_entropy(logits.float(), targets, reduction=reduction) + if reduction == "none": + return ce.view(input_ids.shape) + aux = sum(self.blocks[i].mlp._aux_loss for i in self._moe_blocks) if self._moe_blocks else 0.0 + return ce + self._moe_aux_weight * aux + +def main() -> None: + global zeropower_via_newtonschulz5 + + code = Path(__file__).read_text(encoding="utf-8") + args = Hyperparameters() + zeropower_via_newtonschulz5 = torch.compile(zeropower_via_newtonschulz5) + + distributed = "RANK" in os.environ and "WORLD_SIZE" in os.environ + rank = int(os.environ.get("RANK", "0")) + world_size = int(os.environ.get("WORLD_SIZE", "1")) + local_rank = int(os.environ.get("LOCAL_RANK", "0")) + if world_size <= 0: + raise ValueError(f"WORLD_SIZE must be positive, got {world_size}") + if 8 % world_size != 0: + log0(f"Warning: WORLD_SIZE={world_size} does not divide 8 evenly.") + + grad_accum_steps = int(os.environ.get("GRAD_ACCUM_STEPS", 8 // world_size)) + grad_scale = 1.0 / grad_accum_steps + if not torch.cuda.is_available(): + raise RuntimeError("CUDA is required") + device = torch.device("cuda", local_rank) + torch.cuda.set_device(device) + if distributed: + dist.init_process_group(backend="nccl", device_id=device) + dist.barrier() + master_process = rank == 0 + + torch.backends.cuda.matmul.allow_tf32 = True + torch.backends.cudnn.allow_tf32 = True + from torch.backends.cuda import enable_cudnn_sdp, enable_flash_sdp, enable_math_sdp, enable_mem_efficient_sdp + + enable_cudnn_sdp(False) + enable_flash_sdp(True) + enable_mem_efficient_sdp(False) + enable_math_sdp(False) + + logfile = None + if master_process: + os.makedirs("logs", exist_ok=True) + logfile = f"logs/{args.run_id}.txt" + print(logfile) + + def log0(msg: str, console: 
bool = True) -> None: + if not master_process: + return + if console: + print(msg) + if logfile is not None: + with open(logfile, "a", encoding="utf-8") as f: + print(msg, file=f) + + log0(code, console=False) + log0("=" * 100, console=False) + log0(f"Running Python {sys.version}", console=False) + log0(f"Running PyTorch {torch.__version__}", console=False) + log0( + subprocess.run(["nvidia-smi"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, check=False).stdout, + console=False, + ) + log0("=" * 100, console=False) + + random.seed(args.seed) + np.random.seed(args.seed) + torch.manual_seed(args.seed) + torch.cuda.manual_seed_all(args.seed) + + if not args.tokenizer_path.endswith(".model"): + raise ValueError(f"Script only setup for SentencePiece .model file: {args.tokenizer_path}") + sp = spm.SentencePieceProcessor(model_file=args.tokenizer_path) + if int(sp.vocab_size()) != args.vocab_size: + raise ValueError( + f"VOCAB_SIZE={args.vocab_size} does not match tokenizer vocab_size={int(sp.vocab_size())}" + ) + dataset_dir = Path(args.data_path).resolve() + actual_train_files = len(list(dataset_dir.glob("fineweb_train_*.bin"))) + val_tokens = load_validation_tokens(args.val_files, args.train_seq_len) + base_bytes_lut, has_leading_space_lut, is_boundary_token_lut = build_sentencepiece_luts( + sp, args.vocab_size, device + ) + log0(f"val_bpb:enabled tokenizer_kind=sentencepiece tokenizer_path={args.tokenizer_path}") + log0(f"train_loader:dataset:{dataset_dir.name} train_shards:{actual_train_files}") + log0(f"val_loader:shards pattern={args.val_files} tokens:{val_tokens.numel() - 1}") + + base_model = GPT( + vocab_size=args.vocab_size, + num_layers=args.num_layers, + model_dim=args.model_dim, + num_heads=args.num_heads, + num_kv_heads=args.num_kv_heads, + mlp_mult=args.mlp_mult, + tie_embeddings=args.tie_embeddings, + tied_embed_init_std=args.tied_embed_init_std, + logit_softcap=args.logit_softcap, + rope_base=args.rope_base, + qk_gain_init=args.qk_gain_init, + num_loops=args.num_loops, + loop_start=args.loop_start, + loop_end=args.loop_end, + parallel_start_layer=args.parallel_start_layer, + bigram_buckets=args.bigram_buckets, bigram_dim=args.bigram_dim, + gate_window=args.gate_window, + moe_start=args.moe_start, moe_end=args.moe_end, + moe_num_experts=args.moe_num_experts, moe_aux_weight=args.moe_aux_weight, + ).to(device).bfloat16() + for module in base_model.modules(): + if isinstance(module, CastedLinear): + module.float() + restore_low_dim_params_to_fp32(base_model) + compiled_model = torch.compile(base_model, dynamic=False, fullgraph=True) + model: nn.Module = DDP(compiled_model, device_ids=[local_rank], broadcast_buffers=False) if distributed else compiled_model + + block_named_params = list(base_model.blocks.named_parameters()) + matrix_params = [ + p + for name, p in block_named_params + if p.ndim == 2 and not any(pattern in name for pattern in CONTROL_TENSOR_NAME_PATTERNS) + ] + scalar_params = [ + p + for name, p in block_named_params + if p.ndim < 2 or any(pattern in name for pattern in CONTROL_TENSOR_NAME_PATTERNS) + ] + if base_model.skip_weights.numel() > 0: + scalar_params.append(base_model.skip_weights) + if base_model.skip_gates.numel() > 0: + scalar_params.append(base_model.skip_gates) + token_lr = args.tied_embed_lr if args.tie_embeddings else args.embed_lr + optimizer_tok = torch.optim.AdamW( + [{"params": [base_model.tok_emb.weight], "lr": token_lr, "base_lr": token_lr}], + betas=(args.beta1, args.beta2), + eps=args.adam_eps, + weight_decay=args.embed_wd, + 
fused=True, + ) + optimizer_muon = Muon( + matrix_params, + lr=args.matrix_lr, + momentum=args.muon_momentum, + backend_steps=args.muon_backend_steps, + weight_decay=args.muon_wd, + row_normalize=True, + ) + for group in optimizer_muon.param_groups: + group["base_lr"] = args.matrix_lr + optimizer_scalar = torch.optim.Adam( + [{"params": scalar_params, "lr": args.scalar_lr, "base_lr": args.scalar_lr}], + betas=(args.beta1, args.beta2), + eps=args.adam_eps, + fused=True, + ) + optimizers: list[torch.optim.Optimizer] = [optimizer_tok, optimizer_muon, optimizer_scalar] + if base_model.lm_head is not None: + optimizer_head = torch.optim.Adam( + [{"params": [base_model.lm_head.weight], "lr": args.head_lr, "base_lr": args.head_lr}], + betas=(args.beta1, args.beta2), + eps=args.adam_eps, + fused=True, + ) + optimizers.insert(1, optimizer_head) + + # Init EMA Model + ema_model = None + if master_process: + ema_model = GPT( + vocab_size=args.vocab_size, num_layers=args.num_layers, model_dim=args.model_dim, + num_heads=args.num_heads, num_kv_heads=args.num_kv_heads, mlp_mult=args.mlp_mult, + tie_embeddings=args.tie_embeddings, tied_embed_init_std=args.tied_embed_init_std, + logit_softcap=args.logit_softcap, rope_base=args.rope_base, + qk_gain_init=args.qk_gain_init, num_loops=args.num_loops, + loop_start=args.loop_start, loop_end=args.loop_end, + parallel_start_layer=args.parallel_start_layer, + bigram_buckets=args.bigram_buckets, bigram_dim=args.bigram_dim, + gate_window=args.gate_window, + moe_start=args.moe_start, moe_end=args.moe_end, + moe_num_experts=args.moe_num_experts, moe_aux_weight=args.moe_aux_weight, + ).to(device).bfloat16() + ema_model.load_state_dict(base_model.state_dict()) + for p in ema_model.parameters(): + p.requires_grad = False + + n_params = sum(p.numel() for p in base_model.parameters()) + log0(f"model_params:{n_params}") + log0(f"world_size:{world_size} grad_accum_steps:{grad_accum_steps}") + log0("sdp_backends:cudnn=False flash=True mem_efficient=False math=False") + log0(f"attention_mode:gqa num_heads:{args.num_heads} num_kv_heads:{args.num_kv_heads}") + log0( + f"tie_embeddings:{args.tie_embeddings} embed_lr:{token_lr} " + f"head_lr:{args.head_lr if base_model.lm_head is not None else 0.0} " + f"matrix_lr:{args.matrix_lr} scalar_lr:{args.scalar_lr}" + ) + log0( + f"train_batch_tokens:{args.train_batch_tokens} train_seq_len:{args.train_seq_len} " + f"iterations:{args.iterations} warmup_steps:{args.warmup_steps} " + f"max_wallclock_seconds:{args.max_wallclock_seconds:.3f}" + ) + log0(f"seed:{args.seed}") + + train_loader = DistributedTokenLoader(args.train_files, rank, world_size, device) + + def zero_grad_all() -> None: + for opt in optimizers: + opt.zero_grad(set_to_none=True) + + max_wallclock_ms = 1000.0 * args.max_wallclock_seconds if args.max_wallclock_seconds > 0 else None + + def lr_mul(step: int, elapsed_ms: float) -> float: + if args.warmdown_frac <= 0: + return 1.0 + if max_wallclock_ms is not None: + frac = elapsed_ms / max_wallclock_ms + else: + frac = step / max(args.iterations, 1) + if frac < (1.0 - args.warmdown_frac): + return 1.0 + # Linear decay from 1.0 to min_lr over the warmdown fraction + warmdown_progress = (frac - (1.0 - args.warmdown_frac)) / args.warmdown_frac + return max(1.0 - warmdown_progress, args.min_lr) + + if args.warmup_steps > 0: + initial_model_state = {name: tensor.detach().cpu().clone() for name, tensor in base_model.state_dict().items()} + initial_optimizer_states = [copy.deepcopy(opt.state_dict()) for opt in optimizers] + 
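+        # The warmup loop below runs full optimizer steps, then restores the model and
+        # optimizer snapshots and rebuilds the data loader, so its lasting effect is
+        # essentially just warming up compiled kernels and CUDA state before timing.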
model.train() + for warmup_step in range(args.warmup_steps): + zero_grad_all() + for micro_step in range(grad_accum_steps): + if distributed: + model.require_backward_grad_sync = micro_step == grad_accum_steps - 1 + x, y = train_loader.next_batch(args.train_batch_tokens, args.train_seq_len, grad_accum_steps) + with torch.autocast(device_type="cuda", dtype=torch.bfloat16, enabled=True): + warmup_loss = model(x, y) + (warmup_loss * grad_scale).backward() + for opt in optimizers: + opt.step() + zero_grad_all() + if args.warmup_steps <= 20 or (warmup_step + 1) % 10 == 0 or warmup_step + 1 == args.warmup_steps: + log0(f"warmup_step:{warmup_step + 1}/{args.warmup_steps}") + base_model.load_state_dict(initial_model_state, strict=True) + for opt, state in zip(optimizers, initial_optimizer_states, strict=True): + opt.load_state_dict(state) + zero_grad_all() + if distributed: + model.require_backward_grad_sync = True + train_loader = DistributedTokenLoader(args.train_files, rank, world_size, device) + + training_time_ms = 0.0 + stop_after_step: int | None = None + torch.cuda.synchronize() + t0 = time.perf_counter() + + step = 0 + while True: + last_step = step == args.iterations or (stop_after_step is not None and step >= stop_after_step) + + should_validate = last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0) + if should_validate: + torch.cuda.synchronize() + training_time_ms += 1000.0 * (time.perf_counter() - t0) + val_loss, val_bpb = eval_val( + args, + model, + rank, + world_size, + device, + grad_accum_steps, + val_tokens, + base_bytes_lut, + has_leading_space_lut, + is_boundary_token_lut, + is_final_eval=False, + ) + log0( + f"step:{step}/{args.iterations} val_loss:{val_loss:.4f} val_bpb:{val_bpb:.4f} " + f"train_time:{training_time_ms:.0f}ms step_avg:{training_time_ms / max(step, 1):.2f}ms" + ) + torch.cuda.synchronize() + t0 = time.perf_counter() + + if last_step: + if stop_after_step is not None and step < args.iterations: + log0( + f"stopping_early: wallclock_cap train_time:{training_time_ms:.0f}ms " + f"step:{step}/{args.iterations}" + ) + break + + elapsed_ms = training_time_ms + 1000.0 * (time.perf_counter() - t0) + scale = lr_mul(step, elapsed_ms) + if args.num_loops > 0 and not base_model.looping_active: + frac = elapsed_ms / max(max_wallclock_ms, 1e-9) if max_wallclock_ms else step / max(args.iterations, 1) + if frac >= args.enable_looping_at: + base_model.looping_active = True + log0(f"loop:activated frac:{frac:.3f} enc:{base_model.encoder_indices} dec:{base_model.decoder_indices}") + if not base_model._moe_active and base_model._moe_start < base_model._moe_end + 1: + frac_moe = elapsed_ms / max(max_wallclock_ms, 1e-9) if max_wallclock_ms else step / max(args.iterations, 1) + if frac_moe >= args.enable_moe_at: + for i in range(base_model._moe_start, min(base_model._moe_end + 1, args.num_layers)): + old_mlp = base_model.blocks[i].mlp + moe_mlp = MoEMLP(base_model._moe_model_dim, base_model._moe_mlp_mult, base_model._moe_num_experts).to(device).bfloat16() + for e in range(base_model._moe_num_experts): + moe_mlp.experts_fc[e].weight.data.copy_(old_mlp.fc.weight.data) + moe_mlp.experts_proj[e].weight.data.copy_(old_mlp.proj.weight.data) + nn.init.normal_(moe_mlp.router.weight, std=0.01) + for m in moe_mlp.modules(): + if isinstance(m, CastedLinear): + m.float() + base_model.blocks[i].mlp = moe_mlp + base_model._moe_blocks.append(i) + base_model._moe_active = True + new_matrix = [p for i in base_model._moe_blocks for p in base_model.blocks[i].mlp.parameters() if 
p.ndim == 2 and p.numel() > 256] + optimizer_muon.add_param_group({'params': new_matrix, 'lr': args.matrix_lr, 'base_lr': args.matrix_lr}) + new_scalar = [p for i in base_model._moe_blocks for p in base_model.blocks[i].mlp.parameters() if p.ndim < 2 or p.numel() <= 256] + if new_scalar: + optimizer_scalar.add_param_group({'params': new_scalar, 'lr': args.scalar_lr, 'base_lr': args.scalar_lr}) + log0(f"moe:upcycled frac:{frac_moe:.3f} layers:{base_model._moe_blocks} experts:{base_model._moe_num_experts}") + if ema_model is not None: + for i in base_model._moe_blocks: + ema_model.blocks[i].mlp = MoEMLP(base_model._moe_model_dim, base_model._moe_mlp_mult, base_model._moe_num_experts).to(device).bfloat16() + for m in ema_model.blocks[i].mlp.modules(): + if isinstance(m, CastedLinear): + m.float() + ema_model._moe_blocks = list(base_model._moe_blocks) + ema_model._moe_active = True + ema_model.load_state_dict(base_model.state_dict()) + for p in ema_model.parameters(): + p.requires_grad = False + compiled_model = torch.compile(base_model, dynamic=False, fullgraph=True) + model = DDP(compiled_model, device_ids=[local_rank], broadcast_buffers=False) if distributed else compiled_model + remaining_steps = (stop_after_step or args.iterations) - step + qat_active = scale < 0.15 and remaining_steps > 500 + for m in base_model.modules(): + if isinstance(m, CastedLinear): + m._qat_enabled = qat_active + zero_grad_all() + train_loss = torch.zeros((), device=device) + for micro_step in range(grad_accum_steps): + if distributed: + model.require_backward_grad_sync = micro_step == grad_accum_steps - 1 + x, y = train_loader.next_batch(args.train_batch_tokens, args.train_seq_len, grad_accum_steps) + with torch.autocast(device_type="cuda", dtype=torch.bfloat16, enabled=True): + loss = model(x, y) + train_loss += loss.detach() + (loss * grad_scale).backward() + train_loss /= grad_accum_steps + + frac = min(step / args.muon_momentum_warmup_steps, 1.0) if args.muon_momentum_warmup_steps > 0 else 1.0 + muon_momentum = (1 - frac) * args.muon_momentum_warmup_start + frac * args.muon_momentum + for group in optimizer_muon.param_groups: + group["momentum"] = muon_momentum + + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["base_lr"] * scale + + if args.grad_clip_norm > 0: + torch.nn.utils.clip_grad_norm_(base_model.parameters(), args.grad_clip_norm) + for opt in optimizers: + opt.step() + zero_grad_all() + + if master_process and ema_model is not None: + with torch.no_grad(): + current_decay = min(args.ema_decay, (step + 1) / (step + 10)) + for ema_p, p in zip(ema_model.parameters(), base_model.parameters()): + ema_p.mul_(current_decay).add_(p.data, alpha=1 - current_decay) + + step += 1 + approx_training_time_ms = training_time_ms + 1000.0 * (time.perf_counter() - t0) + should_log_train = ( + args.train_log_every > 0 + and (step <= 10 or step % args.train_log_every == 0 or stop_after_step is not None) + ) + if should_log_train: + log0( + f"step:{step}/{args.iterations} train_loss:{train_loss.item():.4f} " + f"train_time:{approx_training_time_ms:.0f}ms step_avg:{approx_training_time_ms / step:.2f}ms" + ) + + reached_cap = max_wallclock_ms is not None and approx_training_time_ms >= max_wallclock_ms + if distributed and max_wallclock_ms is not None: + reached_cap_tensor = torch.tensor(int(reached_cap), device=device) + dist.all_reduce(reached_cap_tensor, op=dist.ReduceOp.MAX) + reached_cap = bool(reached_cap_tensor.item()) + if stop_after_step is None and reached_cap: + stop_after_step = 
step + + log0( + f"peak memory allocated: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB " + f"reserved: {torch.cuda.max_memory_reserved() // 1024 // 1024} MiB" + ) + + if master_process: + torch.save(base_model.state_dict(), "final_model.pt") + model_bytes = os.path.getsize("final_model.pt") + code_bytes = len(code.encode("utf-8")) + log0(f"Serialized model: {model_bytes} bytes") + log0(f"Code size: {code_bytes} bytes") + log0(f"Total submission size: {model_bytes + code_bytes} bytes") + + if master_process and ema_model is not None: + log0("Applying EMA weights.") + base_model.load_state_dict(ema_model.state_dict()) + base_model.looping_active = True # always use depth recurrence for eval/GPTQ + + hessians = None + if args.gptq_calib_batches > 0: + log0(f"gptq:collecting Hessians from {args.gptq_calib_batches} calibration batches...") + torch.cuda.synchronize() + t_gptq = time.perf_counter() + calib_loader = DistributedTokenLoader(args.train_files, rank, world_size, device) + hessians = collect_hessians( + base_model, calib_loader, args, device, grad_accum_steps, + n_batches=args.gptq_calib_batches, + ) + torch.cuda.synchronize() + log0(f"gptq:collected {len(hessians)} Hessians in {time.perf_counter() - t_gptq:.1f}s") + + quant_obj, quant_stats = quantize_state_dict_int8(base_model.state_dict(), hessians=hessians) + quant_buf = io.BytesIO() + torch.save(quant_obj, quant_buf) + quant_raw = quant_buf.getvalue() + quant_blob = lzma.compress(quant_raw, preset=9) + quant_raw_bytes = len(quant_raw) + if master_process: + with open("final_model.int6.ptz", "wb") as f: + f.write(quant_blob) + quant_file_bytes = os.path.getsize("final_model.int6.ptz") + code_bytes = len(code.encode("utf-8")) + ratio = quant_stats["baseline_tensor_bytes"] / max(quant_stats["int8_payload_bytes"], 1) + log0( + f"Serialized model int6+lzma: {quant_file_bytes} bytes " + f"(payload:{quant_stats['int8_payload_bytes']} raw_torch:{quant_raw_bytes} payload_ratio:{ratio:.2f}x)" + ) + log0(f"Total submission size int6+lzma: {quant_file_bytes + code_bytes} bytes") + + if distributed: + dist.barrier() + with open("final_model.int6.ptz", "rb") as f: + quant_blob_disk = f.read() + quant_state = torch.load(io.BytesIO(lzma.decompress(quant_blob_disk)), map_location="cpu") + base_model.load_state_dict(dequantize_state_dict_int8(quant_state), strict=True) + base_model.looping_active = True # ensure depth recurrence is on for eval + torch.cuda.synchronize() + t_qeval = time.perf_counter() + q_val_loss, q_val_bpb = eval_val( + args, + base_model, # use uncompiled model to avoid recompilation limit + rank, + world_size, + device, + grad_accum_steps, + val_tokens, + base_bytes_lut, + has_leading_space_lut, + is_boundary_token_lut, + is_final_eval=True, + ) + torch.cuda.synchronize() + log0( + f"final_int6_lzma_roundtrip val_loss:{q_val_loss:.4f} val_bpb:{q_val_bpb:.4f} " + f"eval_time:{1000.0 * (time.perf_counter() - t_qeval):.0f}ms" + ) + log0(f"final_int6_lzma_roundtrip_exact val_loss:{q_val_loss:.8f} val_bpb:{q_val_bpb:.8f}") + + if distributed: + dist.destroy_process_group() + +if __name__ == "__main__": + main() diff --git a/train_gpt_sota_exp.py b/train_gpt_sota_exp.py new file mode 100644 index 0000000000..6d2fb7f02c --- /dev/null +++ b/train_gpt_sota_exp.py @@ -0,0 +1,3852 @@ +import base64, collections, copy, fcntl, glob, io, lzma, math, os +from pathlib import Path +import random, re, subprocess, sys, time, uuid, numpy as np, sentencepiece as spm, torch, torch.distributed as dist, torch.nn.functional as F +from torch 
import Tensor, nn +from flash_attn_interface import ( + flash_attn_func as flash_attn_3_func, + flash_attn_varlen_func, +) +from concurrent.futures import ThreadPoolExecutor +import triton +import triton.language as tl +from triton.tools.tensor_descriptor import TensorDescriptor + + +# ===== Fused softcapped cross-entropy (Triton) — training-only path ===== +# Replaces the eager +# logits_softcap = softcap * tanh(logits / softcap) +# F.cross_entropy(logits_softcap.float(), targets, reduction="mean") +# sequence with a single fused kernel that reads logits_proj once, applies +# softcap in-register, and computes (LSE, loss) in one streaming pass. The +# backward kernel mirrors the forward so there's no stored softcapped logits. +# Numerically identical to the eager path up to fp32 accumulation differences. +_FUSED_CE_LIBRARY = "pgsubmission1draft7fusedce" +_FUSED_CE_BLOCK_SIZE = 1024 +_FUSED_CE_NUM_WARPS = 4 + + +@triton.jit +def _softcapped_ce_fwd_kernel( + logits_ptr, losses_ptr, lse_ptr, targets_ptr, + stride_logits_n, stride_logits_v, + n_rows, n_cols, softcap, + block_size: tl.constexpr, +): + row_idx = tl.program_id(0).to(tl.int64) + logits_row_ptr = logits_ptr + row_idx * stride_logits_n + max_val = -float("inf") + sum_exp = 0.0 + A = 2.0 * softcap + inv_C = 2.0 / softcap + for off in range(0, n_cols, block_size): + cols = off + tl.arange(0, block_size) + mask = cols < n_cols + val = tl.load( + logits_row_ptr + cols * stride_logits_v, + mask=mask, other=-float("inf"), + ).to(tl.float32) + z = A * tl.sigmoid(val * inv_C) + z = tl.where(mask, z, -float("inf")) + curr_max = tl.max(z, axis=0) + new_max = tl.maximum(max_val, curr_max) + sum_exp = sum_exp * tl.exp(max_val - new_max) + tl.sum(tl.exp(z - new_max), axis=0) + max_val = new_max + lse = max_val + tl.log(sum_exp) + tl.store(lse_ptr + row_idx, lse) + target = tl.load(targets_ptr + row_idx).to(tl.int32) + target_val = tl.load(logits_row_ptr + target * stride_logits_v).to(tl.float32) + target_z = A * tl.sigmoid(target_val * inv_C) + tl.store(losses_ptr + row_idx, lse - target_z) + + +@triton.jit +def _softcapped_ce_bwd_kernel( + grad_logits_ptr, grad_losses_ptr, lse_ptr, logits_ptr, targets_ptr, + stride_logits_n, stride_logits_v, + stride_grad_n, stride_grad_v, + n_rows, n_cols, softcap, + block_size: tl.constexpr, +): + row_idx = tl.program_id(0).to(tl.int64) + logits_row_ptr = logits_ptr + row_idx * stride_logits_n + grad_row_ptr = grad_logits_ptr + row_idx * stride_grad_n + lse = tl.load(lse_ptr + row_idx) + grad_loss = tl.load(grad_losses_ptr + row_idx).to(tl.float32) + target = tl.load(targets_ptr + row_idx).to(tl.int32) + A = 2.0 * softcap + inv_C = 2.0 / softcap + dz_dx_scale = A * inv_C + for off in range(0, n_cols, block_size): + cols = off + tl.arange(0, block_size) + mask = cols < n_cols + val = tl.load( + logits_row_ptr + cols * stride_logits_v, + mask=mask, other=0.0, + ).to(tl.float32) + sigmoid_u = tl.sigmoid(val * inv_C) + z = A * sigmoid_u + probs = tl.exp(z - lse) + grad_z = grad_loss * (probs - tl.where(cols == target, 1.0, 0.0)) + grad_x = grad_z * (dz_dx_scale * sigmoid_u * (1.0 - sigmoid_u)) + tl.store(grad_row_ptr + cols * stride_grad_v, grad_x, mask=mask) + + +def _validate_softcapped_ce_inputs( + logits: Tensor, targets: Tensor, softcap: float, +) -> tuple[Tensor, Tensor]: + if logits.ndim != 2: + raise ValueError(f"Expected logits.ndim=2, got {logits.ndim}") + if targets.ndim != 1: + raise ValueError(f"Expected targets.ndim=1, got {targets.ndim}") + if logits.shape[0] != targets.shape[0]: + raise 
ValueError( + f"Expected matching rows, got logits={tuple(logits.shape)} targets={tuple(targets.shape)}" + ) + if not logits.is_cuda or not targets.is_cuda: + raise ValueError("softcapped_cross_entropy requires CUDA tensors") + if softcap <= 0.0: + raise ValueError(f"softcap must be positive, got {softcap}") + if logits.dtype not in (torch.float16, torch.bfloat16, torch.float32): + raise ValueError(f"Unsupported logits dtype: {logits.dtype}") + logits = logits.contiguous() + targets = targets.contiguous() + if targets.dtype != torch.int64: + targets = targets.to(dtype=torch.int64) + return logits, targets + + +@torch.library.custom_op(f"{_FUSED_CE_LIBRARY}::softcapped_ce", mutates_args=()) +def softcapped_ce_op(logits: Tensor, targets: Tensor, softcap: float) -> tuple[Tensor, Tensor]: + logits, targets = _validate_softcapped_ce_inputs(logits, targets, float(softcap)) + n_rows, n_cols = logits.shape + losses = torch.empty((n_rows,), device=logits.device, dtype=torch.float32) + lse = torch.empty((n_rows,), device=logits.device, dtype=torch.float32) + _softcapped_ce_fwd_kernel[(n_rows,)]( + logits, losses, lse, targets, + logits.stride(0), logits.stride(1), + n_rows, n_cols, float(softcap), + block_size=_FUSED_CE_BLOCK_SIZE, num_warps=_FUSED_CE_NUM_WARPS, + ) + return losses, lse + + +@softcapped_ce_op.register_fake +def _(logits: Tensor, targets: Tensor, softcap: float): + if logits.ndim != 2 or targets.ndim != 1: + raise ValueError("softcapped_ce fake impl expects 2D logits and 1D targets") + if logits.shape[0] != targets.shape[0]: + raise ValueError( + f"Expected matching rows, got logits={tuple(logits.shape)} targets={tuple(targets.shape)}" + ) + n_rows = logits.shape[0] + return ( + logits.new_empty((n_rows,), dtype=torch.float32), + logits.new_empty((n_rows,), dtype=torch.float32), + ) + + +@torch.library.custom_op(f"{_FUSED_CE_LIBRARY}::softcapped_ce_backward", mutates_args=()) +def softcapped_ce_backward_op( + logits: Tensor, targets: Tensor, lse: Tensor, grad_losses: Tensor, softcap: float, +) -> Tensor: + logits, targets = _validate_softcapped_ce_inputs(logits, targets, float(softcap)) + lse = lse.contiguous() + grad_losses = grad_losses.contiguous().to(dtype=torch.float32) + if lse.ndim != 1 or grad_losses.ndim != 1: + raise ValueError("Expected 1D lse and grad_losses") + if lse.shape[0] != logits.shape[0] or grad_losses.shape[0] != logits.shape[0]: + raise ValueError( + f"Expected row-aligned lse/grad_losses, got logits={tuple(logits.shape)} " + f"lse={tuple(lse.shape)} grad_losses={tuple(grad_losses.shape)}" + ) + grad_logits = torch.empty_like(logits) + n_rows, n_cols = logits.shape + _softcapped_ce_bwd_kernel[(n_rows,)]( + grad_logits, grad_losses, lse, logits, targets, + logits.stride(0), logits.stride(1), + grad_logits.stride(0), grad_logits.stride(1), + n_rows, n_cols, float(softcap), + block_size=_FUSED_CE_BLOCK_SIZE, num_warps=_FUSED_CE_NUM_WARPS, + ) + return grad_logits + + +@softcapped_ce_backward_op.register_fake +def _(logits: Tensor, targets: Tensor, lse: Tensor, grad_losses: Tensor, softcap: float): + if logits.ndim != 2 or targets.ndim != 1 or lse.ndim != 1 or grad_losses.ndim != 1: + raise ValueError("softcapped_ce_backward fake impl expects 2D logits and 1D row tensors") + if ( + logits.shape[0] != targets.shape[0] + or logits.shape[0] != lse.shape[0] + or logits.shape[0] != grad_losses.shape[0] + ): + raise ValueError("softcapped_ce_backward fake impl expects row-aligned tensors") + return logits.new_empty(logits.shape) + + +def _softcapped_ce_setup_context( + 
ctx: torch.autograd.function.FunctionCtx, inputs, output, +) -> None: + logits, targets, softcap = inputs + _losses, lse = output + ctx.save_for_backward(logits, targets, lse) + ctx.softcap = float(softcap) + + +def _softcapped_ce_backward( + ctx: torch.autograd.function.FunctionCtx, grad_losses: Tensor, grad_lse: "Tensor | None", +): + del grad_lse + logits, targets, lse = ctx.saved_tensors + grad_logits = torch.ops.pgsubmission1draft7fusedce.softcapped_ce_backward( + logits, targets, lse, grad_losses, ctx.softcap + ) + return grad_logits, None, None + + +softcapped_ce_op.register_autograd( + _softcapped_ce_backward, setup_context=_softcapped_ce_setup_context, +) + + +def softcapped_cross_entropy( + logits: Tensor, targets: Tensor, softcap: float, reduction: str = "mean", +) -> Tensor: + losses, _lse = torch.ops.pgsubmission1draft7fusedce.softcapped_ce( + logits, targets, float(softcap) + ) + if reduction == "none": + return losses + if reduction == "sum": + return losses.sum() + if reduction == "mean": + return losses.mean() + raise ValueError(f"Unsupported reduction={reduction!r}") + + +class Hyperparameters: + data_dir = os.environ.get("DATA_DIR", "./data/") + seed = int(os.environ.get("SEED", 1337)) + run_id = os.environ.get("RUN_ID", str(uuid.uuid4())) + iterations = int(os.environ.get("ITERATIONS", 20000)) + warmdown_frac = float(os.environ.get("WARMDOWN_FRAC", 0.75)) + warmup_steps = int(os.environ.get("WARMUP_STEPS", 20)) + train_batch_tokens = int(os.environ.get("TRAIN_BATCH_TOKENS", 786432)) + # Fused softcapped CE (Triton). Training-only — forward_logits eval path still uses + # eager softcap+F.cross_entropy. Default ON since validated as at-worst neutral. + fused_ce_enabled = bool(int(os.environ.get("FUSED_CE_ENABLED", "1"))) + train_seq_len = int(os.environ.get("TRAIN_SEQ_LEN", 2048)) + train_log_every = int(os.environ.get("TRAIN_LOG_EVERY", 500)) + max_wallclock_seconds = float(os.environ.get("MAX_WALLCLOCK_SECONDS", 6e2)) + val_batch_tokens = int(os.environ.get("VAL_BATCH_TOKENS", 524288)) + eval_seq_len = int(os.environ.get("EVAL_SEQ_LEN", 2048)) + val_loss_every = int(os.environ.get("VAL_LOSS_EVERY", 4000)) + vocab_size = int(os.environ.get("VOCAB_SIZE", 8192)) + num_layers = int(os.environ.get("NUM_LAYERS", 11)) + xsa_last_n = int(os.environ.get("XSA_LAST_N", 11)) + model_dim = int(os.environ.get("MODEL_DIM", 512)) + num_kv_heads = int(os.environ.get("NUM_KV_HEADS", 4)) + num_heads = int(os.environ.get("NUM_HEADS", 8)) + mlp_mult = float(os.environ.get("MLP_MULT", 4.0)) + leaky_relu_slope = float(os.environ.get("LEAKY_RELU_SLOPE", 0.3)) + skip_gates_enabled = bool(int(os.environ.get("SKIP_GATES_ENABLED", "1"))) + tie_embeddings = bool(int(os.environ.get("TIE_EMBEDDINGS", "1"))) + logit_softcap = float(os.environ.get("LOGIT_SOFTCAP", 3e1)) + rope_base = float(os.environ.get("ROPE_BASE", 1e4)) + rope_dims = int(os.environ.get("ROPE_DIMS", 16)) + rope_train_seq_len = int(os.environ.get("ROPE_TRAIN_SEQ_LEN", 2048)) + rope_yarn = bool(int(os.environ.get("ROPE_YARN", "0"))) + ln_scale = bool(int(os.environ.get("LN_SCALE", "1"))) + qk_gain_init = float(os.environ.get("QK_GAIN_INIT", 5.25)) + num_loops = int(os.environ.get("NUM_LOOPS", 2)) + loop_start = int(os.environ.get("LOOP_START", 3)) + loop_end = int(os.environ.get("LOOP_END", 5)) + enable_looping_at = float(os.environ.get("ENABLE_LOOPING_AT", 0.35)) + parallel_start_layer = int(os.environ.get("PARALLEL_START_LAYER", 8)) + parallel_final_lane = os.environ.get("PARALLEL_FINAL_LANE", "mean") + min_lr = 
float(os.environ.get("MIN_LR", 0.1)) + embed_lr = float(os.environ.get("EMBED_LR", 0.6)) + tied_embed_lr = float(os.environ.get("TIED_EMBED_LR", 0.03)) + tied_embed_init_std = float(os.environ.get("TIED_EMBED_INIT_STD", 0.005)) + matrix_lr = float(os.environ.get("MATRIX_LR", 0.026)) + scalar_lr = float(os.environ.get("SCALAR_LR", 0.02)) + muon_momentum = float(os.environ.get("MUON_MOMENTUM", 0.97)) + muon_backend_steps = int(os.environ.get("MUON_BACKEND_STEPS", 5)) + muon_momentum_warmup_start = float( + os.environ.get("MUON_MOMENTUM_WARMUP_START", 0.92) + ) + muon_momentum_warmup_steps = int(os.environ.get("MUON_MOMENTUM_WARMUP_STEPS", 1500)) + muon_row_normalize = bool(int(os.environ.get("MUON_ROW_NORMALIZE", "1"))) + beta1 = float(os.environ.get("BETA1", 0.9)) + beta2 = float(os.environ.get("BETA2", 0.95)) + adam_eps = float(os.environ.get("ADAM_EPS", 1e-08)) + grad_clip_norm = float(os.environ.get("GRAD_CLIP_NORM", 0.3)) + eval_stride = int(os.environ.get("EVAL_STRIDE", 64)) + adam_wd = float(os.environ.get("ADAM_WD", 0.02)) + muon_wd = float(os.environ.get("MUON_WD", 0.095)) + embed_wd = float(os.environ.get("EMBED_WD", 0.085)) + ema_decay = float(os.environ.get("EMA_DECAY", 0.9965)) + ttt_enabled = bool(int(os.environ.get("TTT_ENABLED", "1"))) + ttt_lora_rank = int(os.environ.get("TTT_LORA_RANK", 96)) + ttt_lora_lr = float(os.environ.get("TTT_LORA_LR", 0.0001)) + ttt_chunk_size = int(os.environ.get("TTT_CHUNK_SIZE", 48)) + ttt_eval_seq_len = int(os.environ.get("TTT_EVAL_SEQ_LEN", 2048)) + ttt_batch_size = int(os.environ.get("TTT_BATCH_SIZE", 64)) + ttt_grad_steps = int(os.environ.get("TTT_GRAD_STEPS", 1)) + ttt_weight_decay = float(os.environ.get("TTT_WEIGHT_DECAY", 1.0)) + ttt_beta1 = float(os.environ.get("TTT_BETA1", 0)) + ttt_beta2 = float(os.environ.get("TTT_BETA2", 0.999)) + ttt_k_lora = bool(int(os.environ.get("TTT_K_LORA", "1"))) + ttt_mlp_lora = bool(int(os.environ.get("TTT_MLP_LORA", "1"))) + ttt_o_lora = bool(int(os.environ.get("TTT_O_LORA", "1"))) + ttt_optimizer = os.environ.get("TTT_OPTIMIZER", "adam") + ttt_eval_batches = os.environ.get("TTT_EVAL_BATCHES", "") + val_doc_fraction = float(os.environ.get("VAL_DOC_FRACTION", 1.0)) + compressor = os.environ.get("COMPRESSOR", "brotli") + gptq_calibration_batches = int(os.environ.get("GPTQ_CALIBRATION_BATCHES", 16)) + gptq_reserve_seconds = float(os.environ.get("GPTQ_RESERVE_SECONDS", 4.0)) + phased_ttt_prefix_docs = int(os.environ.get("PHASED_TTT_PREFIX_DOCS", 2000)) + phased_ttt_num_phases = int(os.environ.get("PHASED_TTT_NUM_PHASES", 1)) + global_ttt_lr = float(os.environ.get("GLOBAL_TTT_LR", 0.001)) + global_ttt_momentum = float(os.environ.get("GLOBAL_TTT_MOMENTUM", 0.9)) + global_ttt_epochs = int(os.environ.get("GLOBAL_TTT_EPOCHS", 1)) + global_ttt_chunk_tokens = int(os.environ.get("GLOBAL_TTT_CHUNK_TOKENS", 32768)) + global_ttt_batch_seqs = int(os.environ.get("GLOBAL_TTT_BATCH_SEQS", 32)) + global_ttt_warmup_start_lr = float(os.environ.get("GLOBAL_TTT_WARMUP_START_LR", 0.0)) + global_ttt_warmup_chunks = int(os.environ.get("GLOBAL_TTT_WARMUP_CHUNKS", 0)) + global_ttt_grad_clip = float(os.environ.get("GLOBAL_TTT_GRAD_CLIP", 1.0)) + global_ttt_respect_doc_boundaries = bool(int(os.environ.get("GLOBAL_TTT_RESPECT_DOC_BOUNDARIES", "1"))) + matrix_bits = int(os.environ.get("MATRIX_BITS", 6)) + embed_bits = int(os.environ.get("EMBED_BITS", 7)) + matrix_clip_sigmas = float(os.environ.get("MATRIX_CLIP_SIGMAS", 12.85)) + embed_clip_sigmas = float(os.environ.get("EMBED_CLIP_SIGMAS", 14.0)) + mlp_clip_sigmas = 
float(os.environ.get("MLP_CLIP_SIGMAS", 11.5)) + attn_clip_sigmas = float(os.environ.get("ATTN_CLIP_SIGMAS", 13.0)) + # AttnOutGate (per-head multiplicative output gate, PR #1667 MarioPaerle). + # Zero-init weight: 2*sigmoid(0)=1 -> transparent at start. Source defaults to + # block input x ('proj'); 'q' uses raw Q projection output. + attn_out_gate_enabled = bool(int(os.environ.get("ATTN_OUT_GATE_ENABLED", "0"))) + attn_out_gate_src = os.environ.get("ATTN_OUT_GATE_SRC", "proj") + # SmearGate (input-dependent forward-1 token smear, modded-nanogpt @classiclarryd + # via PR #1667). x_t <- x_t + lam * sigmoid(W*x_t[:gate_window]) * x_{t-1}. + # lam=0 + W=0 -> transparent at init. + smear_gate_enabled = bool(int(os.environ.get("SMEAR_GATE_ENABLED", "1"))) + # Window: first GATE_WINDOW dims of the source feed the gate projection. + gate_window = int(os.environ.get("GATE_WINDOW", 12)) + # Gated Attention (Qwen, NeurIPS 2025 Best Paper, arXiv:2505.06708; + # qiuzh20/gated_attention). Per-head sigmoid gate on SDPA output, BEFORE + # out_proj. Gate input = full block input x (paper's headwise G1 variant + # driven from hidden_states). W_g shape (num_heads, dim), plain sigmoid. + # Near-zero init gives g~0.5 at step 0 (half attention output); per-block + # attn_scale (init 1.0) compensates during training. Name contains + # "attn_gate" so CONTROL_TENSOR_NAME_PATTERNS routes it to scalar AdamW. + gated_attn_enabled = bool(int(os.environ.get("GATED_ATTN_ENABLED", "0"))) + gated_attn_init_std = float(os.environ.get("GATED_ATTN_INIT_STD", 0.01)) + # Dedicated int8-per-row quantization for `attn_gate_w` tensors. These are + # small ((num_heads, dim) = (8, 512) = 4096 params) and bypass GPTQ via the + # numel<=65536 passthrough branch -> stored as fp16 (8 KB/layer, ~65 KB total + # compressed). int8-per-row cuts the raw tensor in half with negligible BPB + # impact: scales per head (8 values), symmetric quant over [-127, 127]. + # No Hessian needed (gate weights not in collect_hessians()). + gated_attn_quant_gate = bool(int(os.environ.get("GATED_ATTN_QUANT_GATE", "0"))) + # Sparse Attention Gate (modded-nanogpt-style). Keeps dense SDPA and only + # swaps the output-gate input to the first GATE_WINDOW residual dims. + # W_g: (num_heads, gate_window) = (8, 12) = 96 params/layer (~44K total), + # vs dense GatedAttn's (8, 512) = 4K/layer (~44K diff). Name "attn_gate_w" + # is shared so quant routing and int8 gate passthrough Just Work. Gate + # passthrough int8 still applies via GATED_ATTN_QUANT_GATE=1. + # Mutually exclusive with ATTN_OUT_GATE_ENABLED and GATED_ATTN_ENABLED. + sparse_attn_gate_enabled = bool(int(os.environ.get("SPARSE_ATTN_GATE_ENABLED", "1"))) + sparse_attn_gate_init_std = float(os.environ.get("SPARSE_ATTN_GATE_INIT_STD", 0.0)) + sparse_attn_gate_scale = float(os.environ.get("SPARSE_ATTN_GATE_SCALE", 1.0)) + # Entropy-weighted loss: upweight hard tokens during training. + # w_i = clamp(loss_i / running_mean_loss, min_w, max_w). Default OFF. + entropy_weighted_loss = bool(int(os.environ.get("ENTROPY_WEIGHTED_LOSS", "0"))) + ewl_min_weight = float(os.environ.get("EWL_MIN_WEIGHT", 0.3)) + ewl_max_weight = float(os.environ.get("EWL_MAX_WEIGHT", 3.0)) + # Causal bigram blending: at eval, blend model log-probs with online + # bigram statistics. λ = BIGRAM_BLEND_LAMBDA * (1 - model_confidence). 
+ bigram_blend_enabled = bool(int(os.environ.get("BIGRAM_BLEND_ENABLED", "0"))) + bigram_blend_lambda = float(os.environ.get("BIGRAM_BLEND_LAMBDA", 0.03)) + # Entropy-adaptive temperature scaling at eval time. + # Sharpens confident predictions (low entropy), softens uncertain ones. + temp_scale_enabled = bool(int(os.environ.get("TEMP_SCALE_ENABLED", "0"))) + temp_scale_low = float(os.environ.get("TEMP_SCALE_LOW", 0.85)) # T for low entropy + temp_scale_high = float(os.environ.get("TEMP_SCALE_HIGH", 1.15)) # T for high entropy + temp_scale_ent_low = float(os.environ.get("TEMP_SCALE_ENT_LOW", 2.0)) # entropy threshold for sharpening + temp_scale_ent_high = float(os.environ.get("TEMP_SCALE_ENT_HIGH", 6.0)) # entropy threshold for softening + # LQER asymmetric rank-k correction on top-K quant-error tensors (PR #1530 v2 port). + # Computes SVD of E = W_fp - W_quant, packs top-r A,B as INT2/INT4 (asym) or INTk (sym). + lqer_enabled = bool(int(os.environ.get("LQER_ENABLED", "1"))) + lqer_rank = int(os.environ.get("LQER_RANK", 4)) + lqer_top_k = int(os.environ.get("LQER_TOP_K", 3)) + lqer_factor_bits = int(os.environ.get("LQER_FACTOR_BITS", 4)) + lqer_asym_enabled = bool(int(os.environ.get("LQER_ASYM_ENABLED", "1"))) + lqer_asym_group = int(os.environ.get("LQER_ASYM_GROUP", "64")) + distributed = "RANK" in os.environ and "WORLD_SIZE" in os.environ + rank = int(os.environ.get("RANK", "0")) + world_size = int(os.environ.get("WORLD_SIZE", "1")) + local_rank = int(os.environ.get("LOCAL_RANK", "0")) + is_main_process = rank == 0 + grad_accum_steps = 8 // world_size + # CaseOps integration: optional override of dataset root + tokenizer path. + # When CASEOPS_ENABLED=1, the wrapper loads a per-token byte sidecar + # (fineweb_val_bytes_*.bin, identical shard layout to val_*.bin) and uses + # it as the canonical raw-byte budget for BPB accounting. The sidecar + # REPLACES the build_sentencepiece_luts byte-counting path entirely. 
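+    # In sketch form (assuming the standard bits-per-byte definition; the real accounting
+    # lives in the eval path): with per-position CE losses in nats and the sidecar's
+    # per-token byte counts over the same positions,
+    #   val_bpb = losses_nats.sum() / math.log(2) / byte_counts.sum()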
+ caseops_enabled = bool(int(os.environ.get("CASEOPS_ENABLED", "0"))) + _default_caseops_data = os.path.join( + data_dir, + "datasets", + "fineweb10B_sp8192_caseops", + "datasets", + "datasets", + "fineweb10B_sp8192_lossless_caps_caseops_v1_reserved", + ) + _default_caseops_tok = os.path.join( + data_dir, + "datasets", + "fineweb10B_sp8192_caseops", + "datasets", + "tokenizers", + "fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model", + ) + if caseops_enabled: + datasets_dir = os.environ.get("DATA_PATH", _default_caseops_data) + tokenizer_path = os.environ.get("TOKENIZER_PATH", _default_caseops_tok) + else: + datasets_dir = os.environ.get( + "DATA_PATH", + os.path.join(data_dir, "datasets", f"fineweb10B_sp{vocab_size}"), + ) + tokenizer_path = os.environ.get( + "TOKENIZER_PATH", + os.path.join(data_dir, "tokenizers", f"fineweb_{vocab_size}_bpe.model"), + ) + train_files = os.path.join(datasets_dir, "fineweb_train_*.bin") + val_files = os.path.join(datasets_dir, "fineweb_val_*.bin") + val_bytes_files = os.path.join(datasets_dir, "fineweb_val_bytes_*.bin") + artifact_dir = os.environ.get("ARTIFACT_DIR", "") + logfile = ( + os.path.join(artifact_dir, f"{run_id}.txt") + if artifact_dir + else f"logs/{run_id}.txt" + ) + model_path = ( + os.path.join(artifact_dir, "final_model.pt") + if artifact_dir + else "final_model.pt" + ) + quantized_model_path = ( + os.path.join(artifact_dir, "final_model.int6.ptz") + if artifact_dir + else "final_model.int6.ptz" + ) + + +_logger_hparams = None + + +def set_logging_hparams(h): + global _logger_hparams + _logger_hparams = h + + +def log(msg, console=True): + if _logger_hparams is None: + print(msg) + return + if _logger_hparams.is_main_process: + if console: + print(msg) + if _logger_hparams.logfile is not None: + with open(_logger_hparams.logfile, "a", encoding="utf-8") as f: + print(msg, file=f) + + +class ValidationData: + def __init__(self, h, device): + self.sp = spm.SentencePieceProcessor(model_file=h.tokenizer_path) + if int(self.sp.vocab_size()) != h.vocab_size: + raise ValueError( + f"VOCAB_SIZE={h.vocab_size} does not match tokenizer vocab_size={int(self.sp.vocab_size())}" + ) + self.val_tokens = load_validation_tokens(h.val_files, h.eval_seq_len) + self.caseops_enabled = bool(getattr(h, "caseops_enabled", False)) + if self.caseops_enabled: + self.base_bytes_lut = None + self.has_leading_space_lut = None + self.is_boundary_token_lut = None + else: + ( + self.base_bytes_lut, + self.has_leading_space_lut, + self.is_boundary_token_lut, + ) = build_sentencepiece_luts(self.sp, h.vocab_size, device) + self.val_bytes = None + if self.caseops_enabled: + self.val_bytes = load_validation_byte_sidecar( + h.val_bytes_files, h.eval_seq_len, self.val_tokens.numel() + ) + + +def build_sentencepiece_luts(sp, vocab_size, device): + sp_vocab_size = int(sp.vocab_size()) + assert ( + sp.piece_to_id("▁") != sp.unk_id() + ), "Tokenizer must have '▁' (space) as its own token for correct BPB byte counting" + table_size = max(sp_vocab_size, vocab_size) + base_bytes_np = np.zeros((table_size,), dtype=np.int16) + has_leading_space_np = np.zeros((table_size,), dtype=np.bool_) + is_boundary_token_np = np.ones((table_size,), dtype=np.bool_) + for token_id in range(sp_vocab_size): + if sp.is_control(token_id) or sp.is_unknown(token_id) or sp.is_unused(token_id): + continue + is_boundary_token_np[token_id] = False + if sp.is_byte(token_id): + base_bytes_np[token_id] = 1 + continue + piece = sp.id_to_piece(token_id) + if piece.startswith("▁"): + 
has_leading_space_np[token_id] = True + piece = piece[1:] + base_bytes_np[token_id] = len(piece.encode("utf-8")) + return ( + torch.tensor(base_bytes_np, dtype=torch.int16, device=device), + torch.tensor(has_leading_space_np, dtype=torch.bool, device=device), + torch.tensor(is_boundary_token_np, dtype=torch.bool, device=device), + ) + + +def load_validation_tokens(pattern, seq_len): + # Filter out CaseOps byte sidecar shards which share the val_*.bin glob. + files = [ + Path(p) + for p in sorted(glob.glob(pattern)) + if "_bytes_" not in Path(p).name + ] + if not files: + raise FileNotFoundError(f"No files found for pattern: {pattern}") + tokens = torch.cat([load_data_shard(file) for file in files]).contiguous() + usable = (tokens.numel() - 1) // seq_len * seq_len + if usable <= 0: + raise ValueError(f"Validation split is too short for TRAIN_SEQ_LEN={seq_len}") + return tokens[: usable + 1] + + +def load_validation_byte_sidecar(pattern, seq_len, expected_len): + """Load CaseOps per-token byte sidecar(s). Same shard layout as token shards + (256 int32 header + uint16 array). Each entry = canonical raw-text byte + budget for that token in the corresponding val shard. Returns a CPU + int16 tensor sliced to match expected_len (i.e. val_tokens length).""" + files = [Path(p) for p in sorted(glob.glob(pattern))] + if not files: + raise FileNotFoundError(f"No byte sidecar files for pattern: {pattern}") + shards = [load_data_shard(file) for file in files] + # load_data_shard returns uint16 — that's exactly what the sidecar stores. + bytes_full = torch.cat(shards).contiguous() + if bytes_full.numel() < expected_len: + raise ValueError( + f"Byte sidecar too short: {bytes_full.numel()} < val_tokens {expected_len}" + ) + return bytes_full[:expected_len].to(torch.int32) + + +def load_data_shard(file): + header_bytes = 256 * np.dtype(" 0: + pos = start + while pos < end: + seg_starts.append(pos) + pos += max_doc_len + else: + seg_starts.append(start) + boundaries = seg_starts + [total_len] + padded_len = get_next_multiple_of_n(len(boundaries), bucket_size) + cu = torch.full((padded_len,), total_len, dtype=torch.int32, device=device) + cu[: len(boundaries)] = torch.tensor(boundaries, dtype=torch.int32, device=device) + seg_ends = seg_starts[1:] + [total_len] + max_seqlen = max(end - start for start, end in zip(seg_starts, seg_ends)) + return cu, max_seqlen + +class DocumentPackingLoader: + _shard_pool = ThreadPoolExecutor(1) + + def __init__(self, h, device, cu_bucket_size=64): + self.rank = h.rank + self.world_size = h.world_size + self.device = device + self.cu_bucket_size = cu_bucket_size + self.max_seq_len = h.train_seq_len + all_files = [Path(p) for p in sorted(glob.glob(h.train_files))] + if not all_files: + raise FileNotFoundError(f"No files found for pattern: {h.train_files}") + self.files = all_files + self.file_iter = iter(self.files) + self._init_shard(load_data_shard(next(self.file_iter))) + self._next_shard = self._submit_next_shard() + self._batch_pool = ThreadPoolExecutor(1) + self._prefetch_queue = [] + + def _init_shard(self, tokens): + global BOS_ID + self.tokens = tokens + self.shard_size = tokens.numel() + if BOS_ID is None: + BOS_ID = 1 + self.bos_idx = ( + (tokens == BOS_ID).nonzero(as_tuple=True)[0].to(torch.int64).cpu().numpy() + ) + self.cursor = int(self.bos_idx[0]) + + def _submit_next_shard(self): + try: + path = next(self.file_iter) + return self._shard_pool.submit(load_data_shard, path) + except StopIteration: + return None + + def _advance_shard(self): + if self._next_shard 
is None: + self.file_iter = iter(self.files) + self._next_shard = self._shard_pool.submit( + load_data_shard, next(self.file_iter) + ) + self._init_shard(self._next_shard.result()) + self._next_shard = self._submit_next_shard() + + def _local_doc_starts(self, local_start, total_len): + lo = np.searchsorted(self.bos_idx, local_start, side="left") + hi = np.searchsorted(self.bos_idx, local_start + total_len, side="left") + return (self.bos_idx[lo:hi] - local_start).tolist() + + def _prepare_batch(self, num_tokens_local, max_seq_len): + per_rank_span = num_tokens_local + 1 + global_span = per_rank_span * self.world_size + while self.cursor + global_span > self.shard_size: + self._advance_shard() + local_start = self.cursor + self.rank * per_rank_span + buf = self.tokens[local_start : local_start + per_rank_span] + inputs = torch.empty(per_rank_span - 1, dtype=torch.int64, pin_memory=True) + targets = torch.empty(per_rank_span - 1, dtype=torch.int64, pin_memory=True) + inputs.copy_(buf[:-1]) + targets.copy_(buf[1:]) + starts = self._local_doc_starts(local_start, inputs.numel()) + cu_seqlens, max_seqlen = _build_cu_seqlens( + starts, inputs.numel(), inputs.device, max_seq_len, self.cu_bucket_size + ) + cu_seqlens = cu_seqlens.pin_memory() + self.cursor += global_span + return inputs, targets, cu_seqlens, max_seqlen + + def next_batch(self, global_tokens, grad_accum_steps): + num_tokens_local = global_tokens // (self.world_size * grad_accum_steps) + while len(self._prefetch_queue) < 2: + self._prefetch_queue.append( + self._batch_pool.submit(self._prepare_batch, num_tokens_local, self.max_seq_len)) + inputs, targets, cu_seqlens, max_seqlen = self._prefetch_queue.pop(0).result() + self._prefetch_queue.append( + self._batch_pool.submit(self._prepare_batch, num_tokens_local, self.max_seq_len)) + return ( + inputs[None].to(self.device, non_blocking=True), + targets[None].to(self.device, non_blocking=True), + cu_seqlens.to(self.device, non_blocking=True), + max_seqlen, + ) + + +class ShuffledSequenceLoader: + def __init__(self, h, device): + self.world_size = h.world_size + self.seq_len = h.train_seq_len + self.device = device + all_files = [Path(p) for p in sorted(glob.glob(h.train_files))] + if not all_files: + raise FileNotFoundError(f"No files found for pattern: {h.train_files}") + self.files = all_files[h.rank :: h.world_size] + self.rng = np.random.Generator(np.random.PCG64(h.rank)) + self.num_tokens = [_read_num_tokens(f) for f in self.files] + self.start_inds = [[] for _ in self.files] + for si in range(len(self.files)): + self._reset_shard(si) + + def _reset_shard(self, si): + max_phase = min( + self.seq_len - 1, max(0, self.num_tokens[si] - self.seq_len - 1) + ) + phase = int(self.rng.integers(max_phase + 1)) if max_phase > 0 else 0 + num_sequences = (self.num_tokens[si] - 1 - phase) // self.seq_len + sequence_order = self.rng.permutation(num_sequences) + self.start_inds[si] = (phase + sequence_order * self.seq_len).tolist() + + def next_batch(self, global_tokens, grad_accum_steps): + device_tokens = global_tokens // (self.world_size * grad_accum_steps) + device_batch_size = device_tokens // self.seq_len + remaining = np.array([len(s) for s in self.start_inds], dtype=np.float64) + x = torch.empty((device_batch_size, self.seq_len), dtype=torch.int64) + y = torch.empty((device_batch_size, self.seq_len), dtype=torch.int64) + for bi in range(device_batch_size): + total = remaining.sum() + if total <= 0: + for si in range(len(self.files)): + self._reset_shard(si) + remaining = np.array( + 
[len(s) for s in self.start_inds], dtype=np.float64 + ) + total = remaining.sum() + probs = remaining / total + si = int(self.rng.choice(len(self.files), p=probs)) + start_ind = self.start_inds[si].pop() + remaining[si] -= 1 + mm = _get_shard_memmap(self.files[si]) + window = torch.as_tensor( + np.array(mm[start_ind : start_ind + self.seq_len + 1], dtype=np.int64) + ) + x[bi] = window[:-1] + y[bi] = window[1:] + return x.to(self.device, non_blocking=True), y.to( + self.device, non_blocking=True + ) + + +class RMSNorm(nn.Module): + def __init__(self, eps=None): + super().__init__() + self.eps = eps + + def forward(self, x): + return F.rms_norm(x, (x.size(-1),), eps=self.eps) + + +class CastedLinear(nn.Linear): + def forward(self, x): + w = self.weight.to(x.dtype) + bias = self.bias.to(x.dtype) if self.bias is not None else None + return F.linear(x, w, bias) + + +_LEAKY_RELU_SLOPE = 0.3 # Set from Hyperparameters at startup + +@triton.jit +def linear_leaky_relu_square_kernel( + a_desc, + b_desc, + c_desc, + aux_desc, + M, + N, + K, + SLOPE, + BLOCK_SIZE_M: tl.constexpr, + BLOCK_SIZE_N: tl.constexpr, + BLOCK_SIZE_K: tl.constexpr, + NUM_SMS: tl.constexpr, + FORWARD: tl.constexpr, +): + dtype = tl.bfloat16 + start_pid = tl.program_id(axis=0) + num_pid_m = tl.cdiv(M, BLOCK_SIZE_M) + num_pid_n = tl.cdiv(N, BLOCK_SIZE_N) + k_tiles = tl.cdiv(K, BLOCK_SIZE_K) + num_tiles = num_pid_m * num_pid_n + tile_id_c = start_pid - NUM_SMS + for tile_id in tl.range(start_pid, num_tiles, NUM_SMS, flatten=True): + pid_m = tile_id // num_pid_n + pid_n = tile_id % num_pid_n + offs_am = pid_m * BLOCK_SIZE_M + offs_bn = pid_n * BLOCK_SIZE_N + accumulator = tl.zeros((BLOCK_SIZE_M, BLOCK_SIZE_N), dtype=tl.float32) + for ki in range(k_tiles): + offs_k = ki * BLOCK_SIZE_K + a = a_desc.load([offs_am, offs_k]) + b = b_desc.load([offs_bn, offs_k]) + accumulator = tl.dot(a, b.T, accumulator) + tile_id_c += NUM_SMS + offs_am_c = offs_am + offs_bn_c = offs_bn + acc = tl.reshape(accumulator, (BLOCK_SIZE_M, 2, BLOCK_SIZE_N // 2)) + acc = tl.permute(acc, (0, 2, 1)) + acc0, acc1 = tl.split(acc) + c0 = acc0.to(dtype) + c1 = acc1.to(dtype) + if not FORWARD: + pre0 = aux_desc.load([offs_am_c, offs_bn_c]) + pre1 = aux_desc.load([offs_am_c, offs_bn_c + BLOCK_SIZE_N // 2]) + c0 = c0 * tl.where(pre0 > 0, 2.0 * pre0, 2.0 * SLOPE * SLOPE * pre0) + c1 = c1 * tl.where(pre1 > 0, 2.0 * pre1, 2.0 * SLOPE * SLOPE * pre1) + c_desc.store([offs_am_c, offs_bn_c], c0) + c_desc.store([offs_am_c, offs_bn_c + BLOCK_SIZE_N // 2], c1) + if FORWARD: + aux0 = tl.where(c0 > 0, c0, SLOPE * c0) + aux1 = tl.where(c1 > 0, c1, SLOPE * c1) + aux_desc.store([offs_am_c, offs_bn_c], aux0 * aux0) + aux_desc.store([offs_am_c, offs_bn_c + BLOCK_SIZE_N // 2], aux1 * aux1) + + +def linear_leaky_relu_square(a, b, aux=None): + M, K = a.shape + N, K2 = b.shape + assert K == K2 + c = torch.empty((M, N), device=a.device, dtype=a.dtype) + forward = aux is None + if aux is None: + aux = torch.empty((M, N), device=a.device, dtype=a.dtype) + num_sms = torch.cuda.get_device_properties(a.device).multi_processor_count + BLOCK_SIZE_M, BLOCK_SIZE_N, BLOCK_SIZE_K = 256, 128, 64 + num_stages = 3 if forward else 2 # H100: 232KB shared mem (H200: 256KB) + a_desc = TensorDescriptor.from_tensor(a, [BLOCK_SIZE_M, BLOCK_SIZE_K]) + b_desc = TensorDescriptor.from_tensor(b, [BLOCK_SIZE_N, BLOCK_SIZE_K]) + c_desc = TensorDescriptor.from_tensor(c, [BLOCK_SIZE_M, BLOCK_SIZE_N // 2]) + aux_desc = TensorDescriptor.from_tensor(aux, [BLOCK_SIZE_M, BLOCK_SIZE_N // 2]) + grid = lambda _meta: ( + 
min(num_sms, triton.cdiv(M, BLOCK_SIZE_M) * triton.cdiv(N, BLOCK_SIZE_N)), + ) + linear_leaky_relu_square_kernel[grid]( + a_desc, + b_desc, + c_desc, + aux_desc, + M, + N, + K, + _LEAKY_RELU_SLOPE, + BLOCK_SIZE_M=BLOCK_SIZE_M, + BLOCK_SIZE_N=BLOCK_SIZE_N, + BLOCK_SIZE_K=BLOCK_SIZE_K, + NUM_SMS=num_sms, + FORWARD=forward, + num_stages=num_stages, + num_warps=8, + ) + if forward: + return c, aux + return c + + +class FusedLinearLeakyReLUSquareFunction(torch.autograd.Function): + @staticmethod + def forward(ctx, x, w1, w2): + x_flat = x.reshape(-1, x.shape[-1]) + pre, post = linear_leaky_relu_square(x_flat, w1) + out = F.linear(post, w2) + ctx.save_for_backward(x, w1, w2, pre, post) + return out.view(*x.shape[:-1], out.shape[-1]) + + @staticmethod + def backward(ctx, grad_output): + x, w1, w2, pre, post = ctx.saved_tensors + x_flat = x.reshape(-1, x.shape[-1]) + grad_output_flat = grad_output.reshape(-1, grad_output.shape[-1]) + dw2 = grad_output_flat.T @ post + dpre = linear_leaky_relu_square(grad_output_flat, w2.T.contiguous(), aux=pre) + dw1 = dpre.T @ x_flat + dx = dpre @ w1 + return dx.view_as(x), dw1, dw2 + + +FusedLeakyReLUSquareMLP = FusedLinearLeakyReLUSquareFunction.apply + + +class Rotary(nn.Module): + def __init__(self, dim, base=1e4, train_seq_len=1024, rope_dims=0, yarn=True): + super().__init__() + self.dim = dim + self.base = base + self.train_seq_len = train_seq_len + self.yarn = yarn + self.rope_dims = rope_dims if rope_dims > 0 else dim + inv_freq = 1.0 / base ** ( + torch.arange(0, self.rope_dims, 2, dtype=torch.float32) / self.rope_dims + ) + self.register_buffer("inv_freq", inv_freq, persistent=False) + self._seq_len_cached = 0 + self._cos_cached = None + self._sin_cached = None + + def forward(self, seq_len, device, dtype): + if ( + self._cos_cached is None + or self._sin_cached is None + or self._seq_len_cached < seq_len + or self._cos_cached.device != device + ): + rd = self.rope_dims + if self.yarn and seq_len > self.train_seq_len: + scale = seq_len / self.train_seq_len + new_base = self.base * scale ** (rd / (rd - 2)) + inv_freq = 1.0 / new_base ** ( + torch.arange(0, rd, 2, dtype=torch.float32, device=device) / rd + ) + else: + inv_freq = self.inv_freq.float().to(device) + t = torch.arange(seq_len, device=device, dtype=torch.float32) + freqs = torch.outer(t, inv_freq) + self._cos_cached = freqs.cos()[None, :, None, :] + self._sin_cached = freqs.sin()[None, :, None, :] + self._seq_len_cached = seq_len + return self._cos_cached[:, :seq_len].to(dtype=dtype), self._sin_cached[:, :seq_len].to(dtype=dtype) + + +def apply_rotary_emb(x, cos, sin, rope_dims=0): + if rope_dims > 0 and rope_dims < x.size(-1): + x_rope, x_pass = x[..., :rope_dims], x[..., rope_dims:] + half = rope_dims // 2 + x1, x2 = x_rope[..., :half], x_rope[..., half:] + x_rope = torch.cat((x1 * cos + x2 * sin, x1 * -sin + x2 * cos), dim=-1) + return torch.cat((x_rope, x_pass), dim=-1) + half = x.size(-1) // 2 + x1, x2 = x[..., :half], x[..., half:] + return torch.cat((x1 * cos + x2 * sin, x1 * -sin + x2 * cos), dim=-1) + + +class CausalSelfAttention(nn.Module): + def __init__( + self, dim, num_heads, num_kv_heads, rope_base, qk_gain_init, train_seq_len, yarn=True, + attn_out_gate=False, attn_out_gate_src="proj", gate_window=12, + gated_attn=False, gated_attn_init_std=0.01, + sparse_attn_gate=False, sparse_attn_gate_init_std=0.0, sparse_attn_gate_scale=1.0, + ): + super().__init__() + if dim % num_heads != 0: + raise ValueError("model_dim must be divisible by num_heads") + if num_heads % num_kv_heads != 0: 
+ raise ValueError("num_heads must be divisible by num_kv_heads") + if int(attn_out_gate) + int(gated_attn) + int(sparse_attn_gate) > 1: + raise ValueError( + "attn_out_gate, gated_attn, and sparse_attn_gate are mutually exclusive" + ) + self.num_heads = num_heads + self.num_kv_heads = num_kv_heads + self.head_dim = dim // num_heads + if self.head_dim % 2 != 0: + raise ValueError("head_dim must be even for RoPE") + self.q_gain = nn.Parameter( + torch.full((num_heads,), qk_gain_init, dtype=torch.float32) + ) + self.rope_dims = 0 + self.rotary = Rotary(self.head_dim, base=rope_base, train_seq_len=train_seq_len, yarn=yarn) + self.use_xsa = False + # AttnOutGate (PR #1667 MarioPaerle): per-head multiplicative gate on attention + # output. CastedLinear so restore_fp32_params casts back to fp32 for GPTQ. + # _zero_init -> 2*sigmoid(0)=1 -> transparent at init. + self.attn_out_gate = attn_out_gate + self.attn_out_gate_src = attn_out_gate_src + self.gate_window = gate_window + if attn_out_gate: + self.attn_gate_proj = CastedLinear(gate_window, num_heads, bias=False) + self.attn_gate_proj._zero_init = True + # Gated Attention (arXiv:2505.06708, Qwen, NeurIPS 2025). Per-head sigmoid + # gate on SDPA output, BEFORE out_proj. Gate projection W_g: (num_heads, dim). + # Name "attn_gate_w" contains "attn_gate" substring so it matches + # CONTROL_TENSOR_NAME_PATTERNS and routes to the scalar AdamW group. + # fp32 Parameter -> restore_fp32_params path covers it via the ndim<2 OR + # name-pattern check (name matches "attn_gate"). Cast to x.dtype on use. + self.gated_attn = gated_attn + if gated_attn: + W = torch.empty(num_heads, dim, dtype=torch.float32) + nn.init.normal_(W, mean=0.0, std=gated_attn_init_std) + self.attn_gate_w = nn.Parameter(W) + # Sparse attention head-output gate (modded-nanogpt style). Keeps dense SDPA + # and only narrows the gate input to the first gate_window residual dims. + # W_g: (num_heads, gate_window). y_{t,h} <- sigmoid(scale * W_g_h @ x_t[:gate_window]) * y_{t,h}. + # Shares attn_gate_w name with dense GatedAttn so the quant routing + # (CONTROL_TENSOR_NAME_PATTERNS / attn_gate_w int8 passthrough) is unchanged. + self.sparse_attn_gate = sparse_attn_gate + self.sparse_attn_gate_scale = sparse_attn_gate_scale + if sparse_attn_gate: + W = torch.empty(num_heads, gate_window, dtype=torch.float32) + if sparse_attn_gate_init_std > 0: + nn.init.normal_(W, mean=0.0, std=sparse_attn_gate_init_std) + else: + nn.init.zeros_(W) + self.attn_gate_w = nn.Parameter(W) + + def _xsa_efficient(self, y, v): + B, T, H, D = y.shape + Hkv = v.size(-2) + group = H // Hkv + y_g = y.reshape(B, T, Hkv, group, D) + vn = F.normalize(v, dim=-1).unsqueeze(-2) + proj = (y_g * vn).sum(dim=-1, keepdim=True) * vn + return (y_g - proj).reshape(B, T, H, D) + + def forward(self, x, q_w, k_w, v_w, out_w, cu_seqlens=None, max_seqlen=0): + bsz, seqlen, dim = x.shape + # q_raw kept around as a tap point for attn_out_gate_src='q' (post-projection, + # pre-reshape, pre-RoPE). 
+ q_raw = F.linear(x, q_w.to(x.dtype)) + q = q_raw.reshape(bsz, seqlen, self.num_heads, self.head_dim) + k = F.linear(x, k_w.to(x.dtype)).reshape(bsz, seqlen, self.num_kv_heads, self.head_dim) + v = F.linear(x, v_w.to(x.dtype)).reshape(bsz, seqlen, self.num_kv_heads, self.head_dim) + q = F.rms_norm(q, (q.size(-1),)) + k = F.rms_norm(k, (k.size(-1),)) + cos, sin = self.rotary(seqlen, x.device, q.dtype) + q = apply_rotary_emb(q, cos, sin, self.rope_dims) + k = apply_rotary_emb(k, cos, sin, self.rope_dims) + q = q * self.q_gain.to(dtype=q.dtype)[None, None, :, None] + if cu_seqlens is not None: + y = flash_attn_varlen_func( + q[0], + k[0], + v[0], + cu_seqlens_q=cu_seqlens, + cu_seqlens_k=cu_seqlens, + max_seqlen_q=max_seqlen, + max_seqlen_k=max_seqlen, + causal=True, + window_size=(-1, -1), + )[None] + else: + y = flash_attn_3_func(q, k, v, causal=True) + if self.use_xsa: + y = self._xsa_efficient(y, v) + # AttnOutGate inlined (PR #1667). Inline + .contiguous() barrier so torch.compile + # fullgraph=True is happy (this avoids the @torch.compiler.disable trap that + # crashed gates v3). Per-head gate on (B,T,H,D) tensor: g shape [B,T,H], broadcast + # over D via [..., None]. zero-init weight -> 2*sigmoid(0)=1 -> transparent. + if self.attn_out_gate: + gate_src = q_raw if self.attn_out_gate_src == "q" else x + gate_in = gate_src[..., : self.gate_window].contiguous() + g = 2.0 * torch.sigmoid(self.attn_gate_proj(gate_in)) + y = y * g[..., None] + # Gated Attention (arXiv:2505.06708 G1). Inline + .contiguous() barrier so + # torch.compile fullgraph=True is happy. Per-head gate on (B,T,H,D): g shape + # [B,T,H], broadcast over D via [..., None]. Paper: g = sigmoid(x @ W_g.T) + # where W_g: (H, dim). .to(x.dtype) on fp32 param before broadcast with bf16. + if self.gated_attn: + x_c = x.contiguous() + g = torch.sigmoid(F.linear(x_c, self.attn_gate_w.to(x.dtype))) + y = y * g[..., None] + # Sparse head-output gate: narrower (gate_window) input, same shape g as GatedAttn. 
+ if self.sparse_attn_gate: + gate_in = x[..., : self.gate_window].contiguous() + g = torch.sigmoid( + self.sparse_attn_gate_scale + * F.linear(gate_in, self.attn_gate_w.to(x.dtype)) + ) + y = y * g[..., None] + y = y.reshape(bsz, seqlen, dim) + self._last_proj_input = y.detach() if getattr(self, "_calib", False) else None + return F.linear(y, out_w.to(x.dtype)) + + +class MLP(nn.Module): + def __init__(self, dim, mlp_mult): + super().__init__() + self.use_fused = True + + def forward(self, x, up_w, down_w): + if self.training and self.use_fused: + return FusedLeakyReLUSquareMLP(x, up_w.to(x.dtype), down_w.to(x.dtype)) + hidden = F.leaky_relu(F.linear(x, up_w.to(x.dtype)), negative_slope=_LEAKY_RELU_SLOPE).square() + self._last_down_input = hidden.detach() if getattr(self, "_calib", False) else None + return F.linear(hidden, down_w.to(x.dtype)) + + +class Block(nn.Module): + def __init__( + self, + dim, + num_heads, + num_kv_heads, + mlp_mult, + rope_base, + qk_gain_init, + train_seq_len, + layer_idx=0, + ln_scale=False, + yarn=True, + attn_out_gate=False, + attn_out_gate_src="proj", + gate_window=12, + gated_attn=False, + gated_attn_init_std=0.01, + sparse_attn_gate=False, + sparse_attn_gate_init_std=0.0, + sparse_attn_gate_scale=1.0, + ): + super().__init__() + self.attn_norm = RMSNorm() + self.mlp_norm = RMSNorm() + self.attn = CausalSelfAttention( + dim, num_heads, num_kv_heads, rope_base, qk_gain_init, train_seq_len, yarn=yarn, + attn_out_gate=attn_out_gate, attn_out_gate_src=attn_out_gate_src, gate_window=gate_window, + gated_attn=gated_attn, gated_attn_init_std=gated_attn_init_std, + sparse_attn_gate=sparse_attn_gate, + sparse_attn_gate_init_std=sparse_attn_gate_init_std, + sparse_attn_gate_scale=sparse_attn_gate_scale, + ) + self.mlp = MLP(dim, mlp_mult) + self.attn_scale = nn.Parameter(torch.ones(dim, dtype=torch.float32)) + self.mlp_scale = nn.Parameter(torch.ones(dim, dtype=torch.float32)) + self.resid_mix = nn.Parameter( + torch.stack((torch.ones(dim), torch.zeros(dim))).float() + ) + self.ln_scale_factor = 1.0 / math.sqrt(layer_idx + 1) if ln_scale else 1.0 + + def forward(self, x, x0, q_w, k_w, v_w, out_w, up_w, down_w, cu_seqlens=None, max_seqlen=0): + mix = self.resid_mix.to(dtype=x.dtype) + x_in = mix[0][None, None, :] * x + mix[1][None, None, :] * x0 + attn_out = self.attn( + self.attn_norm(x_in) * self.ln_scale_factor, + q_w, k_w, v_w, out_w, + cu_seqlens=cu_seqlens, + max_seqlen=max_seqlen, + ) + x_out = x_in + self.attn_scale.to(dtype=x_in.dtype)[None, None, :] * attn_out + x_out = x_out + self.mlp_scale.to(dtype=x_out.dtype)[ + None, None, : + ] * self.mlp(self.mlp_norm(x_out) * self.ln_scale_factor, up_w, down_w) + return x_out + +class GPT(nn.Module): + def __init__(self, h): + super().__init__() + if h.logit_softcap <= 0.0: + raise ValueError(f"logit_softcap must be positive, got {h.logit_softcap}") + self.tie_embeddings = h.tie_embeddings + self.tied_embed_init_std = h.tied_embed_init_std + self.logit_softcap = h.logit_softcap + self.fused_ce_enabled = bool(h.fused_ce_enabled) + # Entropy-weighted loss config + self._ewl_enabled = bool(h.entropy_weighted_loss) + self._ewl_min_w = h.ewl_min_weight + self._ewl_max_w = h.ewl_max_weight + self.tok_emb = nn.Embedding(h.vocab_size, h.model_dim) + self.num_layers = h.num_layers + head_dim = h.model_dim // h.num_heads + kv_dim = h.num_kv_heads * head_dim + hidden_dim = int(h.mlp_mult * h.model_dim) + self.qo_bank = nn.Parameter(torch.empty(2 * h.num_layers, h.model_dim, h.model_dim)) + self.kv_bank = 
nn.Parameter(torch.empty(2 * h.num_layers, kv_dim, h.model_dim)) + self.mlp_up_bank = nn.Parameter(torch.empty(h.num_layers, hidden_dim, h.model_dim)) + self.mlp_down_bank = nn.Parameter(torch.empty(h.num_layers, h.model_dim, hidden_dim)) + self.num_encoder_layers = h.num_layers // 2 + self.num_decoder_layers = h.num_layers - self.num_encoder_layers + self.blocks = nn.ModuleList( + [ + Block( + h.model_dim, + h.num_heads, + h.num_kv_heads, + h.mlp_mult, + h.rope_base, + h.qk_gain_init, + h.train_seq_len, + layer_idx=i, + ln_scale=h.ln_scale, + yarn=h.rope_yarn, + attn_out_gate=h.attn_out_gate_enabled, + attn_out_gate_src=h.attn_out_gate_src, + gate_window=h.gate_window, + gated_attn=h.gated_attn_enabled, + gated_attn_init_std=h.gated_attn_init_std, + sparse_attn_gate=h.sparse_attn_gate_enabled, + sparse_attn_gate_init_std=h.sparse_attn_gate_init_std, + sparse_attn_gate_scale=h.sparse_attn_gate_scale, + ) + for i in range(h.num_layers) + ] + ) + if h.rope_dims > 0: + head_dim = h.model_dim // h.num_heads + for block in self.blocks: + block.attn.rope_dims = h.rope_dims + block.attn.rotary = Rotary( + head_dim, + base=h.rope_base, + train_seq_len=h.train_seq_len, + rope_dims=h.rope_dims, + yarn=h.rope_yarn, + ) + self.final_norm = RMSNorm() + self.lm_head = ( + None + if h.tie_embeddings + else CastedLinear(h.model_dim, h.vocab_size, bias=False) + ) + if self.lm_head is not None: + self.lm_head._zero_init = True + if h.xsa_last_n > 0: + for i in range(max(0, h.num_layers - h.xsa_last_n), h.num_layers): + self.blocks[i].attn.use_xsa = True + self.looping_active = False + if h.num_loops > 0: + loop_seg = list(range(h.loop_start, h.loop_end + 1)) + all_indices = list(range(h.loop_start)) + for _ in range(h.num_loops + 1): + all_indices.extend(loop_seg) + all_indices.extend(range(h.loop_end + 1, h.num_layers)) + num_enc = len(all_indices) // 2 + self.encoder_indices = all_indices[:num_enc] + self.decoder_indices = all_indices[num_enc:] + else: + self.encoder_indices = list(range(self.num_encoder_layers)) + self.decoder_indices = list(range(self.num_encoder_layers, h.num_layers)) + self.num_skip_weights = min( + len(self.encoder_indices), len(self.decoder_indices) + ) + self.skip_weights = nn.Parameter( + torch.ones(self.num_skip_weights, h.model_dim, dtype=torch.float32) + ) + self.skip_gates = ( + nn.Parameter( + torch.zeros(self.num_skip_weights, h.model_dim, dtype=torch.float32) + ) + if h.skip_gates_enabled + else None + ) + self.parallel_start_layer = h.parallel_start_layer + self.parallel_final_lane = h.parallel_final_lane.lower() + self.parallel_post_lambdas = nn.Parameter( + torch.ones(h.num_layers, 2, 2, dtype=torch.float32) + ) + self.parallel_resid_lambdas = nn.Parameter( + torch.full((h.num_layers, 2), 1.1, dtype=torch.float32) + ) + # SmearGate (PR #1667 / modded-nanogpt @classiclarryd): + # x_t <- x_t + lam * sigmoid(W * x_t[:gate_window]) * x_{t-1}. + # Per-token forward-1 smear of the embedding lane. W zero-init + lam=0 -> + # transparent at init. Uses CastedLinear so restore_fp32_params handles dtype. 
+ self.smear_gate_enabled = h.smear_gate_enabled + if self.smear_gate_enabled: + self.smear_window = h.gate_window + self.smear_gate = CastedLinear(self.smear_window, 1, bias=False) + self.smear_gate._zero_init = True + self.smear_lambda = nn.Parameter(torch.zeros(1, dtype=torch.float32)) + self._init_weights() + + def _init_weights(self): + if self.tie_embeddings: + nn.init.normal_(self.tok_emb.weight, mean=0.0, std=self.tied_embed_init_std) + n = self.num_layers + proj_scale = 1.0 / math.sqrt(2 * n) + for i in range(n): + nn.init.orthogonal_(self.qo_bank.data[i], gain=1.0) + nn.init.zeros_(self.qo_bank.data[n + i]) + self.qo_bank.data[n + i].mul_(proj_scale) + nn.init.orthogonal_(self.kv_bank.data[i], gain=1.0) + nn.init.orthogonal_(self.kv_bank.data[n + i], gain=1.0) + for i in range(n): + nn.init.orthogonal_(self.mlp_up_bank.data[i], gain=1.0) + nn.init.zeros_(self.mlp_down_bank.data[i]) + self.mlp_down_bank.data[i].mul_(proj_scale) + for name, module in self.named_modules(): + if isinstance(module, nn.Linear): + if getattr(module, "_zero_init", False): + nn.init.zeros_(module.weight) + elif ( + module.weight.ndim == 2 + and module.weight.shape[0] >= 64 + and module.weight.shape[1] >= 64 + ): + nn.init.orthogonal_(module.weight, gain=1.0) + + def _bank_weights(self, i): + n = self.num_layers + return ( + self.qo_bank[i], + self.kv_bank[i], + self.kv_bank[n + i], + self.qo_bank[n + i], + self.mlp_up_bank[i], + self.mlp_down_bank[i], + ) + + def _parallel_block( + self, block_idx, lane0, lane1, x0, + q_w, k_w, v_w, out_w, up_w, down_w, + cu_seqlens=None, max_seqlen=0, + ): + block = self.blocks[block_idx] + mix = block.resid_mix.to(dtype=lane0.dtype) + attn_read = mix[0][None, None, :] * lane0 + mix[1][None, None, :] * x0 + attn_out = block.attn( + block.attn_norm(attn_read) * block.ln_scale_factor, + q_w, k_w, v_w, out_w, + cu_seqlens=cu_seqlens, max_seqlen=max_seqlen, + ) + attn_out = block.attn_scale.to(dtype=attn_out.dtype)[None, None, :] * attn_out + mlp_read = lane1 + mlp_out = block.mlp_scale.to(dtype=lane1.dtype)[None, None, :] * block.mlp( + block.mlp_norm(mlp_read) * block.ln_scale_factor, up_w, down_w + ) + attn_resid = self.parallel_resid_lambdas[block_idx, 0].to(dtype=lane0.dtype) + attn_post = self.parallel_post_lambdas[block_idx, 0].to(dtype=lane0.dtype) + mlp_resid = self.parallel_resid_lambdas[block_idx, 1].to(dtype=lane0.dtype) + mlp_post = self.parallel_post_lambdas[block_idx, 1].to(dtype=lane0.dtype) + lane0 = attn_resid * lane0 + attn_post[0] * attn_out + mlp_post[0] * mlp_out + lane1 = mlp_resid * lane1 + attn_post[1] * attn_out + mlp_post[1] * mlp_out + return lane0, lane1 + + def _final_parallel_hidden(self, lane0, lane1): + if self.parallel_final_lane == "mlp": + return lane1 + if self.parallel_final_lane == "attn": + return lane0 + return 0.5 * (lane0 + lane1) + + def _forward_hidden(self, input_ids, cu_seqlens=None, max_seqlen=0): + """Run the encoder/decoder stack to the final RMSNorm; returns pre-projection hidden. + Shared by eval (softcap+projection via forward_logits) and train (fused CE path).""" + x = self.tok_emb(input_ids) + # SmearGate (PR #1667). lam=0 + W=0 -> identity at init. + # Cross-doc leak fix: zero the prev-token smear at any position whose current token + # is BOS, so the BOS embedding starting doc N+1 in a packed stream is not + # contaminated by doc N's last token (audited issue on PR#1797 base). 
+ if self.smear_gate_enabled: + sl = self.smear_lambda.to(dtype=x.dtype) + gate_in = x[:, 1:, : self.smear_window].contiguous() + g = sl * torch.sigmoid(self.smear_gate(gate_in)) + not_bos = (input_ids[:, 1:] != BOS_ID).to(x.dtype).unsqueeze(-1) + x = torch.cat([x[:, :1], x[:, 1:] + g * x[:, :-1] * not_bos], dim=1) + x = F.rms_norm(x, (x.size(-1),)) + x0 = x + skips = [] + enc_iter = ( + self.encoder_indices + if self.looping_active + else range(self.num_encoder_layers) + ) + dec_iter = ( + self.decoder_indices + if self.looping_active + else range( + self.num_encoder_layers, + self.num_encoder_layers + self.num_decoder_layers, + ) + ) + for i in enc_iter: + q_w, k_w, v_w, out_w, up_w, down_w = self._bank_weights(i) + x = self.blocks[i](x, x0, q_w, k_w, v_w, out_w, up_w, down_w, cu_seqlens=cu_seqlens, max_seqlen=max_seqlen) + skips.append(x) + psl = self.parallel_start_layer + lane0 = None + lane1 = None + for skip_idx, i in enumerate(dec_iter): + q_w, k_w, v_w, out_w, up_w, down_w = self._bank_weights(i) + if i >= psl and psl > 0: + if lane0 is None: + lane0 = x + lane1 = x + if skip_idx < self.num_skip_weights and skips: + skip = skips.pop() + w = self.skip_weights[skip_idx].to(dtype=lane0.dtype)[None, None, :] + if self.skip_gates is not None: + g = torch.sigmoid(self.skip_gates[skip_idx].to(dtype=lane0.dtype))[None, None, :] + lane0 = torch.lerp(w * skip, lane0, g) + else: + lane0 = lane0 + w * skip + lane0, lane1 = self._parallel_block( + i, lane0, lane1, x0, q_w, k_w, v_w, out_w, up_w, down_w, + cu_seqlens=cu_seqlens, max_seqlen=max_seqlen, + ) + else: + if skip_idx < self.num_skip_weights and skips: + scaled_skip = ( + self.skip_weights[skip_idx].to(dtype=x.dtype)[None, None, :] + * skips.pop() + ) + if self.skip_gates is not None: + g = torch.sigmoid(self.skip_gates[skip_idx].to(dtype=x.dtype))[None, None, :] + x = torch.lerp(scaled_skip, x, g) + else: + x = x + scaled_skip + x = self.blocks[i](x, x0, q_w, k_w, v_w, out_w, up_w, down_w, cu_seqlens=cu_seqlens, max_seqlen=max_seqlen) + if lane0 is not None: + x = self._final_parallel_hidden(lane0, lane1) + x = self.final_norm(x) + return x + + def _project_logits(self, hidden): + if self.tie_embeddings: + return F.linear(hidden, self.tok_emb.weight) + return self.lm_head(hidden) + + def forward_logits(self, input_ids, cu_seqlens=None, max_seqlen=0): + hidden = self._forward_hidden(input_ids, cu_seqlens=cu_seqlens, max_seqlen=max_seqlen) + logits_proj = self._project_logits(hidden) + return self.logit_softcap * torch.tanh(logits_proj / self.logit_softcap) + + def forward(self, input_ids, target_ids, cu_seqlens=None, max_seqlen=0): + hidden = self._forward_hidden(input_ids, cu_seqlens=cu_seqlens, max_seqlen=max_seqlen) + logits_proj = self._project_logits(hidden) + flat_targets = target_ids.reshape(-1) + # Fused softcapped-CE kernel (training path only). Applies softcap inside the + # Triton kernel; takes pre-softcap logits_proj. Non-fused path matches stock + # PR-1736 numerics exactly (softcap in fp32, then F.cross_entropy on fp32). 
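+ # Eager reference for the fused path (what the kernel computes; not executed here): + # z = logit_softcap * tanh(logits_proj / logit_softcap), then + # F.cross_entropy(z.float().reshape(-1, V), flat_targets, reduction="mean"), + # identical to the non-fused branch below.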
+ if self.fused_ce_enabled and not self._ewl_enabled: + return softcapped_cross_entropy( + logits_proj.reshape(-1, logits_proj.size(-1)), + flat_targets, + self.logit_softcap, + reduction="mean", + ) + logits = self.logit_softcap * torch.tanh(logits_proj / self.logit_softcap) + if self._ewl_enabled: + # Entropy-weighted loss: harder tokens get higher weight + per_tok = F.cross_entropy( + logits.reshape(-1, logits.size(-1)).float(), + flat_targets, + reduction="none", + ) + with torch.no_grad(): + weights = (per_tok / (per_tok.mean() + 1e-8)).clamp( + self._ewl_min_w, self._ewl_max_w + ) + return (per_tok * weights).mean() + return F.cross_entropy( + logits.reshape(-1, logits.size(-1)).float(), + flat_targets, + reduction="mean", + ) + + def forward_ttt(self, input_ids, target_ids, lora): + x = self.tok_emb(input_ids) + # SmearGate on the TTT path — same inline compute as forward_logits. + # Cross-doc leak fix: see _forward_hidden comment. + if self.smear_gate_enabled: + sl = self.smear_lambda.to(dtype=x.dtype) + gate_in = x[:, 1:, : self.smear_window].contiguous() + g = sl * torch.sigmoid(self.smear_gate(gate_in)) + not_bos = (input_ids[:, 1:] != BOS_ID).to(x.dtype).unsqueeze(-1) + x = torch.cat([x[:, :1], x[:, 1:] + g * x[:, :-1] * not_bos], dim=1) + x = F.rms_norm(x, (x.size(-1),)) + x0 = x + skips = [] + enc_iter = ( + self.encoder_indices + if self.looping_active + else list(range(self.num_encoder_layers)) + ) + dec_iter = ( + self.decoder_indices + if self.looping_active + else list( + range( + self.num_encoder_layers, + self.num_encoder_layers + self.num_decoder_layers, + ) + ) + ) + slot = 0 + for i in enc_iter: + q_w, k_w, v_w, out_w, up_w, down_w = self._bank_weights(i) + x = self._block_with_lora(self.blocks[i], x, x0, lora, slot, q_w, k_w, v_w, out_w, up_w, down_w) + slot += 1 + skips.append(x) + psl = self.parallel_start_layer + lane0 = None + lane1 = None + for skip_idx, i in enumerate(dec_iter): + q_w, k_w, v_w, out_w, up_w, down_w = self._bank_weights(i) + if i >= psl and psl > 0: + if lane0 is None: + lane0 = x + lane1 = x + if skip_idx < self.num_skip_weights and skips: + skip = skips.pop() + w = self.skip_weights[skip_idx].to(dtype=lane0.dtype)[None, None, :] + if self.skip_gates is not None: + g = torch.sigmoid(self.skip_gates[skip_idx].to(dtype=lane0.dtype))[None, None, :] + lane0 = torch.lerp(w * skip, lane0, g) + else: + lane0 = lane0 + w * skip + lane0, lane1 = self._parallel_block_with_lora( + i, lane0, lane1, x0, lora, slot, + q_w, k_w, v_w, out_w, up_w, down_w, + ) + else: + if skip_idx < self.num_skip_weights and skips: + scaled_skip = ( + self.skip_weights[skip_idx].to(dtype=x.dtype)[None, None, :] + * skips.pop() + ) + if self.skip_gates is not None: + g = torch.sigmoid(self.skip_gates[skip_idx].to(dtype=x.dtype))[None, None, :] + x = torch.lerp(scaled_skip, x, g) + else: + x = x + scaled_skip + x = self._block_with_lora(self.blocks[i], x, x0, lora, slot, q_w, k_w, v_w, out_w, up_w, down_w) + slot += 1 + if lane0 is not None: + x = self._final_parallel_hidden(lane0, lane1) + x = self.final_norm(x) + if self.tie_embeddings: + logits = F.linear(x, self.tok_emb.weight) + else: + logits = self.lm_head(x) + logits = logits + lora.lm_head_lora(x) + logits = self.logit_softcap * torch.tanh(logits / self.logit_softcap) + bsz, sl, V = logits.shape + return F.cross_entropy( + logits.float().reshape(-1, V), target_ids.reshape(-1), reduction="none" + ).reshape(bsz, sl) + + def _block_with_lora(self, block, x, x0, lora, slot, q_w, k_w, v_w, out_w, up_w, down_w): + mix = 
block.resid_mix.to(dtype=x.dtype) + x_in = mix[0][None, None, :] * x + mix[1][None, None, :] * x0 + n = block.attn_norm(x_in) * block.ln_scale_factor + attn = block.attn + bsz, seqlen, dim = n.shape + # Keep raw Q for AttnOutGate src='q' (matches forward path semantics). + q_raw = F.linear(n, q_w.to(n.dtype)) + lora.q_loras[slot](n) + q = q_raw.reshape(bsz, seqlen, attn.num_heads, attn.head_dim) + k = F.linear(n, k_w.to(n.dtype)) + if lora.k_loras is not None: + k = k + lora.k_loras[slot](n) + k = k.reshape(bsz, seqlen, attn.num_kv_heads, attn.head_dim) + v = (F.linear(n, v_w.to(n.dtype)) + lora.v_loras[slot](n)).reshape( + bsz, seqlen, attn.num_kv_heads, attn.head_dim + ) + q = F.rms_norm(q, (q.size(-1),)) + k = F.rms_norm(k, (k.size(-1),)) + cos, sin = attn.rotary(seqlen, n.device, q.dtype) + q = apply_rotary_emb(q, cos, sin, attn.rope_dims) + k = apply_rotary_emb(k, cos, sin, attn.rope_dims) + q = q * attn.q_gain.to(dtype=q.dtype)[None, None, :, None] + y = flash_attn_3_func(q, k, v, causal=True) + if attn.use_xsa: + y = attn._xsa_efficient(y, v) + # AttnOutGate (TTT path) — inline + .contiguous() barrier, same as the eval path. + if attn.attn_out_gate: + gate_src = q_raw if attn.attn_out_gate_src == "q" else n + gate_in = gate_src[..., : attn.gate_window].contiguous() + g = 2.0 * torch.sigmoid(attn.attn_gate_proj(gate_in)) + y = y * g[..., None] + # Gated Attention (TTT path). Gate input is n (post-norm block input), same + # as eval path. .to(n.dtype) on fp32 param before bf16 broadcast. + if attn.gated_attn: + n_c = n.contiguous() + g = torch.sigmoid(F.linear(n_c, attn.attn_gate_w.to(n.dtype))) + y = y * g[..., None] + # Sparse attention head-output gate (TTT path) — must match the eval path in + # forward() exactly, else training (which applied the gate) and TTT eval (which + # skipped it) produce mismatched representations and catastrophic BPB regression. 
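+ # Shape sketch (assuming num_heads=8, gate_window=12): attn_gate_w is (8, 12), so + # g = sigmoid(sparse_attn_gate_scale * F.linear(n[..., :12], attn_gate_w)) has shape + # (bsz, seqlen, 8) and applies one sigmoid gate per head, broadcast over head_dim via g[..., None].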
+ if attn.sparse_attn_gate: + gate_in = n[..., : attn.gate_window].contiguous() + g = torch.sigmoid( + attn.sparse_attn_gate_scale + * F.linear(gate_in, attn.attn_gate_w.to(n.dtype)) + ) + y = y * g[..., None] + y = y.reshape(bsz, seqlen, dim) + attn_out = F.linear(y, out_w.to(n.dtype)) + if lora.o_loras is not None: + attn_out = attn_out + lora.o_loras[slot](n) + x_out = x_in + block.attn_scale.to(dtype=x_in.dtype)[None, None, :] * attn_out + mlp_n = block.mlp_norm(x_out) * block.ln_scale_factor + mlp_out = block.mlp(mlp_n, up_w, down_w) + if lora.mlp_loras is not None: + mlp_out = mlp_out + lora.mlp_loras[slot](mlp_n) + x_out = x_out + block.mlp_scale.to(dtype=x_out.dtype)[None, None, :] * mlp_out + return x_out + + def _parallel_block_with_lora( + self, block_idx, lane0, lane1, x0, lora, slot, + q_w, k_w, v_w, out_w, up_w, down_w, + ): + block = self.blocks[block_idx] + mix = block.resid_mix.to(dtype=lane0.dtype) + attn_read = mix[0][None, None, :] * lane0 + mix[1][None, None, :] * x0 + n = block.attn_norm(attn_read) * block.ln_scale_factor + attn = block.attn + bsz, seqlen, dim = n.shape + q_raw = F.linear(n, q_w.to(n.dtype)) + lora.q_loras[slot](n) + q = q_raw.reshape(bsz, seqlen, attn.num_heads, attn.head_dim) + k = F.linear(n, k_w.to(n.dtype)) + if lora.k_loras is not None: + k = k + lora.k_loras[slot](n) + k = k.reshape(bsz, seqlen, attn.num_kv_heads, attn.head_dim) + v = (F.linear(n, v_w.to(n.dtype)) + lora.v_loras[slot](n)).reshape( + bsz, seqlen, attn.num_kv_heads, attn.head_dim + ) + q = F.rms_norm(q, (q.size(-1),)) + k = F.rms_norm(k, (k.size(-1),)) + cos, sin = attn.rotary(seqlen, n.device, q.dtype) + q = apply_rotary_emb(q, cos, sin, attn.rope_dims) + k = apply_rotary_emb(k, cos, sin, attn.rope_dims) + q = q * attn.q_gain.to(dtype=q.dtype)[None, None, :, None] + y = flash_attn_3_func(q, k, v, causal=True) + if attn.use_xsa: + y = attn._xsa_efficient(y, v) + # AttnOutGate (TTT parallel path) — inline + .contiguous() barrier. + if attn.attn_out_gate: + gate_src = q_raw if attn.attn_out_gate_src == "q" else n + gate_in = gate_src[..., : attn.gate_window].contiguous() + g = 2.0 * torch.sigmoid(attn.attn_gate_proj(gate_in)) + y = y * g[..., None] + # Gated Attention (TTT parallel path). Gate input is n (post-norm block input). + if attn.gated_attn: + n_c = n.contiguous() + g = torch.sigmoid(F.linear(n_c, attn.attn_gate_w.to(n.dtype))) + y = y * g[..., None] + # Sparse attention head-output gate (TTT parallel path) — must match the + # eval path in forward() to keep train/eval semantics in sync. 
+ if attn.sparse_attn_gate: + gate_in = n[..., : attn.gate_window].contiguous() + g = torch.sigmoid( + attn.sparse_attn_gate_scale + * F.linear(gate_in, attn.attn_gate_w.to(n.dtype)) + ) + y = y * g[..., None] + y = y.reshape(bsz, seqlen, dim) + attn_out = F.linear(y, out_w.to(n.dtype)) + if lora.o_loras is not None: + attn_out = attn_out + lora.o_loras[slot](n) + attn_out = block.attn_scale.to(dtype=attn_out.dtype)[None, None, :] * attn_out + mlp_read = lane1 + mlp_n = block.mlp_norm(mlp_read) * block.ln_scale_factor + mlp_out = block.mlp(mlp_n, up_w, down_w) + if lora.mlp_loras is not None: + mlp_out = mlp_out + lora.mlp_loras[slot](mlp_n) + mlp_out = block.mlp_scale.to(dtype=lane1.dtype)[None, None, :] * mlp_out + attn_resid = self.parallel_resid_lambdas[block_idx, 0].to(dtype=lane0.dtype) + attn_post = self.parallel_post_lambdas[block_idx, 0].to(dtype=lane0.dtype) + mlp_resid = self.parallel_resid_lambdas[block_idx, 1].to(dtype=lane0.dtype) + mlp_post = self.parallel_post_lambdas[block_idx, 1].to(dtype=lane0.dtype) + lane0 = attn_resid * lane0 + attn_post[0] * attn_out + mlp_post[0] * mlp_out + lane1 = mlp_resid * lane1 + attn_post[1] * attn_out + mlp_post[1] * mlp_out + return lane0, lane1 + + +class BatchedLinearLoRA(nn.Module): + # PR-1767: rank-scaled output (alpha/rank), like standard LoRA. Decouples + # effective magnitude from rank so changing rank does not change LR scale. + _ALPHA = float(os.environ.get("TTT_LORA_ALPHA", "144")) + # PR-1767: optionally keep A warm across per-doc resets (only B is zeroed). + # Accumulates useful feature directions across documents within a TTT phase. + _WARM_START_A = bool(int(os.environ.get("TTT_WARM_START_A", "1"))) + + def __init__(self, bsz, in_features, out_features, rank): + super().__init__() + self._bound = 1.0 / math.sqrt(in_features) + self._scale = self._ALPHA / rank + self.A = nn.Parameter( + torch.empty(bsz, rank, in_features).uniform_(-self._bound, self._bound) + ) + self.B = nn.Parameter(torch.zeros(bsz, out_features, rank)) + + def reset(self): + with torch.no_grad(): + if not self._WARM_START_A: + self.A.uniform_(-self._bound, self._bound) + self.B.zero_() + + def forward(self, x): + return ((x @ self.A.transpose(1, 2)) @ self.B.transpose(1, 2)) * self._scale + + +class BatchedTTTLoRA(nn.Module): + def __init__(self, bsz, model, rank, k_lora=True, mlp_lora=True, o_lora=True): + super().__init__() + self.bsz = bsz + dim = model.qo_bank.shape[-1] + vocab = model.tok_emb.num_embeddings + if getattr(model, "looping_active", False): + num_slots = len(model.encoder_indices) + len(model.decoder_indices) + else: + num_slots = len(model.blocks) + kv_dim = model.blocks[0].attn.num_kv_heads * ( + dim // model.blocks[0].attn.num_heads + ) + embed_dim = model.tok_emb.embedding_dim + self.lm_head_lora = BatchedLinearLoRA(bsz, embed_dim, vocab, rank) + self.q_loras = nn.ModuleList( + [BatchedLinearLoRA(bsz, dim, dim, rank) for _ in range(num_slots)] + ) + self.v_loras = nn.ModuleList( + [BatchedLinearLoRA(bsz, dim, kv_dim, rank) for _ in range(num_slots)] + ) + self.k_loras = ( + nn.ModuleList( + [BatchedLinearLoRA(bsz, dim, kv_dim, rank) for _ in range(num_slots)] + ) + if k_lora + else None + ) + self.mlp_loras = ( + nn.ModuleList( + [BatchedLinearLoRA(bsz, dim, dim, rank) for _ in range(num_slots)] + ) + if mlp_lora + else None + ) + self.o_loras = ( + nn.ModuleList( + [BatchedLinearLoRA(bsz, dim, dim, rank) for _ in range(num_slots)] + ) + if o_lora + else None + ) + + def reset(self): + with torch.no_grad(): + self.lm_head_lora.reset() + 
for loras in [self.q_loras, self.v_loras, self.k_loras, + self.mlp_loras, self.o_loras]: + if loras is not None: + for lora in loras: + lora.reset() + + +# Polar Express per-iteration minimax Newton-Schulz coefficients (PR #1344). +# Replaces the fixed (3.4445, -4.775, 2.0315) coefficients of stock Muon. +# Applied at backend_steps=5 — taking more than 5 iterations from this list +# falls back to the final (converged) tuple via the slice guard below. +_PE_COEFFS = ( + (8.156554524902461, -22.48329292557795, 15.878769915207462), + (4.042929935166739, -2.808917465908714, 0.5000178451051316), + (3.8916678022926607, -2.772484153217685, 0.5060648178503393), + (3.285753657755655, -2.3681294933425376, 0.46449024233003106), + (2.3465413258596377, -1.7097828382687081, 0.42323551169305323), +) + + +@torch.compile +def zeropower_via_newtonschulz5(G, steps=10, eps=1e-07): + was_2d = G.ndim == 2 + if was_2d: + G = G.unsqueeze(0) + X = G.bfloat16() + transposed = X.size(-2) > X.size(-1) + if transposed: + X = X.mT + X = X / (X.norm(dim=(-2, -1), keepdim=True) + eps) + coeffs = _PE_COEFFS[:steps] if steps <= len(_PE_COEFFS) else _PE_COEFFS + for a, b, c in coeffs: + A = X @ X.mT + B = b * A + c * (A @ A) + X = a * X + B @ X + if transposed: + X = X.mT + if was_2d: + X = X.squeeze(0) + return X + + +class Muon(torch.optim.Optimizer): + def __init__( + self, + params, + lr, + momentum, + backend_steps, + nesterov=True, + weight_decay=0.0, + row_normalize=False, + ): + super().__init__( + params, + dict( + lr=lr, + momentum=momentum, + backend_steps=backend_steps, + nesterov=nesterov, + weight_decay=weight_decay, + row_normalize=row_normalize, + ), + ) + self._built = False + + def _build(self): + self._distributed = dist.is_available() and dist.is_initialized() + self._world_size = dist.get_world_size() if self._distributed else 1 + self._rank = dist.get_rank() if self._distributed else 0 + ws = self._world_size + self._bank_meta = [] + for group in self.param_groups: + for p in group["params"]: + B = p.shape[0] + padded_B = ((B + ws - 1) // ws) * ws + shard_B = padded_B // ws + tail = p.shape[1:] + dev = p.device + self._bank_meta.append({ + "p": p, + "B": B, + "padded_grad": torch.zeros(padded_B, *tail, device=dev, dtype=torch.bfloat16), + "shard": torch.zeros(shard_B, *tail, device=dev, dtype=torch.bfloat16), + "shard_mom": torch.zeros(shard_B, *tail, device=dev, dtype=torch.bfloat16), + "full_update": torch.zeros(padded_B, *tail, device=dev, dtype=torch.bfloat16), + "scale": max(1, p.shape[-2] / p.shape[-1]) ** 0.5, + }) + self._bank_meta.sort(key=lambda m: -m["p"].numel()) + self._built = True + + def launch_reduce_scatters(self): + if not self._built: + self._build() + if not self._distributed: + return + self._rs_futures = [] + for m in self._bank_meta: + p = m["p"] + if p.grad is None: + self._rs_futures.append(None) + continue + pg = m["padded_grad"] + pg[: m["B"]].copy_(p.grad) + fut = dist.reduce_scatter_tensor( + m["shard"], pg, op=dist.ReduceOp.AVG, async_op=True + ) + self._rs_futures.append(fut) + + @torch.no_grad() + def step(self, closure=None): + loss = None + if closure is not None: + with torch.enable_grad(): + loss = closure() + if not self._built: + self._build() + for group in self.param_groups: + lr = group["lr"] + momentum = group["momentum"] + backend_steps = group["backend_steps"] + nesterov = group["nesterov"] + wd = group.get("weight_decay", 0.0) + row_normalize = group.get("row_normalize", False) + prev_ag_handle = None + prev_m = None + sharded = self._distributed and 
hasattr(self, "_rs_futures") + for idx, m in enumerate(self._bank_meta): + p = m["p"] + if p.grad is None: + continue + if prev_ag_handle is not None: + prev_ag_handle.wait() + pp = prev_m["p"] + upd = prev_m["full_update"][: prev_m["B"]] + if wd > 0.0: + pp.data.mul_(1.0 - lr * wd) + pp.add_(upd, alpha=-lr * prev_m["scale"]) + if sharded and self._rs_futures[idx] is not None: + self._rs_futures[idx].wait() + g = m["shard"] + buf = m["shard_mom"] + else: + g = p.grad.bfloat16() + state = self.state[p] + if "momentum_buffer" not in state: + state["momentum_buffer"] = torch.zeros_like(g) + buf = state["momentum_buffer"] + buf.mul_(momentum).add_(g) + if nesterov: + update = g.add(buf, alpha=momentum) + else: + update = buf + if row_normalize: + rn = update.float().norm(dim=-1, keepdim=True).clamp_min(1e-07) + update = update / rn.to(update.dtype) + update = zeropower_via_newtonschulz5(update, steps=backend_steps) + if sharded: + prev_ag_handle = dist.all_gather_into_tensor( + m["full_update"], update, async_op=True + ) + prev_m = m + else: + if wd > 0.0: + p.data.mul_(1.0 - lr * wd) + p.add_(update, alpha=-lr * m["scale"]) + if prev_ag_handle is not None: + prev_ag_handle.wait() + pp = prev_m["p"] + upd = prev_m["full_update"][: prev_m["B"]] + if wd > 0.0: + pp.data.mul_(1.0 - lr * wd) + pp.add_(upd, alpha=-lr * prev_m["scale"]) + if hasattr(self, "_rs_futures"): + del self._rs_futures + return loss + + +CONTROL_TENSOR_NAME_PATTERNS = tuple( + pattern + for pattern in os.environ.get( + "CONTROL_TENSOR_NAME_PATTERNS", + "attn_scale,attn_scales,mlp_scale,mlp_scales,resid_mix,resid_mixes,q_gain,skip_weight,skip_weights,skip_gates,parallel_post_lambdas,parallel_resid_lambdas,attn_gate_proj,attn_gate_w,smear_gate,smear_lambda", + ).split(",") + if pattern +) + + +PACKED_REPLICATED_GRAD_MAX_NUMEL = 1 << 15 + + +class Optimizers: + def __init__(self, h, base_model): + matrix_params = [ + base_model.qo_bank, + base_model.kv_bank, + base_model.mlp_up_bank, + base_model.mlp_down_bank, + ] + block_named_params = list(base_model.blocks.named_parameters()) + scalar_params = [ + p + for (name, p) in block_named_params + if p.ndim < 2 + or any(pattern in name for pattern in CONTROL_TENSOR_NAME_PATTERNS) + ] + if base_model.skip_weights.numel() > 0: + scalar_params.append(base_model.skip_weights) + if base_model.skip_gates is not None and base_model.skip_gates.numel() > 0: + scalar_params.append(base_model.skip_gates) + if base_model.parallel_post_lambdas is not None: + scalar_params.append(base_model.parallel_post_lambdas) + if base_model.parallel_resid_lambdas is not None: + scalar_params.append(base_model.parallel_resid_lambdas) + # SmearGate params live on GPT root (not in .blocks), so add them by hand. + # Both are tiny (gate_window scalars + 1 lambda). Optimized via scalar Adam. 
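+ # Their names also match CONTROL_TENSOR_NAME_PATTERNS ("smear_gate", "smear_lambda"), + # so restore_fp32_params keeps them in fp32 alongside the other control tensors.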
+ if getattr(base_model, "smear_gate_enabled", False): + scalar_params.append(base_model.smear_gate.weight) + scalar_params.append(base_model.smear_lambda) + token_lr = h.tied_embed_lr if h.tie_embeddings else h.embed_lr + tok_params = [ + {"params": [base_model.tok_emb.weight], "lr": token_lr, "base_lr": token_lr} + ] + self.optimizer_tok = torch.optim.AdamW( + tok_params, + betas=(h.beta1, h.beta2), + eps=h.adam_eps, + weight_decay=h.embed_wd, + fused=True, + ) + self.optimizer_muon = Muon( + matrix_params, + lr=h.matrix_lr, + momentum=h.muon_momentum, + backend_steps=h.muon_backend_steps, + weight_decay=h.muon_wd, + row_normalize=h.muon_row_normalize, + ) + for group in self.optimizer_muon.param_groups: + group["base_lr"] = h.matrix_lr + self.optimizer_scalar = torch.optim.AdamW( + [{"params": scalar_params, "lr": h.scalar_lr, "base_lr": h.scalar_lr}], + betas=(h.beta1, h.beta2), + eps=h.adam_eps, + weight_decay=h.adam_wd, + fused=True, + ) + self.optimizers = [ + self.optimizer_tok, + self.optimizer_muon, + self.optimizer_scalar, + ] + self.replicated_params = list(tok_params[0]["params"]) + self.replicated_params.extend(scalar_params) + self.replicated_large_params = [] + self.replicated_packed_params = [] + for p in self.replicated_params: + if p.numel() <= PACKED_REPLICATED_GRAD_MAX_NUMEL: + self.replicated_packed_params.append(p) + else: + self.replicated_large_params.append(p) + self._aux_stream = torch.cuda.Stream() + + def __iter__(self): + return iter(self.optimizers) + + def zero_grad_all(self): + for opt in self.optimizers: + opt.zero_grad(set_to_none=True) + + def _all_reduce_packed_grads(self): + grads_by_key = collections.defaultdict(list) + for p in self.replicated_packed_params: + if p.grad is not None: + grads_by_key[(p.grad.device, p.grad.dtype)].append(p.grad) + for grads in grads_by_key.values(): + flat = torch.empty( + sum(g.numel() for g in grads), + device=grads[0].device, + dtype=grads[0].dtype, + ) + offset = 0 + for g in grads: + n = g.numel() + flat[offset : offset + n].copy_(g.contiguous().view(-1)) + offset += n + dist.all_reduce(flat, op=dist.ReduceOp.AVG) + offset = 0 + for g in grads: + n = g.numel() + g.copy_(flat[offset : offset + n].view_as(g)) + offset += n + + def step(self, distributed=False): + self.optimizer_muon.launch_reduce_scatters() + if distributed: + reduce_handles = [ + dist.all_reduce(p.grad, op=dist.ReduceOp.AVG, async_op=True) + for p in self.replicated_large_params + if p.grad is not None + ] + self._all_reduce_packed_grads() + for handle in reduce_handles: + handle.wait() + self._aux_stream.wait_stream(torch.cuda.current_stream()) + with torch.cuda.stream(self._aux_stream): + self.optimizer_tok.step() + self.optimizer_scalar.step() + self.optimizer_muon.step() + torch.cuda.current_stream().wait_stream(self._aux_stream) + self.zero_grad_all() + + +def restore_fp32_params(model): + for module in model.modules(): + if isinstance(module, CastedLinear): + module.float() + for name, param in model.named_parameters(): + if ( + param.ndim < 2 + or any(pattern in name for pattern in CONTROL_TENSOR_NAME_PATTERNS) + ) and param.dtype != torch.float32: + param.data = param.data.float() + if hasattr(model, "qo_bank") and model.qo_bank is not None: + model.qo_bank.data = model.qo_bank.data.float() + model.kv_bank.data = model.kv_bank.data.float() + model.mlp_up_bank.data = model.mlp_up_bank.data.float() + model.mlp_down_bank.data = model.mlp_down_bank.data.float() + + +def collect_hessians(model, train_loader, h, device, n_calibration_batches=64): 
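+ # GPTQ calibration: run n_calibration_batches forward passes with the fused MLP + # disabled and accumulate, per quantized weight, H = sum over x of x x^T using that + # weight's input activations; gptq_quantize_weight later damps H, Cholesky-factorizes + # it, and uses it to compensate rounding error column by column.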
+ hessians = {} + hooks = [] + for i, block in enumerate(model.blocks): + block.attn._calib = True + block.mlp._calib = True + block.mlp.use_fused = False + + def make_attn_hook(layer_idx): + def hook_fn(module, inp, out): + x = inp[0].detach().float() + if x.ndim == 3: + x = x.reshape(-1, x.shape[-1]) + for suffix in ["c_q", "c_k", "c_v"]: + name = f"blocks.{layer_idx}.attn.{suffix}.weight" + if name not in hessians: + hessians[name] = torch.zeros( + x.shape[1], x.shape[1], dtype=torch.float32, device=device + ) + hessians[name].addmm_(x.T, x) + y = module._last_proj_input + if y is not None: + y = y.float() + if y.ndim == 3: + y = y.reshape(-1, y.shape[-1]) + name = f"blocks.{layer_idx}.attn.proj.weight" + if name not in hessians: + hessians[name] = torch.zeros( + y.shape[1], y.shape[1], dtype=torch.float32, device=device + ) + hessians[name].addmm_(y.T, y) + return hook_fn + + def make_mlp_hook(layer_idx): + def hook_fn(module, inp, out): + x = inp[0].detach().float() + if x.ndim == 3: + x = x.reshape(-1, x.shape[-1]) + name = f"blocks.{layer_idx}.mlp.fc.weight" + if name not in hessians: + hessians[name] = torch.zeros( + x.shape[1], x.shape[1], dtype=torch.float32, device=device + ) + hessians[name].addmm_(x.T, x) + h_act = module._last_down_input + if h_act is not None: + h_act = h_act.float() + if h_act.ndim == 3: + h_act = h_act.reshape(-1, h_act.shape[-1]) + name = f"blocks.{layer_idx}.mlp.proj.weight" + if name not in hessians: + hessians[name] = torch.zeros( + h_act.shape[1], h_act.shape[1], dtype=torch.float32, device=device + ) + hessians[name].addmm_(h_act.T, h_act) + return hook_fn + + for i, block in enumerate(model.blocks): + hooks.append(block.attn.register_forward_hook(make_attn_hook(i))) + hooks.append(block.mlp.register_forward_hook(make_mlp_hook(i))) + + # Hessian hooks for embedding factorization projection layers + def make_linear_input_hook(weight_name): + def hook_fn(module, inp, out): + x = inp[0].detach().float() + if x.ndim == 3: + x = x.reshape(-1, x.shape[-1]) + if weight_name not in hessians: + hessians[weight_name] = torch.zeros( + x.shape[1], x.shape[1], dtype=torch.float32, device=device + ) + hessians[weight_name].addmm_(x.T, x) + return hook_fn + + if model.tie_embeddings: + hook_module = model.final_norm + + def make_output_hook(name): + def hook_fn(module, inp, out): + x = out.detach().float() + if x.ndim == 3: + x = x.reshape(-1, x.shape[-1]) + if name not in hessians: + hessians[name] = torch.zeros( + x.shape[1], x.shape[1], dtype=torch.float32, device=device + ) + hessians[name].addmm_(x.T, x) + return hook_fn + + hooks.append( + hook_module.register_forward_hook(make_output_hook("tok_emb.weight")) + ) + model.eval() + with torch.no_grad(): + for _ in range(n_calibration_batches): + x, _ = train_loader.next_batch(h.train_batch_tokens, h.grad_accum_steps) + model.forward_logits(x) + for hook in hooks: + hook.remove() + for i, block in enumerate(model.blocks): + block.attn._calib = False + block.mlp._calib = False + block.mlp.use_fused = True + for name in hessians: + hessians[name] = hessians[name].cpu() / n_calibration_batches + return hessians + + +def gptq_quantize_weight(w, H, clip_sigmas=3.0, clip_range=63, block_size=128): + W_orig = w.float().clone() + rows, cols = W_orig.shape + H = H.float().clone() + dead = torch.diag(H) == 0 + H[dead, dead] = 1 + damp = 0.01 * H.diag().mean() + H.diagonal().add_(damp) + perm = torch.argsort(H.diag(), descending=True) + invperm = torch.argsort(perm) + W_perm = W_orig[:, perm].clone() + W_perm[:, dead[perm]] 
= 0 + H = H[perm][:, perm] + Hinv = torch.cholesky_inverse(torch.linalg.cholesky(H)) + Hinv = torch.linalg.cholesky(Hinv, upper=True) + row_std = W_orig.std(dim=1) + s = (clip_sigmas * row_std / clip_range).clamp_min(1e-10).to(torch.float16) + sf = s.float() + Q = torch.zeros(rows, cols, dtype=torch.int8) + W_work = W_perm.clone() + for i1 in range(0, cols, block_size): + i2 = min(i1 + block_size, cols) + W_block = W_work[:, i1:i2].clone() + Hinv_block = Hinv[i1:i2, i1:i2] + Err = torch.zeros(rows, i2 - i1) + for j in range(i2 - i1): + w_col = W_block[:, j] + d = Hinv_block[j, j] + q_col = torch.clamp(torch.round(w_col / sf), -clip_range, clip_range) + Q[:, i1 + j] = q_col.to(torch.int8) + err = (w_col - q_col.float() * sf) / d + Err[:, j] = err + W_block[:, j:] -= err.unsqueeze(1) * Hinv_block[j, j:].unsqueeze(0) + if i2 < cols: + W_work[:, i2:] -= Err @ Hinv[i1:i2, i2:] + return Q[:, invperm], s + + +def _quantize_gate_int8_row(w): + # Symmetric int8-per-row quantization for small gate tensors. w shape + # (R, C) -> (R,) scales in fp16, int8 values in [-127, 127]. Single scale + # per row keeps accuracy high while halving storage vs fp16. + W = w.float().contiguous() + row_max = W.abs().amax(dim=1).clamp_min(1e-10) + s = (row_max / 127.0).to(torch.float16) + sf = s.float().view(-1, 1) + q = torch.clamp(torch.round(W / sf), -127, 127).to(torch.int8) + return q, s + + +def _lqer_pack(A, B, bits): + rng = 2 ** (bits - 1) - 1 + sA = (A.abs().amax(dim=1).clamp_min(1e-10) / rng).to(torch.float16) + sB = (B.abs().amax(dim=1).clamp_min(1e-10) / rng).to(torch.float16) + qA = torch.clamp(torch.round(A / sA.float().view(-1, 1)), -rng, rng).to(torch.int8) + qB = torch.clamp(torch.round(B / sB.float().view(-1, 1)), -rng, rng).to(torch.int8) + return qA, sA, qB, sB + + +def _lqer_pack_asym(A, B, g=64): + # A: INT2 per-matrix scalar (signed [-2,1], scale = |A|max/1.5). + sA = (A.abs().amax().clamp_min(1e-10) / 1.5).to(torch.float16) + qA = torch.clamp(torch.round(A / sA.float()), -2, 1).to(torch.int8) + # B: INT4 groupwise g over flattened B (signed [-8,7], per-group scale). + Bf = B.reshape(-1, g) + Bmax = Bf.abs().amax(dim=-1, keepdim=True).clamp_min(1e-10) + sB = (Bmax / 7.5).to(torch.float16).reshape(-1) + qB = torch.clamp(torch.round(Bf / sB.float().reshape(-1, 1)), -8, 7).to( + torch.int8 + ).reshape(B.shape) + return qA, sA, qB, sB + + +def gptq_mixed_quantize(state_dict, hessians, h): + result = {} + meta = {} + quant_gate = bool(getattr(h, "gated_attn_quant_gate", False)) + lqer_on = bool(getattr(h, "lqer_enabled", False)) + lqer_cands = {} + for (name, tensor) in state_dict.items(): + t = tensor.detach().cpu().contiguous() + # Dedicated int8-per-row path for attn_gate_w (bypasses both GPTQ and + # fp16 passthrough). Applied BEFORE the numel<=65536 passthrough check + # so the gate tensor is routed here instead of to fp16. + if ( + quant_gate + and t.is_floating_point() + and t.ndim == 2 + and name.endswith(".attn_gate_w") + # Dense GatedAttn: (num_heads, dim) = (8, 512) = 4096. + # Sparse gate: (num_heads, gate_window) = (8, 12) = 96. + # Both need int8-per-row routing; the 1024 lower bound in stock + # PR-1736 presumed dense-only. Widen to catch both. 
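+ # Scheme sketch (see _quantize_gate_int8_row): per row r, s_r = max|W_r| / 127 stored + # in fp16, q_r = round(W_r / s_r) clipped to [-127, 127] stored as int8; + # dequantize_mixed rebuilds W_r as q_r * s_r.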
+ and 32 <= t.numel() <= 8192 + ): + gq, gs = _quantize_gate_int8_row(t) + result[name + ".gq"] = gq + result[name + ".gs"] = gs + meta[name] = "gate_int8_row" + continue + if not t.is_floating_point() or t.numel() <= 65536: + result[name] = t.to(torch.float16) if t.is_floating_point() else t + meta[name] = "passthrough (float16)" + continue + if "tok_emb" in name: + cs = h.embed_clip_sigmas + elif ".mlp." in name: + cs = h.mlp_clip_sigmas + elif ".attn." in name: + cs = h.attn_clip_sigmas + else: + cs = h.matrix_clip_sigmas + bits = h.embed_bits if "tok_emb" in name else h.matrix_bits + clip_range = 2 ** (bits - 1) - 1 + ret = gptq_quantize_weight( + t, hessians[name], clip_sigmas=cs, clip_range=clip_range + ) + q, s = ret + result[name + ".q"] = q + result[name + ".scale"] = s + meta[name] = f"gptq (int{bits})" + if lqer_on: + W_q = q.float() * s.float().view(-1, 1) + E = t.float() - W_q + lqer_cands[name] = (E, float(E.norm())) + if lqer_on and lqer_cands: + top = sorted(lqer_cands.items(), key=lambda kv: -kv[1][1])[: h.lqer_top_k] + asym_on = bool(getattr(h, "lqer_asym_enabled", False)) + asym_g = int(getattr(h, "lqer_asym_group", 64)) + for (name, (E, _)) in top: + U, S, Vh = torch.linalg.svd(E, full_matrices=False) + r = min(h.lqer_rank, S.numel()) + A = (U[:, :r] * S[:r]).contiguous() + B = Vh[:r, :].contiguous() + if asym_on and B.numel() % asym_g == 0: + qA, sA, qB, sB = _lqer_pack_asym(A, B, asym_g) + result[name + ".lqA_a"] = qA + result[name + ".lqAs_a"] = sA + result[name + ".lqB_a"] = qB + result[name + ".lqBs_a"] = sB + meta[name] = meta[name] + "+lqer_asym" + else: + qA, sA, qB, sB = _lqer_pack(A, B, h.lqer_factor_bits) + result[name + ".lqA"] = qA + result[name + ".lqAs"] = sA + result[name + ".lqB"] = qB + result[name + ".lqBs"] = sB + meta[name] = meta[name] + "+lqer" + categories = collections.defaultdict(set) + for (name, cat) in meta.items(): + short = re.sub("\\.\\d+$", "", re.sub("blocks\\.\\d+", "blocks", name)) + categories[cat].add(short) + log("Quantized weights:") + for cat in sorted(categories): + log(f" {cat}: {', '.join(sorted(categories[cat]))}") + return result, meta + +def dequantize_mixed(result, meta, template_sd): + out = {} + for (name, orig) in template_sd.items(): + info = meta.get(name) + if info is None: + continue + orig_dtype = orig.dtype + if "passthrough" in info: + t = result[name] + if t.dtype == torch.float16 and orig_dtype in ( + torch.float32, + torch.bfloat16, + ): + t = t.to(orig_dtype) + out[name] = t + continue + if info == "gate_int8_row": + gq = result[name + ".gq"] + gs = result[name + ".gs"] + out[name] = (gq.float() * gs.float().view(-1, 1)).to(orig_dtype) + continue + q, s = result[name + ".q"], result[name + ".scale"] + if s.ndim > 0: + W = q.float() * s.float().view(q.shape[0], *[1] * (q.ndim - 1)) + else: + W = q.float() * float(s.item()) + if "lqer_asym" in info: + qA_t = result[name + ".lqA_a"] + sA_t = result[name + ".lqAs_a"] + qB_t = result[name + ".lqB_a"] + sB_t = result[name + ".lqBs_a"] + qA = qA_t.float() * float(sA_t) + g_sz = qB_t.numel() // sB_t.numel() + qB = (qB_t.reshape(-1, g_sz).float() * sB_t.float().view(-1, 1)).reshape( + qB_t.shape + ) + W = W + qA @ qB + elif "lqer" in info: + qA = result[name + ".lqA"].float() * result[name + ".lqAs"].float().view(-1, 1) + qB = result[name + ".lqB"].float() * result[name + ".lqBs"].float().view(-1, 1) + W = W + qA @ qB + out[name] = W.to(orig_dtype) + return out + + +_BSHF_MAGIC = b"BSHF" + + +# ── Per-group lrzip compression (ported from PR#1586 via PR#1667/1729) 
──────── + +_GROUP_ORDER = [ + "_tok_emb.weight.q", + "attn.c_k.weight.q", "attn.c_q.weight.q", + "attn.c_v.weight.q", "attn.proj.weight.q", + "mlp.fc.weight.q", "mlp.proj.weight.q", +] +_SIMSORT_KEYS = {"_tok_emb.weight.q", "attn.c_q.weight.q", "mlp.fc.weight.q"} +_PACK_MAGIC = b"PGRP" + + +def _similarity_sort_l1(matrix): + import numpy as _np + n = matrix.shape[0] + used = _np.zeros(n, dtype=bool) + order = [0] + used[0] = True + cur = matrix[0].astype(_np.float32) + for _ in range(n - 1): + dists = _np.sum(_np.abs(matrix[~used].astype(_np.float32) - cur), axis=1) + unused = _np.where(~used)[0] + best = unused[_np.argmin(dists)] + order.append(best) + used[best] = True + cur = matrix[best].astype(_np.float32) + return _np.array(order, dtype=_np.uint16) + + +def _lrzip_compress(data, tmpdir, label): + inp = os.path.join(tmpdir, f"{label}.bin") + out = f"{inp}.lrz" + with open(inp, "wb") as f: + f.write(data) + subprocess.run(["lrzip", "-z", "-L", "9", "-o", out, inp], capture_output=True, check=True) + with open(out, "rb") as f: + result = f.read() + os.remove(inp); os.remove(out) + return result + + +def _lrzip_decompress(data, tmpdir, label): + inp = os.path.join(tmpdir, f"{label}.lrz") + out = os.path.join(tmpdir, f"{label}.bin") + with open(inp, "wb") as f: + f.write(data) + subprocess.run(["lrzip", "-d", "-f", "-o", out, inp], capture_output=True, check=True) + with open(out, "rb") as f: + result = f.read() + os.remove(inp); os.remove(out) + return result + + +def _pack_streams(streams): + import struct + n = len(streams) + hdr = _PACK_MAGIC + struct.pack("= 2 + docs.append((start, end - start)) + return docs + + +def _build_ttt_global_batches(doc_entries, h, ascending=False): + batch_size = h.ttt_batch_size + global_doc_entries = sorted(doc_entries, key=lambda x: x[1][1]) + global_batches = [ + global_doc_entries[i : i + batch_size] + for i in range(0, len(global_doc_entries), batch_size) + ] + indexed = list(enumerate(global_batches)) + if not ascending: + indexed.sort(key=lambda ib: -max(dl for _, (_, dl) in ib[1])) + return indexed + + +def _init_batch_counter(path): + with open(path, "wb") as f: + f.write((0).to_bytes(4, "little")) + + +def _claim_next_batch(counter_path, queue_len): + try: + with open(counter_path, "r+b") as f: + fcntl.flock(f, fcntl.LOCK_EX) + idx = int.from_bytes(f.read(4), "little") + f.seek(0) + f.write((idx + 1).to_bytes(4, "little")) + f.flush() + except FileNotFoundError: + return queue_len + return idx + + +def _compute_chunk_window(ci, pred_len, num_chunks, chunk_size, eval_seq_len): + chunk_end = pred_len if ci == num_chunks - 1 else (ci + 1) * chunk_size + win_start = max(0, chunk_end - eval_seq_len) + win_len = chunk_end - win_start + chunk_start = ci * chunk_size + chunk_offset = chunk_start - win_start + chunk_len = chunk_end - chunk_start + return win_start, win_len, chunk_offset, chunk_len + + +def _accumulate_bpb( + ptl, + x, + y, + chunk_offsets, + chunk_lens, + pos_idx, + base_bytes_lut, + has_leading_space_lut, + is_boundary_token_lut, + loss_sum, + byte_sum, + token_count, + y_bytes=None, +): + pos = pos_idx[: x.size(1)].unsqueeze(0) + mask = ( + (chunk_lens.unsqueeze(1) > 0) + & (pos >= chunk_offsets.unsqueeze(1)) + & (pos < (chunk_offsets + chunk_lens).unsqueeze(1)) + ) + mask_f64 = mask.to(torch.float64) + if y_bytes is not None: + tok_bytes = y_bytes.to(torch.float64) + else: + tok_bytes = base_bytes_lut[y].to(torch.float64) + tok_bytes += (has_leading_space_lut[y] & ~is_boundary_token_lut[x]).to( + torch.float64 + ) + loss_sum += 
(ptl.to(torch.float64) * mask_f64).sum() + byte_sum += (tok_bytes * mask_f64).sum() + token_count += chunk_lens.to(torch.float64).sum() + + +def _loss_bpb_from_sums(loss_sum, token_count, byte_sum): + val_loss = (loss_sum / token_count).item() + val_bpb = val_loss / math.log(2.0) * (token_count.item() / byte_sum.item()) + return val_loss, val_bpb + + +def _add_to_counter(path, delta): + try: + with open(path, "r+b") as f: + fcntl.flock(f, fcntl.LOCK_EX) + cur = int.from_bytes(f.read(8), "little", signed=True) + cur += int(delta) + f.seek(0) + f.write(int(cur).to_bytes(8, "little", signed=True)) + f.flush() + return cur + except FileNotFoundError: + return int(delta) + + +def _init_int64_counter(path): + with open(path, "wb") as f: + f.write((0).to_bytes(8, "little", signed=True)) + + +def _select_ttt_doc_entries(docs, h): + doc_entries = list(enumerate(docs)) + if h.val_doc_fraction < 1.0: + sample_n = max(1, int(round(len(docs) * h.val_doc_fraction))) + sampled_indices = sorted( + random.Random(h.seed).sample(range(len(docs)), sample_n) + ) + return [(i, docs[i]) for i in sampled_indices] + return doc_entries + + +def train_val_ttt_global_sgd_distributed(h, device, val_data, base_model, val_tokens, batch_seqs=None): + global BOS_ID + if BOS_ID is None: + BOS_ID = 1 + base_model.eval() + seq_len = h.eval_seq_len + total_tokens = val_tokens.numel() - 1 + ttt_chunk = h.global_ttt_chunk_tokens + batch_seqs = h.global_ttt_batch_seqs if batch_seqs is None else batch_seqs + num_chunks = (total_tokens + ttt_chunk - 1) // ttt_chunk + ttt_params = [p for p in base_model.parameters()] + for p in ttt_params: + p.requires_grad_(True) + optimizer = torch.optim.SGD( + ttt_params, lr=h.global_ttt_lr, momentum=h.global_ttt_momentum + ) + t_start = time.perf_counter() + for ci in range(num_chunks): + chunk_start = ci * ttt_chunk + chunk_end = min((ci + 1) * ttt_chunk, total_tokens) + is_last_chunk = ci == num_chunks - 1 + if is_last_chunk or h.global_ttt_epochs <= 0: + continue + base_model.train() + chunk_seqs = (chunk_end - chunk_start) // seq_len + if chunk_seqs <= 0: + continue + warmup_chunks = max(0, min(h.global_ttt_warmup_chunks, num_chunks - 1)) + if warmup_chunks > 0 and ci < warmup_chunks: + warmup_denom = max(warmup_chunks - 1, 1) + warmup_t = ci / warmup_denom + lr_now = ( + h.global_ttt_warmup_start_lr + + (h.global_ttt_lr - h.global_ttt_warmup_start_lr) * warmup_t + ) + else: + decay_steps = max(num_chunks - 1 - warmup_chunks, 1) + decay_ci = max(ci - warmup_chunks, 0) + lr_now = h.global_ttt_lr * 0.5 * ( + 1.0 + math.cos(math.pi * decay_ci / decay_steps) + ) + for pg in optimizer.param_groups: + pg["lr"] = lr_now + my_seq_s = chunk_seqs * h.rank // h.world_size + my_seq_e = chunk_seqs * (h.rank + 1) // h.world_size + my_chunk_seqs = my_seq_e - my_seq_s + for _ in range(h.global_ttt_epochs): + for bs in range(0, my_chunk_seqs, batch_seqs): + be = min(bs + batch_seqs, my_chunk_seqs) + actual_bs = my_seq_s + bs + start_tok = chunk_start + actual_bs * seq_len + end_tok = chunk_start + (my_seq_s + be) * seq_len + 1 + if end_tok > val_tokens.numel(): + continue + local = val_tokens[start_tok:end_tok].to(device=device, dtype=torch.int64) + x_flat = local[:-1] + y_flat = local[1:] + optimizer.zero_grad(set_to_none=True) + with torch.enable_grad(): + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + if h.global_ttt_respect_doc_boundaries: + bos_pos = (x_flat == BOS_ID).nonzero(as_tuple=True)[0].tolist() + cu_seqlens, max_seqlen = _build_cu_seqlens( + bos_pos, x_flat.numel(), 
x_flat.device, h.eval_seq_len, 64 + ) + loss = base_model( + x_flat[None], + y_flat[None], + cu_seqlens=cu_seqlens, + max_seqlen=max_seqlen, + ) + else: + x = x_flat.reshape(-1, seq_len) + y = y_flat.reshape(-1, seq_len) + loss = base_model(x, y) + loss.backward() + if dist.is_available() and dist.is_initialized(): + for p in ttt_params: + if p.grad is not None: + dist.all_reduce(p.grad, op=dist.ReduceOp.SUM) + p.grad.mul_(1.0 / h.world_size) + if h.global_ttt_grad_clip > 0: + torch.nn.utils.clip_grad_norm_(ttt_params, h.global_ttt_grad_clip) + optimizer.step() + base_model.eval() + if h.rank == 0: + elapsed = time.perf_counter() - t_start + log( + f"tttg: c{ci+1}/{num_chunks} lr:{lr_now:.6f} t:{elapsed:.1f}s" + ) + for p in base_model.parameters(): + p.requires_grad_(True) + base_model.eval() + + +def eval_val_ttt_phased(h, base_model, device, val_data, forward_ttt_train): + global BOS_ID + if BOS_ID is None: + BOS_ID = 1 + base_model.eval() + for p in base_model.parameters(): + p.requires_grad_(False) + all_tokens = val_data.val_tokens + all_tokens_idx = all_tokens.to(torch.int32) + docs = _find_docs(all_tokens) + doc_entries = _select_ttt_doc_entries(docs, h) + prefix_doc_limit = max(0, min(len(doc_entries), int(h.phased_ttt_prefix_docs))) + num_phases = max(1, int(h.phased_ttt_num_phases)) + phase_boundaries = [] + for pi in range(num_phases): + boundary = prefix_doc_limit * (pi + 1) // num_phases + phase_boundaries.append(boundary) + current_phase = 0 + current_phase_boundary = phase_boundaries[0] + log( + "ttt_phased:" + f" total_docs:{len(doc_entries)} prefix_docs:{prefix_doc_limit} " + f"suffix_docs:{len(doc_entries) - prefix_doc_limit}" + f" num_phases:{num_phases} boundaries:{phase_boundaries}" + ) + chunk_size, eval_seq_len = h.ttt_chunk_size, h.ttt_eval_seq_len + eval_batch_set = None + if h.ttt_eval_batches: + eval_batch_set = set(int(x) for x in h.ttt_eval_batches.split(",") if x.strip()) + use_ascending = eval_batch_set is not None + global_batches_sorted = _build_ttt_global_batches( + doc_entries, h, ascending=use_ascending + ) + queue_len = len(global_batches_sorted) + counter_path = f"/tmp/ttt_counter_{h.run_id}" + prefix_counter_path = f"/tmp/ttt_prefix_counter_{h.run_id}" + pause_flag_path = f"/tmp/ttt_pause_flag_{h.run_id}" + if h.rank == 0: + _init_batch_counter(counter_path) + _init_int64_counter(prefix_counter_path) + try: + os.remove(pause_flag_path) + except FileNotFoundError: + pass + if dist.is_available() and dist.is_initialized(): + path_list = [counter_path, prefix_counter_path, pause_flag_path] + dist.broadcast_object_list(path_list, src=0) + counter_path, prefix_counter_path, pause_flag_path = path_list + dist.barrier() + loss_sum = torch.zeros((), device=device, dtype=torch.float64) + byte_sum = torch.zeros((), device=device, dtype=torch.float64) + token_count = torch.zeros((), device=device, dtype=torch.float64) + t_start = time.perf_counter() + reusable_lora = BatchedTTTLoRA( + h.ttt_batch_size, base_model, h.ttt_lora_rank, + k_lora=h.ttt_k_lora, mlp_lora=h.ttt_mlp_lora, o_lora=h.ttt_o_lora, + ).to(device) + + def _build_opt(lora): + if h.ttt_optimizer == "sgd": + return torch.optim.SGD( + lora.parameters(), lr=h.ttt_lora_lr, + momentum=h.ttt_beta1, weight_decay=h.ttt_weight_decay, + ) + return torch.optim.AdamW( + lora.parameters(), lr=h.ttt_lora_lr, + betas=(h.ttt_beta1, h.ttt_beta2), + eps=1e-10, weight_decay=h.ttt_weight_decay, fused=True, + ) + + reusable_opt = _build_opt(reusable_lora) + local_scored_docs = [] + global_ttt_done = prefix_doc_limit 
== 0 + try: + while True: + queue_idx = _claim_next_batch(counter_path, queue_len) + if queue_idx >= queue_len: + break + orig_batch_idx, batch_entries = global_batches_sorted[queue_idx] + batch = [doc for _, doc in batch_entries] + bsz = len(batch) + prev_loss = loss_sum.item() + prev_bytes = byte_sum.item() + prev_tokens = token_count.item() + if bsz == reusable_lora.bsz: + reusable_lora.reset() + for s in reusable_opt.state.values(): + for k, v in s.items(): + if isinstance(v, torch.Tensor): + v.zero_() + elif k == "step": + s[k] = 0 + cur_lora = reusable_lora + cur_opt = reusable_opt + else: + cur_lora = BatchedTTTLoRA( + bsz, base_model, h.ttt_lora_rank, + k_lora=h.ttt_k_lora, mlp_lora=h.ttt_mlp_lora, o_lora=h.ttt_o_lora, + ).to(device) + cur_opt = _build_opt(cur_lora) + pred_lens = [doc_len - 1 for _, doc_len in batch] + num_chunks = [(pl + chunk_size - 1) // chunk_size for pl in pred_lens] + max_nc = max(num_chunks) + num_chunks_t = torch.tensor(num_chunks, dtype=torch.int64, device=device) + for ci in range(max_nc): + active = [ci < nc for nc in num_chunks] + needs_train = any(ci < nc - 1 for nc in num_chunks) + tok_starts = torch.zeros(bsz, dtype=torch.int64) + tok_wls = torch.zeros(bsz, dtype=torch.int64) + chunk_offsets_cpu = torch.zeros(bsz, dtype=torch.int64) + chunk_lens_cpu = torch.zeros(bsz, dtype=torch.int64) + for b in range(bsz): + if not active[b]: + continue + doc_start, doc_len = batch[b] + win_start, win_len, chunk_offset, chunk_len = _compute_chunk_window( + ci, pred_lens[b], num_chunks[b], chunk_size, eval_seq_len + ) + tok_starts[b] = doc_start + win_start + tok_wls[b] = win_len + chunk_offsets_cpu[b] = chunk_offset + chunk_lens_cpu[b] = chunk_len + _, context_size, chunk_offset, _ = _compute_chunk_window( + ci, (ci + 1) * chunk_size, ci + 1, chunk_size, eval_seq_len + ) + col_idx = torch.arange(context_size + 1) + idx = tok_starts.unsqueeze(1) + col_idx.unsqueeze(0) + idx.clamp_(max=all_tokens.numel() - 1) + gathered_gpu = all_tokens_idx[idx].to( + device=device, dtype=torch.int64, non_blocking=True + ) + valid = (col_idx[:context_size].unsqueeze(0) < tok_wls.unsqueeze(1)).to( + device, non_blocking=True + ) + chunk_offsets = chunk_offsets_cpu.to(device, non_blocking=True) + chunk_lens = chunk_lens_cpu.to(device, non_blocking=True) + x = torch.where(valid, gathered_gpu[:, :context_size], 0) + y = torch.where(valid, gathered_gpu[:, 1 : context_size + 1], 0) + ctx_pos = torch.arange(context_size, device=device, dtype=torch.int64) + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + per_tok_loss = forward_ttt_train(x, y, lora=cur_lora) + # CaseOps sidecar-driven byte budget. Mirror the index pattern + # used to build y from all_tokens: y[b, j] corresponds to the + # token at global position tok_starts[b] + 1 + j (when valid). + y_bytes_arg = None + if val_data.caseops_enabled and val_data.val_bytes is not None: + y_idx = ( + tok_starts.unsqueeze(1) + + 1 + + col_idx[:context_size].unsqueeze(0) + ) + y_idx = y_idx.clamp_(max=val_data.val_bytes.numel() - 1) + y_bytes_arg = val_data.val_bytes[y_idx].to( + device=device, dtype=torch.int32, non_blocking=True + ) + # Mirror the `valid` masking used for y so out-of-range tokens + # contribute zero bytes (matches y=0 substitution above). 
+ y_bytes_arg = torch.where( + valid, y_bytes_arg, torch.zeros_like(y_bytes_arg) + ) + with torch.no_grad(): + _accumulate_bpb( + per_tok_loss, + x, + y, + chunk_offsets, + chunk_lens, + ctx_pos, + val_data.base_bytes_lut, + val_data.has_leading_space_lut, + val_data.is_boundary_token_lut, + loss_sum, + byte_sum, + token_count, + y_bytes=y_bytes_arg, + ) + if needs_train: + activate_chunk_mask = (num_chunks_t - 1 > ci).float() + for gi in range(h.ttt_grad_steps): + if gi > 0: + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + per_tok_loss = forward_ttt_train(x, y, lora=cur_lora) + per_doc = per_tok_loss[ + :, chunk_offset : chunk_offset + chunk_size + ].mean(dim=-1) + cur_opt.zero_grad(set_to_none=True) + (per_doc * activate_chunk_mask).sum().backward() + cur_opt.step() + else: + del per_tok_loss + batch_num = orig_batch_idx + 1 + doc_lens = [dl for _, dl in batch] + should_report = batch_num in eval_batch_set if eval_batch_set is not None else True + if should_report: + cur_tokens = token_count.item() + cur_loss_val = loss_sum.item() + cur_bytes_val = byte_sum.item() + dt = cur_tokens - prev_tokens + db = cur_bytes_val - prev_bytes + if dt > 0 and db > 0: + b_loss = (cur_loss_val - prev_loss) / dt + b_bpb = b_loss / math.log(2.0) * (dt / db) + else: + b_loss = b_bpb = 0.0 + r_loss = cur_loss_val / max(cur_tokens, 1) + r_bpb = r_loss / math.log(2.0) * (cur_tokens / max(cur_bytes_val, 1)) + elapsed = time.perf_counter() - t_start + log( + f"ttp: b{batch_num}/{queue_len} bl:{b_loss:.4f} bb:{b_bpb:.4f} " + f"rl:{r_loss:.4f} rb:{r_bpb:.4f} dl:{min(doc_lens)}-{max(doc_lens)} " + f"gd:{int(global_ttt_done)}" + ) + if not global_ttt_done: + local_scored_docs.extend( + (orig_batch_idx, pos, doc_start, doc_len) + for pos, (doc_start, doc_len) in enumerate(batch) + ) + prefix_done = _add_to_counter(prefix_counter_path, len(batch_entries)) + if prefix_done >= current_phase_boundary: + try: + with open(pause_flag_path, "x"): + pass + except FileExistsError: + pass + should_pause = os.path.exists(pause_flag_path) + if should_pause: + if dist.is_available() and dist.is_initialized(): + dist.barrier() + gathered_scored_docs = [None] * h.world_size + if dist.is_available() and dist.is_initialized(): + dist.all_gather_object(gathered_scored_docs, local_scored_docs) + else: + gathered_scored_docs = [local_scored_docs] + scored_docs_for_global = [] + for rank_docs in gathered_scored_docs: + if rank_docs: + scored_docs_for_global.extend(rank_docs) + scored_docs_for_global.sort(key=lambda x: (x[0], x[1])) + scored_docs_for_global = scored_docs_for_global[:current_phase_boundary] + scored_token_chunks = [ + val_data.val_tokens[doc_start : doc_start + doc_len] + for _, _, doc_start, doc_len in scored_docs_for_global + ] + if scored_token_chunks: + global_ttt_tokens = torch.cat(scored_token_chunks) + else: + global_ttt_tokens = val_data.val_tokens[:0] + if h.rank == 0: + prefix_done = 0 + try: + with open(prefix_counter_path, "rb") as f: + prefix_done = int.from_bytes( + f.read(8), "little", signed=True + ) + except FileNotFoundError: + pass + log( + f"ttpp: phase:{current_phase + 1}/{num_phases} pd:{prefix_done} " + f"gd:{len(scored_docs_for_global)} " + f"t:{time.perf_counter() - t_start:.1f}s" + ) + train_val_ttt_global_sgd_distributed( + h, device, val_data, base_model, global_ttt_tokens + ) + for p in base_model.parameters(): + p.requires_grad_(False) + reusable_lora = BatchedTTTLoRA( + h.ttt_batch_size, base_model, h.ttt_lora_rank, + k_lora=h.ttt_k_lora, mlp_lora=h.ttt_mlp_lora, 
o_lora=h.ttt_o_lora, + ).to(device) + reusable_opt = _build_opt(reusable_lora) + current_phase += 1 + if current_phase >= num_phases: + global_ttt_done = True + else: + current_phase_boundary = phase_boundaries[current_phase] + if h.rank == 0: + try: + os.remove(pause_flag_path) + except FileNotFoundError: + pass + if dist.is_available() and dist.is_initialized(): + dist.barrier() + if h.rank == 0: + log(f"ttpr: phase:{current_phase}/{num_phases} t:{time.perf_counter() - t_start:.1f}s") + del cur_lora, cur_opt + finally: + pass + if dist.is_available() and dist.is_initialized(): + dist.all_reduce(loss_sum, op=dist.ReduceOp.SUM) + dist.all_reduce(byte_sum, op=dist.ReduceOp.SUM) + dist.all_reduce(token_count, op=dist.ReduceOp.SUM) + for p in base_model.parameters(): + p.requires_grad_(True) + base_model.train() + return _loss_bpb_from_sums(loss_sum, token_count, byte_sum) + + +def timed_eval(label, fn, *args, **kwargs): + torch.cuda.synchronize() + t0 = time.perf_counter() + val_loss, val_bpb = fn(*args, **kwargs) + torch.cuda.synchronize() + elapsed_ms = 1e3 * (time.perf_counter() - t0) + log( + f"{label} val_loss:{val_loss:.8f} val_bpb:{val_bpb:.8f} eval_time:{elapsed_ms:.0f}ms" + ) + return val_loss, val_bpb + + +def train_model(h, device, val_data): + global _LEAKY_RELU_SLOPE + _LEAKY_RELU_SLOPE = h.leaky_relu_slope + base_model = GPT(h).to(device).bfloat16() + restore_fp32_params(base_model) + compiled_model = torch.compile(base_model, dynamic=False, fullgraph=True) + compiled_forward_logits = torch.compile( + base_model.forward_logits, dynamic=False, fullgraph=True + ) + model = compiled_model + log(f"model_params:{sum(p.numel()for p in base_model.parameters())}") + optimizers = Optimizers(h, base_model) + train_loader = DocumentPackingLoader(h, device) + max_wallclock_ms = ( + 1e3 * h.max_wallclock_seconds if h.max_wallclock_seconds > 0 else None + ) + if max_wallclock_ms is not None: + max_wallclock_ms -= h.gptq_reserve_seconds * 1e3 + log( + f"gptq:reserving {h.gptq_reserve_seconds:.0f}s, effective={max_wallclock_ms:.0f}ms" + ) + + def training_frac(step, elapsed_ms): + if max_wallclock_ms is None: + return step / max(h.iterations, 1) + return elapsed_ms / max(max_wallclock_ms, 1e-09) + + def lr_mul(frac): + if h.warmdown_frac <= 0: + return 1.0 + if frac >= 1.0 - h.warmdown_frac: + return max((1.0 - frac) / h.warmdown_frac, h.min_lr) + return 1.0 + + _clip_params = [p for p in base_model.parameters() if p.requires_grad] + def step_fn(step, lr_scale): + train_loss = torch.zeros((), device=device) + for micro_step in range(h.grad_accum_steps): + x, y, cu_seqlens, _max_seqlen = train_loader.next_batch( + h.train_batch_tokens, h.grad_accum_steps + ) + with torch.autocast(device_type="cuda", dtype=torch.bfloat16, enabled=True): + loss = model(x, y, cu_seqlens=cu_seqlens, max_seqlen=h.train_seq_len) + train_loss += loss.detach() + (loss / h.grad_accum_steps).backward() + train_loss /= h.grad_accum_steps + if step <= h.muon_momentum_warmup_steps: + frac = ( + min(step / h.muon_momentum_warmup_steps, 1.0) + if h.muon_momentum_warmup_steps > 0 + else 1.0 + ) + muon_momentum = ( + 1 - frac + ) * h.muon_momentum_warmup_start + frac * h.muon_momentum + for group in optimizers.optimizer_muon.param_groups: + group["momentum"] = muon_momentum + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["base_lr"] * lr_scale + if h.grad_clip_norm > 0: + torch.nn.utils.clip_grad_norm_(_clip_params, h.grad_clip_norm) + optimizers.step(distributed=h.distributed)
+ return train_loss + + if h.warmup_steps > 0: + initial_model_state = { + name: tensor.detach().cpu().clone() + for (name, tensor) in base_model.state_dict().items() + } + initial_optimizer_states = [ + copy.deepcopy(opt.state_dict()) for opt in optimizers + ] + model.train() + num_tokens_local = h.train_batch_tokens // h.world_size + for blk in base_model.blocks: + blk.attn.rotary(num_tokens_local, device, torch.bfloat16) + cu_bucket_size = train_loader.cu_bucket_size + warmup_cu_buckets = tuple(cu_bucket_size * i for i in range(1, 5)) + warmup_cu_iters = 3 + x, y, cu_seqlens, _ = train_loader.next_batch( + h.train_batch_tokens, h.grad_accum_steps + ) + log(f"warmup_cu_buckets:{','.join(str(b) for b in warmup_cu_buckets)} iters_each:{warmup_cu_iters}") + def _run_cu_bucket_warmup(): + for bucket_len in warmup_cu_buckets: + boundaries = list(range(0, x.size(1), max(h.train_seq_len, 1))) + if boundaries[-1] != x.size(1): + boundaries.append(x.size(1)) + cu = torch.full((bucket_len,), x.size(1), dtype=torch.int32, device=device) + cu[: len(boundaries)] = torch.tensor(boundaries, dtype=torch.int32, device=device) + for _ in range(warmup_cu_iters): + optimizers.zero_grad_all() + with torch.autocast(device_type="cuda", dtype=torch.bfloat16, enabled=True): + wloss = model(x, y, cu_seqlens=cu, max_seqlen=h.train_seq_len) + (wloss / h.grad_accum_steps).backward() + optimizers.zero_grad_all() + _run_cu_bucket_warmup() + if h.num_loops > 0: + base_model.looping_active = True + _run_cu_bucket_warmup() + base_model.looping_active = False + for warmup_step in range(h.warmup_steps): + step_fn(warmup_step, 1.0) + if ( + warmup_step <= 5 + or (warmup_step + 1) % 10 == 0 + or warmup_step + 1 == h.warmup_steps + ): + log(f"warmup_step: {warmup_step+1}/{h.warmup_steps}") + if h.num_loops > 0: + base_model.looping_active = True + log( + f"loop_warmup:enabled encoder:{base_model.encoder_indices} decoder:{base_model.decoder_indices}" + ) + for warmup_step in range(h.warmup_steps): + step_fn(warmup_step, 1.0) + if ( + warmup_step <= 5 + or (warmup_step + 1) % 10 == 0 + or warmup_step + 1 == h.warmup_steps + ): + log(f"loop_warmup_step: {warmup_step+1}/{h.warmup_steps}") + base_model.looping_active = False + base_model.load_state_dict(initial_model_state, strict=True) + for (opt, state) in zip(optimizers, initial_optimizer_states, strict=True): + opt.load_state_dict(state) + optimizers.zero_grad_all() + train_loader = DocumentPackingLoader(h, device) + _live_state = base_model.state_dict(keep_vars=True) + ema_state = { + name: t.detach().float().clone() + for (name, t) in _live_state.items() + } + _ema_pairs = [(ema_state[name], t) for (name, t) in _live_state.items()] + ema_decay = h.ema_decay + training_time_ms = 0.0 + stop_after_step = None + torch.cuda.synchronize() + t0 = time.perf_counter() + step = 0 + while True: + last_step = ( + step == h.iterations + or stop_after_step is not None + and step >= stop_after_step + ) + should_validate = ( + last_step or h.val_loss_every > 0 and step % h.val_loss_every == 0 + ) + if should_validate: + torch.cuda.synchronize() + training_time_ms += 1e3 * (time.perf_counter() - t0) + val_loss, val_bpb = eval_val( + h, device, val_data, model, compiled_forward_logits + ) + log( + f"{step}/{h.iterations} val_loss: {val_loss:.4f} val_bpb: {val_bpb:.4f}" + ) + torch.cuda.synchronize() + t0 = time.perf_counter() + if last_step: + if stop_after_step is not None and step < h.iterations: + log( + f"stopping_early: wallclock_cap train_time: {training_time_ms:.0f}ms step: 
{step}/{h.iterations}" + ) + break + elapsed_ms = training_time_ms + 1e3 * (time.perf_counter() - t0) + frac = training_frac(step, elapsed_ms) + scale = lr_mul(frac) + if ( + h.num_loops > 0 + and not base_model.looping_active + and frac >= h.enable_looping_at + ): + base_model.looping_active = True + log( + f"layer_loop:enabled step:{step} frac:{frac:.3f} encoder:{base_model.encoder_indices} decoder:{base_model.decoder_indices}" + ) + train_loss = step_fn(step, scale) + with torch.no_grad(): + for ema_t, t in _ema_pairs: + ema_t.mul_(ema_decay).add_(t.detach(), alpha=1.0 - ema_decay) + step += 1 + approx_training_time_ms = training_time_ms + 1e3 * (time.perf_counter() - t0) + should_log_train = h.train_log_every > 0 and ( + step <= 5 or step % h.train_log_every == 0 or stop_after_step is not None + ) + if should_log_train: + tok_per_sec = step * h.train_batch_tokens / (approx_training_time_ms / 1e3) + log( + f"{step}/{h.iterations} train_loss: {train_loss.item():.4f} train_time: {approx_training_time_ms/60000:.1f}m tok/s: {tok_per_sec:.0f}" + ) + reached_cap = ( + max_wallclock_ms is not None and approx_training_time_ms >= max_wallclock_ms + ) + if h.distributed and max_wallclock_ms is not None: + reached_cap_tensor = torch.tensor(int(reached_cap), device=device) + dist.all_reduce(reached_cap_tensor, op=dist.ReduceOp.MAX) + reached_cap = bool(reached_cap_tensor.item()) + if stop_after_step is None and reached_cap: + stop_after_step = step + log( + f"peak memory allocated: {torch.cuda.max_memory_allocated()//1024//1024} MiB reserved: {torch.cuda.max_memory_reserved()//1024//1024} MiB" + ) + log("ema:applying EMA weights") + current_state = base_model.state_dict() + avg_state = { + name: t.to(dtype=current_state[name].dtype) for (name, t) in ema_state.items() + } + base_model.load_state_dict(avg_state, strict=True) + return base_model, compiled_model, compiled_forward_logits + + +def train_and_eval(h, device): + random.seed(h.seed) + np.random.seed(h.seed) + torch.manual_seed(h.seed) + torch.cuda.manual_seed_all(h.seed) + if h.artifact_dir and h.is_main_process: + os.makedirs(h.artifact_dir, exist_ok=True) + val_data = ValidationData(h, device) + log( + f"train_shards: {len(list(Path(h.datasets_dir).resolve().glob('fineweb_train_*.bin')))}" + ) + log(f"val_tokens: {val_data.val_tokens.numel()-1}") + # TTT_EVAL_ONLY: skip training + GPTQ, jump straight to TTT eval on a + # pre-existing quantized artifact. Used to test TTT-only improvements + # (e.g., PR-1767's alpha/warm-start/WD) without retraining. 
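+ # Example (hypothetical invocation; the exact script name is assumed, not part of this diff):
+ #   TTT_EVAL_ONLY=1 python train.py
+ # This path expects the serialized quantized artifact (h.quantized_model_path, i.e.
+ # final_model.int6.ptz in the run logged below) to already exist on disk, since
+ # deserialize() below loads it instead of re-running training and GPTQ.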
+ ttt_eval_only = os.environ.get("TTT_EVAL_ONLY", "0") == "1" + if ttt_eval_only: + log("TTT_EVAL_ONLY=1 — skipping training + GPTQ, loading saved artifact for TTT eval") + log(f"ttt_lora_alpha: {BatchedLinearLoRA._ALPHA}") + log(f"ttt_warm_start_a: {BatchedLinearLoRA._WARM_START_A}") + log(f"ttt_weight_decay: {h.ttt_weight_decay}") + else: + base_model, compiled_model, compiled_forward_logits = train_model( + h, device, val_data + ) + torch._dynamo.reset() + timed_eval( + "diagnostic pre-quantization post-ema", + eval_val, + h, + device, + val_data, + compiled_model, + compiled_forward_logits, + ) + if os.environ.get("PREQUANT_ONLY", "0") == "1": + log("PREQUANT_ONLY=1 — skipping serialize/GPTQ/post-quant eval/TTT") + return + serialize(h, base_model, Path(__file__).read_text(encoding="utf-8")) + if h.distributed: + dist.barrier() + eval_model = deserialize(h, device) + if h.num_loops > 0: + eval_model.looping_active = True + if not ttt_eval_only: + compiled_model = torch.compile(eval_model, dynamic=False, fullgraph=True) + compiled_forward_logits = torch.compile( + eval_model.forward_logits, dynamic=False, fullgraph=True + ) + timed_eval( + "diagnostic quantized", + eval_val, + h, + device, + val_data, + compiled_model, + compiled_forward_logits, + ) + del eval_model + if h.ttt_enabled: + if not ttt_eval_only: + del compiled_model + if ttt_eval_only: + del eval_model + torch._dynamo.reset() + torch.cuda.empty_cache() + ttt_model = deserialize(h, device) + if h.num_loops > 0: + ttt_model.looping_active = True + for p in ttt_model.parameters(): + p.requires_grad_(False) + + if h.rope_yarn: + _yarn_seqlen = h.train_batch_tokens // h.grad_accum_steps + for block in ttt_model.blocks: + block.attn.rotary(_yarn_seqlen, device, torch.bfloat16) + else: + for block in ttt_model.blocks: + block.attn.rotary._cos_cached = None + block.attn.rotary._sin_cached = None + block.attn.rotary._seq_len_cached = 0 + block.attn.rotary(h.ttt_eval_seq_len, device, torch.bfloat16) + + def _fwd_ttt_inner(input_ids, target_ids, lora): + return ttt_model.forward_ttt(input_ids, target_ids, lora=lora) + + _fwd_ttt_compiled_inner = None + + def _fwd_ttt(input_ids, target_ids, lora): + nonlocal _fwd_ttt_compiled_inner + if _fwd_ttt_compiled_inner is None: + _fwd_ttt_compiled_inner = _fwd_ttt_inner + return _fwd_ttt_compiled_inner(input_ids, target_ids, lora=lora) + + fwd_ttt_compiled = _fwd_ttt + log(f"ttt_lora:warming up compile (random tokens, no val data)") + global BOS_ID + if BOS_ID is None: + BOS_ID = 1 + t_warmup = time.perf_counter() + warmup_bszes = [h.ttt_batch_size] + for bsz in warmup_bszes: + wl = BatchedTTTLoRA( + bsz, ttt_model, h.ttt_lora_rank, + k_lora=h.ttt_k_lora, mlp_lora=h.ttt_mlp_lora, o_lora=h.ttt_o_lora, + ).to(device) + wo = torch.optim.AdamW( + wl.parameters(), + lr=h.ttt_lora_lr, + betas=(h.ttt_beta1, h.ttt_beta2), + eps=1e-10, + weight_decay=h.ttt_weight_decay, + fused=True, + ) + for ctx_len in (h.ttt_chunk_size, h.ttt_eval_seq_len): + xw = torch.randint(0, h.vocab_size, (bsz, ctx_len), device=device, dtype=torch.int64) + yw = torch.randint(0, h.vocab_size, (bsz, ctx_len), device=device, dtype=torch.int64) + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + ptl = fwd_ttt_compiled(xw, yw, lora=wl) + ptl[:, : min(h.ttt_chunk_size, ctx_len)].mean(dim=-1).sum().backward() + wo.step() + wo.zero_grad(set_to_none=True) + del wl, wo + torch.cuda.empty_cache() + compile_elapsed = time.perf_counter() - t_warmup + log(f"ttt_lora:compile warmup done ({compile_elapsed:.1f}s)") + 
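+ # The warmup loop above pays first-call setup cost (a fresh BatchedTTTLoRA + AdamW and one
+ # throwaway forward/backward/step per context length on random tokens) before the TTT eval
+ # timer starts below, so that cost is excluded from the reported eval_time.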
log("\nbeginning TTT eval timer") + torch.cuda.synchronize() + t_ttt = time.perf_counter() + ttt_val_loss, ttt_val_bpb = eval_val_ttt_phased( + h, ttt_model, device, val_data, forward_ttt_train=fwd_ttt_compiled + ) + torch.cuda.synchronize() + ttt_eval_elapsed = time.perf_counter() - t_ttt + log( + "quantized_ttt_phased " + f"val_loss:{ttt_val_loss:.8f} val_bpb:{ttt_val_bpb:.8f} " + f"eval_time:{1e3*ttt_eval_elapsed:.0f}ms" + ) + log(f"total_eval_time:{ttt_eval_elapsed:.1f}s") + del ttt_model + + +def main(): + world_size = int(os.environ.get("WORLD_SIZE", "1")) + local_rank = int(os.environ.get("LOCAL_RANK", "0")) + distributed = "RANK" in os.environ and "WORLD_SIZE" in os.environ + if not torch.cuda.is_available(): + raise RuntimeError("CUDA is required") + if world_size <= 0: + raise ValueError(f"WORLD_SIZE must be positive, got {world_size}") + if 8 % world_size != 0: + raise ValueError( + f"WORLD_SIZE={world_size} must divide 8 so grad_accum_steps stays integral" + ) + device = torch.device("cuda", local_rank) + torch.cuda.set_device(device) + if distributed: + dist.init_process_group(backend="nccl", device_id=device) + dist.barrier() + torch.backends.cuda.matmul.allow_tf32 = True + torch.backends.cudnn.allow_tf32 = True + torch.set_float32_matmul_precision("high") + from torch.backends.cuda import ( + enable_cudnn_sdp, + enable_flash_sdp, + enable_math_sdp, + enable_mem_efficient_sdp, + ) + + enable_cudnn_sdp(False) + enable_flash_sdp(True) + enable_mem_efficient_sdp(False) + enable_math_sdp(False) + torch._dynamo.config.optimize_ddp = False + torch._dynamo.config.cache_size_limit = 64 + h = Hyperparameters() + set_logging_hparams(h) + if h.is_main_process: + os.makedirs(h.artifact_dir if h.artifact_dir else "logs", exist_ok=True) + log(100 * "=", console=False) + log("Hyperparameters:", console=True) + for (k, v) in sorted(vars(type(h)).items()): + if not k.startswith("_"): + log(f" {k}: {v}", console=True) + log("=" * 100, console=False) + log("Source code:", console=False) + log("=" * 100, console=False) + with open(__file__, "r", encoding="utf-8") as _src: + log(_src.read(), console=False) + log("=" * 100, console=False) + log(f"Running Python {sys.version}", console=False) + log(f"Running PyTorch {torch.__version__}", console=False) + log("=" * 100, console=False) + train_and_eval(h, device) + if distributed: + dist.destroy_process_group() + + +if __name__ == "__main__": + main() diff --git a/train_h200_seed0.log b/train_h200_seed0.log new file mode 100644 index 0000000000..f702bb26f3 --- /dev/null +++ b/train_h200_seed0.log @@ -0,0 +1,1318 @@ +Hyperparameters: + adam_eps: 1e-08 + adam_wd: 0.02 + artifact_dir: + attn_clip_sigmas: 13.0 + attn_out_gate_enabled: False + attn_out_gate_src: proj + beta1: 0.9 + beta2: 0.95 + bigram_blend_enabled: True + bigram_blend_lambda: 0.03 + caseops_enabled: False + compressor: brotli + data_dir: ./data/ + datasets_dir: ./data/datasets/fineweb10B_sp8192 + distributed: False + ema_decay: 0.9965 + embed_bits: 7 + embed_clip_sigmas: 14.0 + embed_lr: 0.6 + embed_wd: 0.085 + enable_looping_at: 0.35 + entropy_weighted_loss: False + eval_seq_len: 2048 + eval_stride: 64 + ewl_max_weight: 3.0 + ewl_min_weight: 0.3 + fused_ce_enabled: True + gate_window: 12 + gated_attn_enabled: False + gated_attn_init_std: 0.01 + gated_attn_quant_gate: False + global_ttt_batch_seqs: 32 + global_ttt_chunk_tokens: 32768 + global_ttt_epochs: 1 + global_ttt_grad_clip: 1.0 + global_ttt_lr: 0.001 + global_ttt_momentum: 0.9 + global_ttt_respect_doc_boundaries: True + 
global_ttt_warmup_chunks: 0 + global_ttt_warmup_start_lr: 0.0 + gptq_calibration_batches: 16 + gptq_reserve_seconds: 4.0 + grad_accum_steps: 8 + grad_clip_norm: 0.3 + is_main_process: True + iterations: 5000 + leaky_relu_slope: 0.3 + ln_scale: True + local_rank: 0 + logfile: logs/5f2ad09d-20fc-4453-8068-3148dc957cd5.txt + logit_softcap: 30.0 + loop_end: 5 + loop_start: 3 + lqer_asym_enabled: True + lqer_asym_group: 64 + lqer_enabled: True + lqer_factor_bits: 4 + lqer_rank: 4 + lqer_top_k: 3 + matrix_bits: 6 + matrix_clip_sigmas: 12.85 + matrix_lr: 0.026 + max_wallclock_seconds: 0.0 + min_lr: 0.1 + mlp_clip_sigmas: 11.5 + mlp_mult: 4.0 + model_dim: 512 + model_path: final_model.pt + muon_backend_steps: 5 + muon_momentum: 0.97 + muon_momentum_warmup_start: 0.92 + muon_momentum_warmup_steps: 1500 + muon_row_normalize: True + muon_wd: 0.095 + num_heads: 8 + num_kv_heads: 4 + num_layers: 11 + num_loops: 2 + parallel_final_lane: mean + parallel_start_layer: 8 + phased_ttt_num_phases: 1 + phased_ttt_prefix_docs: 2000 + qk_gain_init: 5.25 + quantized_model_path: final_model.int6.ptz + rank: 0 + rope_base: 10000.0 + rope_dims: 16 + rope_train_seq_len: 2048 + rope_yarn: False + run_id: 5f2ad09d-20fc-4453-8068-3148dc957cd5 + scalar_lr: 0.02 + seed: 0 + skip_gates_enabled: True + smear_gate_enabled: True + sparse_attn_gate_enabled: True + sparse_attn_gate_init_std: 0.0 + sparse_attn_gate_scale: 1.0 + temp_scale_enabled: False + temp_scale_ent_high: 6.0 + temp_scale_ent_low: 2.0 + temp_scale_high: 1.15 + temp_scale_low: 0.85 + tie_embeddings: True + tied_embed_init_std: 0.005 + tied_embed_lr: 0.03 + tokenizer_path: ./data/tokenizers/fineweb_8192_bpe.model + train_batch_tokens: 786432 + train_files: ./data/datasets/fineweb10B_sp8192/fineweb_train_*.bin + train_log_every: 500 + train_seq_len: 2048 + ttt_batch_size: 64 + ttt_beta1: 0.0 + ttt_beta2: 0.999 + ttt_chunk_size: 48 + ttt_enabled: True + ttt_eval_batches: + ttt_eval_seq_len: 2048 + ttt_grad_steps: 1 + ttt_k_lora: True + ttt_lora_lr: 0.0001 + ttt_lora_rank: 96 + ttt_mlp_lora: True + ttt_o_lora: True + ttt_optimizer: adam + ttt_weight_decay: 1.0 + val_batch_tokens: 524288 + val_bytes_files: ./data/datasets/fineweb10B_sp8192/fineweb_val_bytes_*.bin + val_doc_fraction: 1.0 + val_files: ./data/datasets/fineweb10B_sp8192/fineweb_val_*.bin + val_loss_every: 5000 + vocab_size: 8192 + warmdown_frac: 0.75 + warmup_steps: 20 + world_size: 1 + xsa_last_n: 11 +train_shards: 5 +val_tokens: 40540160 +model_params:35945671 +warmup_cu_buckets:64,128,192,256 iters_each:3 +warmup_step: 1/20 +warmup_step: 2/20 +warmup_step: 3/20 +warmup_step: 4/20 +warmup_step: 5/20 +warmup_step: 6/20 +warmup_step: 10/20 +warmup_step: 20/20 +loop_warmup:enabled encoder:[0, 1, 2, 3, 4, 5, 3, 4] decoder:[5, 3, 4, 5, 6, 7, 8, 9, 10] +loop_warmup_step: 1/20 +loop_warmup_step: 2/20 +loop_warmup_step: 3/20 +loop_warmup_step: 4/20 +loop_warmup_step: 5/20 +loop_warmup_step: 6/20 +loop_warmup_step: 10/20 +loop_warmup_step: 20/20 +bigram_blend:enabled lambda=0.03 +0/5000 val_loss: 7.9758 val_bpb: 3.0876 +1/5000 train_loss: 9.0079 train_time: 0.0m tok/s: 1301639 +2/5000 train_loss: 12.4702 train_time: 0.0m tok/s: 1247028 +3/5000 train_loss: 11.2348 train_time: 0.0m tok/s: 1218153 +4/5000 train_loss: 9.6742 train_time: 0.0m tok/s: 1205558 +5/5000 train_loss: 8.4816 train_time: 0.1m tok/s: 1201216 +500/5000 train_loss: 3.3133 train_time: 5.6m tok/s: 1166177 +1000/5000 train_loss: 3.2156 train_time: 11.3m tok/s: 1164920 +1500/5000 train_loss: 3.2314 train_time: 16.9m tok/s: 1164480 
+layer_loop:enabled step:1750 frac:0.350 encoder:[0, 1, 2, 3, 4, 5, 3, 4] decoder:[5, 3, 4, 5, 6, 7, 8, 9, 10] +2000/5000 train_loss: 3.0763 train_time: 23.9m tok/s: 1095582 +2500/5000 train_loss: 3.0203 train_time: 32.4m tok/s: 1012026 +3000/5000 train_loss: 2.9479 train_time: 40.8m tok/s: 963459 +3500/5000 train_loss: 2.8867 train_time: 49.2m tok/s: 931547 +4000/5000 train_loss: 2.7535 train_time: 57.7m tok/s: 908176 +4500/5000 train_loss: 2.7693 train_time: 66.2m tok/s: 891495 +5000/5000 train_loss: 2.8086 train_time: 74.6m tok/s: 878652 +bigram_blend:enabled lambda=0.03 +5000/5000 val_loss: 2.7868 val_bpb: 1.0788 +peak memory allocated: 42423 MiB reserved: 59798 MiB +ema:applying EMA weights +bigram_blend:enabled lambda=0.03 +diagnostic pre-quantization post-ema val_loss:2.75170702 val_bpb:1.06524298 eval_time:30278ms +Serialized model: 135417533 bytes +Code size (uncompressed): 164068 bytes +Code size (compressed): 32949 bytes +GPTQ:collecting Hessians from calibration data... +GPTQ:collected 67 Hessians in 3.2s +Quantized weights: + gptq (int6): blocks.attn.c_k.weight, blocks.attn.c_q.weight, blocks.attn.c_v.weight, blocks.attn.proj.weight, blocks.mlp.fc.weight, blocks.mlp.proj.weight + gptq (int6)+lqer_asym: blocks.mlp.fc.weight + gptq (int7)+lqer_asym: tok_emb.weight + passthrough (float16): blocks.attn.attn_gate_w, blocks.attn.q_gain, blocks.attn_scale, blocks.mlp_scale, blocks.resid_mix, parallel_post_lambdas, parallel_resid_lambdas, skip_gates, skip_weights, smear_gate.weight, smear_lambda +Serialized model quantized+brotli: 16118127 bytes +Total submission size quantized+brotli: 16151076 bytes +bigram_blend:enabled lambda=0.03 +diagnostic quantized val_loss:2.77695592 val_bpb:1.07501735 eval_time:35550ms +ttt_lora:warming up compile (random tokens, no val data) +ttt_lora:compile warmup done (5.5s) + +beginning TTT eval timer +ttt_phased: total_docs:50000 prefix_docs:2000 suffix_docs:48000 num_phases:1 boundaries:[2000] +ttp: b782/782 bl:2.5601 bb:1.0336 rl:2.5601 rb:1.0336 dl:26524-79464 gd:0 +ttp: b781/782 bl:2.5664 bb:1.0595 rl:2.5642 rb:1.0500 dl:14510-25988 gd:0 +ttp: b780/782 bl:2.6243 bb:1.0767 rl:2.5821 rb:1.0580 dl:11071-14414 gd:0 +ttp: b779/782 bl:2.6512 bb:1.0798 rl:2.5953 rb:1.0622 dl:9037-11049 gd:0 +ttp: b778/782 bl:2.7934 bb:1.1175 rl:2.6231 rb:1.0701 dl:7961-8997 gd:0 +ttp: b777/782 bl:2.7328 bb:1.0931 rl:2.6353 rb:1.0727 dl:7190-7938 gd:0 +ttp: b776/782 bl:2.7258 bb:1.0906 rl:2.6435 rb:1.0743 dl:6364-7180 gd:0 +ttp: b775/782 bl:2.6922 bb:1.0658 rl:2.6472 rb:1.0737 dl:5853-6355 gd:0 +ttp: b774/782 bl:2.7339 bb:1.0797 rl:2.6529 rb:1.0741 dl:5552-5852 gd:0 +ttp: b773/782 bl:2.6578 bb:1.0784 rl:2.6532 rb:1.0743 dl:5203-5550 gd:0 +ttp: b772/782 bl:2.7689 bb:1.1075 rl:2.6592 rb:1.0761 dl:4937-5193 gd:0 +ttp: b771/782 bl:2.7682 bb:1.0823 rl:2.6644 rb:1.0764 dl:4701-4937 gd:0 +ttp: b770/782 bl:2.6652 bb:1.0547 rl:2.6644 rb:1.0754 dl:4479-4698 gd:0 +ttp: b769/782 bl:2.7772 bb:1.0991 rl:2.6689 rb:1.0764 dl:4307-4479 gd:0 +ttp: b768/782 bl:2.7130 bb:1.0887 rl:2.6705 rb:1.0768 dl:4128-4306 gd:0 +ttp: b767/782 bl:2.7602 bb:1.1021 rl:2.6736 rb:1.0777 dl:3963-4123 gd:0 +ttp: b766/782 bl:2.6503 bb:1.0376 rl:2.6728 rb:1.0764 dl:3846-3962 gd:0 +ttp: b765/782 bl:2.7885 bb:1.0951 rl:2.6763 rb:1.0770 dl:3743-3845 gd:0 +ttp: b764/782 bl:2.7714 bb:1.1012 rl:2.6790 rb:1.0777 dl:3639-3742 gd:0 +ttp: b763/782 bl:2.8004 bb:1.1050 rl:2.6823 rb:1.0784 dl:3536-3637 gd:0 +ttp: b762/782 bl:2.8307 bb:1.0774 rl:2.6860 rb:1.0784 dl:3431-3533 gd:0 +ttp: b761/782 bl:2.7553 bb:1.0658 rl:2.6877 
rb:1.0781 dl:3336-3430 gd:0 +ttp: b760/782 bl:2.8490 bb:1.1189 rl:2.6914 rb:1.0790 dl:3255-3334 gd:0 +ttp: b759/782 bl:2.7301 bb:1.1019 rl:2.6922 rb:1.0795 dl:3188-3253 gd:0 +ttp: b758/782 bl:2.8856 bb:1.0888 rl:2.6963 rb:1.0797 dl:3108-3187 gd:0 +ttp: b757/782 bl:2.6431 bb:1.0215 rl:2.6952 rb:1.0785 dl:3033-3108 gd:0 +ttp: b756/782 bl:2.7869 bb:1.0802 rl:2.6970 rb:1.0786 dl:2973-3032 gd:0 +ttp: b755/782 bl:2.6928 bb:1.0433 rl:2.6969 rb:1.0779 dl:2899-2972 gd:0 +ttp: b754/782 bl:2.6920 bb:1.0563 rl:2.6968 rb:1.0775 dl:2839-2899 gd:0 +ttp: b753/782 bl:2.7511 bb:1.0493 rl:2.6977 rb:1.0770 dl:2795-2838 gd:0 +ttp: b752/782 bl:2.7683 bb:1.0621 rl:2.6989 rb:1.0767 dl:2740-2793 gd:0 +ttp: b751/782 bl:2.7975 bb:1.0740 rl:2.7005 rb:1.0767 dl:2689-2740 gd:0 +ttpp: phase:1/1 pd:2000 gd:2000 t:1648.5s +tttg: c1/333 lr:0.001000 t:0.4s +tttg: c2/333 lr:0.001000 t:0.5s +tttg: c3/333 lr:0.001000 t:0.6s +tttg: c4/333 lr:0.001000 t:0.7s +tttg: c5/333 lr:0.001000 t:0.9s +tttg: c6/333 lr:0.000999 t:1.0s +tttg: c7/333 lr:0.000999 t:1.1s +tttg: c8/333 lr:0.000999 t:1.2s +tttg: c9/333 lr:0.000999 t:1.3s +tttg: c10/333 lr:0.000998 t:1.4s +tttg: c11/333 lr:0.000998 t:1.5s +tttg: c12/333 lr:0.000997 t:1.6s +tttg: c13/333 lr:0.000997 t:1.8s +tttg: c14/333 lr:0.000996 t:1.9s +tttg: c15/333 lr:0.000996 t:2.0s +tttg: c16/333 lr:0.000995 t:2.1s +tttg: c17/333 lr:0.000994 t:2.2s +tttg: c18/333 lr:0.000994 t:2.3s +tttg: c19/333 lr:0.000993 t:2.4s +tttg: c20/333 lr:0.000992 t:2.5s +tttg: c21/333 lr:0.000991 t:2.7s +tttg: c22/333 lr:0.000990 t:2.8s +tttg: c23/333 lr:0.000989 t:2.9s +tttg: c24/333 lr:0.000988 t:3.0s +tttg: c25/333 lr:0.000987 t:3.1s +tttg: c26/333 lr:0.000986 t:3.2s +tttg: c27/333 lr:0.000985 t:3.3s +tttg: c28/333 lr:0.000984 t:3.4s +tttg: c29/333 lr:0.000983 t:3.6s +tttg: c30/333 lr:0.000981 t:3.7s +tttg: c31/333 lr:0.000980 t:3.8s +tttg: c32/333 lr:0.000979 t:3.9s +tttg: c33/333 lr:0.000977 t:4.0s +tttg: c34/333 lr:0.000976 t:4.1s +tttg: c35/333 lr:0.000974 t:4.2s +tttg: c36/333 lr:0.000973 t:4.3s +tttg: c37/333 lr:0.000971 t:4.4s +tttg: c38/333 lr:0.000970 t:4.6s +tttg: c39/333 lr:0.000968 t:4.7s +tttg: c40/333 lr:0.000966 t:4.8s +tttg: c41/333 lr:0.000965 t:4.9s +tttg: c42/333 lr:0.000963 t:5.0s +tttg: c43/333 lr:0.000961 t:5.1s +tttg: c44/333 lr:0.000959 t:5.2s +tttg: c45/333 lr:0.000957 t:5.3s +tttg: c46/333 lr:0.000955 t:5.5s +tttg: c47/333 lr:0.000953 t:5.6s +tttg: c48/333 lr:0.000951 t:5.7s +tttg: c49/333 lr:0.000949 t:5.8s +tttg: c50/333 lr:0.000947 t:5.9s +tttg: c51/333 lr:0.000945 t:6.0s +tttg: c52/333 lr:0.000943 t:6.1s +tttg: c53/333 lr:0.000941 t:6.2s +tttg: c54/333 lr:0.000938 t:6.4s +tttg: c55/333 lr:0.000936 t:6.5s +tttg: c56/333 lr:0.000934 t:6.6s +tttg: c57/333 lr:0.000931 t:6.7s +tttg: c58/333 lr:0.000929 t:6.8s +tttg: c59/333 lr:0.000927 t:6.9s +tttg: c60/333 lr:0.000924 t:7.0s +tttg: c61/333 lr:0.000922 t:7.1s +tttg: c62/333 lr:0.000919 t:7.3s +tttg: c63/333 lr:0.000916 t:7.4s +tttg: c64/333 lr:0.000914 t:7.5s +tttg: c65/333 lr:0.000911 t:7.6s +tttg: c66/333 lr:0.000908 t:7.7s +tttg: c67/333 lr:0.000906 t:7.8s +tttg: c68/333 lr:0.000903 t:7.9s +tttg: c69/333 lr:0.000900 t:8.0s +tttg: c70/333 lr:0.000897 t:8.2s +tttg: c71/333 lr:0.000894 t:8.3s +tttg: c72/333 lr:0.000891 t:8.4s +tttg: c73/333 lr:0.000888 t:8.5s +tttg: c74/333 lr:0.000885 t:8.6s +tttg: c75/333 lr:0.000882 t:8.7s +tttg: c76/333 lr:0.000879 t:8.8s +tttg: c77/333 lr:0.000876 t:8.9s +tttg: c78/333 lr:0.000873 t:9.1s +tttg: c79/333 lr:0.000870 t:9.2s +tttg: c80/333 lr:0.000867 t:9.3s +tttg: c81/333 lr:0.000863 t:9.4s +tttg: 
c82/333 lr:0.000860 t:9.5s +tttg: c83/333 lr:0.000857 t:9.6s +tttg: c84/333 lr:0.000854 t:9.7s +tttg: c85/333 lr:0.000850 t:9.8s +tttg: c86/333 lr:0.000847 t:10.0s +tttg: c87/333 lr:0.000843 t:10.1s +tttg: c88/333 lr:0.000840 t:10.2s +tttg: c89/333 lr:0.000836 t:10.3s +tttg: c90/333 lr:0.000833 t:10.4s +tttg: c91/333 lr:0.000829 t:10.5s +tttg: c92/333 lr:0.000826 t:10.6s +tttg: c93/333 lr:0.000822 t:10.7s +tttg: c94/333 lr:0.000819 t:10.9s +tttg: c95/333 lr:0.000815 t:11.0s +tttg: c96/333 lr:0.000811 t:11.1s +tttg: c97/333 lr:0.000807 t:11.2s +tttg: c98/333 lr:0.000804 t:11.3s +tttg: c99/333 lr:0.000800 t:11.4s +tttg: c100/333 lr:0.000796 t:11.5s +tttg: c101/333 lr:0.000792 t:11.6s +tttg: c102/333 lr:0.000789 t:11.8s +tttg: c103/333 lr:0.000785 t:11.9s +tttg: c104/333 lr:0.000781 t:12.0s +tttg: c105/333 lr:0.000777 t:12.1s +tttg: c106/333 lr:0.000773 t:12.2s +tttg: c107/333 lr:0.000769 t:12.3s +tttg: c108/333 lr:0.000765 t:12.4s +tttg: c109/333 lr:0.000761 t:12.6s +tttg: c110/333 lr:0.000757 t:12.7s +tttg: c111/333 lr:0.000753 t:12.8s +tttg: c112/333 lr:0.000749 t:12.9s +tttg: c113/333 lr:0.000745 t:13.0s +tttg: c114/333 lr:0.000740 t:13.1s +tttg: c115/333 lr:0.000736 t:13.2s +tttg: c116/333 lr:0.000732 t:13.3s +tttg: c117/333 lr:0.000728 t:13.5s +tttg: c118/333 lr:0.000724 t:13.6s +tttg: c119/333 lr:0.000719 t:13.7s +tttg: c120/333 lr:0.000715 t:13.8s +tttg: c121/333 lr:0.000711 t:13.9s +tttg: c122/333 lr:0.000707 t:14.0s +tttg: c123/333 lr:0.000702 t:14.1s +tttg: c124/333 lr:0.000698 t:14.2s +tttg: c125/333 lr:0.000694 t:14.4s +tttg: c126/333 lr:0.000689 t:14.5s +tttg: c127/333 lr:0.000685 t:14.6s +tttg: c128/333 lr:0.000680 t:14.7s +tttg: c129/333 lr:0.000676 t:14.8s +tttg: c130/333 lr:0.000672 t:14.9s +tttg: c131/333 lr:0.000667 t:15.0s +tttg: c132/333 lr:0.000663 t:15.1s +tttg: c133/333 lr:0.000658 t:15.3s +tttg: c134/333 lr:0.000654 t:15.4s +tttg: c135/333 lr:0.000649 t:15.5s +tttg: c136/333 lr:0.000645 t:15.6s +tttg: c137/333 lr:0.000640 t:15.7s +tttg: c138/333 lr:0.000635 t:15.8s +tttg: c139/333 lr:0.000631 t:15.9s +tttg: c140/333 lr:0.000626 t:16.0s +tttg: c141/333 lr:0.000622 t:16.2s +tttg: c142/333 lr:0.000617 t:16.3s +tttg: c143/333 lr:0.000613 t:16.4s +tttg: c144/333 lr:0.000608 t:16.5s +tttg: c145/333 lr:0.000603 t:16.6s +tttg: c146/333 lr:0.000599 t:16.7s +tttg: c147/333 lr:0.000594 t:16.8s +tttg: c148/333 lr:0.000589 t:16.9s +tttg: c149/333 lr:0.000585 t:17.1s +tttg: c150/333 lr:0.000580 t:17.2s +tttg: c151/333 lr:0.000575 t:17.3s +tttg: c152/333 lr:0.000571 t:17.4s +tttg: c153/333 lr:0.000566 t:17.5s +tttg: c154/333 lr:0.000561 t:17.6s +tttg: c155/333 lr:0.000557 t:17.7s +tttg: c156/333 lr:0.000552 t:17.8s +tttg: c157/333 lr:0.000547 t:18.0s +tttg: c158/333 lr:0.000543 t:18.1s +tttg: c159/333 lr:0.000538 t:18.2s +tttg: c160/333 lr:0.000533 t:18.3s +tttg: c161/333 lr:0.000528 t:18.4s +tttg: c162/333 lr:0.000524 t:18.5s +tttg: c163/333 lr:0.000519 t:18.6s +tttg: c164/333 lr:0.000514 t:18.7s +tttg: c165/333 lr:0.000509 t:18.9s +tttg: c166/333 lr:0.000505 t:19.0s +tttg: c167/333 lr:0.000500 t:19.1s +tttg: c168/333 lr:0.000495 t:19.2s +tttg: c169/333 lr:0.000491 t:19.3s +tttg: c170/333 lr:0.000486 t:19.4s +tttg: c171/333 lr:0.000481 t:19.5s +tttg: c172/333 lr:0.000476 t:19.7s +tttg: c173/333 lr:0.000472 t:19.8s +tttg: c174/333 lr:0.000467 t:19.9s +tttg: c175/333 lr:0.000462 t:20.0s +tttg: c176/333 lr:0.000457 t:20.1s +tttg: c177/333 lr:0.000453 t:20.2s +tttg: c178/333 lr:0.000448 t:20.3s +tttg: c179/333 lr:0.000443 t:20.4s +tttg: c180/333 lr:0.000439 t:20.6s +tttg: c181/333 
lr:0.000434 t:20.7s +tttg: c182/333 lr:0.000429 t:20.8s +tttg: c183/333 lr:0.000425 t:20.9s +tttg: c184/333 lr:0.000420 t:21.0s +tttg: c185/333 lr:0.000415 t:21.1s +tttg: c186/333 lr:0.000411 t:21.2s +tttg: c187/333 lr:0.000406 t:21.3s +tttg: c188/333 lr:0.000401 t:21.5s +tttg: c189/333 lr:0.000397 t:21.6s +tttg: c190/333 lr:0.000392 t:21.7s +tttg: c191/333 lr:0.000387 t:21.8s +tttg: c192/333 lr:0.000383 t:21.9s +tttg: c193/333 lr:0.000378 t:22.0s +tttg: c194/333 lr:0.000374 t:22.1s +tttg: c195/333 lr:0.000369 t:22.2s +tttg: c196/333 lr:0.000365 t:22.4s +tttg: c197/333 lr:0.000360 t:22.5s +tttg: c198/333 lr:0.000355 t:22.6s +tttg: c199/333 lr:0.000351 t:22.7s +tttg: c200/333 lr:0.000346 t:22.8s +tttg: c201/333 lr:0.000342 t:22.9s +tttg: c202/333 lr:0.000337 t:23.0s +tttg: c203/333 lr:0.000333 t:23.2s +tttg: c204/333 lr:0.000328 t:23.3s +tttg: c205/333 lr:0.000324 t:23.4s +tttg: c206/333 lr:0.000320 t:23.5s +tttg: c207/333 lr:0.000315 t:23.6s +tttg: c208/333 lr:0.000311 t:23.7s +tttg: c209/333 lr:0.000306 t:23.8s +tttg: c210/333 lr:0.000302 t:23.9s +tttg: c211/333 lr:0.000298 t:24.1s +tttg: c212/333 lr:0.000293 t:24.2s +tttg: c213/333 lr:0.000289 t:24.3s +tttg: c214/333 lr:0.000285 t:24.4s +tttg: c215/333 lr:0.000281 t:24.5s +tttg: c216/333 lr:0.000276 t:24.6s +tttg: c217/333 lr:0.000272 t:24.7s +tttg: c218/333 lr:0.000268 t:24.8s +tttg: c219/333 lr:0.000264 t:25.0s +tttg: c220/333 lr:0.000260 t:25.1s +tttg: c221/333 lr:0.000255 t:25.2s +tttg: c222/333 lr:0.000251 t:25.3s +tttg: c223/333 lr:0.000247 t:25.4s +tttg: c224/333 lr:0.000243 t:25.5s +tttg: c225/333 lr:0.000239 t:25.6s +tttg: c226/333 lr:0.000235 t:25.7s +tttg: c227/333 lr:0.000231 t:25.9s +tttg: c228/333 lr:0.000227 t:26.0s +tttg: c229/333 lr:0.000223 t:26.1s +tttg: c230/333 lr:0.000219 t:26.2s +tttg: c231/333 lr:0.000215 t:26.3s +tttg: c232/333 lr:0.000211 t:26.4s +tttg: c233/333 lr:0.000208 t:26.5s +tttg: c234/333 lr:0.000204 t:26.6s +tttg: c235/333 lr:0.000200 t:26.8s +tttg: c236/333 lr:0.000196 t:26.9s +tttg: c237/333 lr:0.000193 t:27.0s +tttg: c238/333 lr:0.000189 t:27.1s +tttg: c239/333 lr:0.000185 t:27.2s +tttg: c240/333 lr:0.000181 t:27.3s +tttg: c241/333 lr:0.000178 t:27.4s +tttg: c242/333 lr:0.000174 t:27.6s +tttg: c243/333 lr:0.000171 t:27.7s +tttg: c244/333 lr:0.000167 t:27.8s +tttg: c245/333 lr:0.000164 t:27.9s +tttg: c246/333 lr:0.000160 t:28.0s +tttg: c247/333 lr:0.000157 t:28.1s +tttg: c248/333 lr:0.000153 t:28.2s +tttg: c249/333 lr:0.000150 t:28.3s +tttg: c250/333 lr:0.000146 t:28.5s +tttg: c251/333 lr:0.000143 t:28.6s +tttg: c252/333 lr:0.000140 t:28.7s +tttg: c253/333 lr:0.000137 t:28.8s +tttg: c254/333 lr:0.000133 t:28.9s +tttg: c255/333 lr:0.000130 t:29.0s +tttg: c256/333 lr:0.000127 t:29.1s +tttg: c257/333 lr:0.000124 t:29.2s +tttg: c258/333 lr:0.000121 t:29.4s +tttg: c259/333 lr:0.000118 t:29.5s +tttg: c260/333 lr:0.000115 t:29.6s +tttg: c261/333 lr:0.000112 t:29.7s +tttg: c262/333 lr:0.000109 t:29.8s +tttg: c263/333 lr:0.000106 t:29.9s +tttg: c264/333 lr:0.000103 t:30.0s +tttg: c265/333 lr:0.000100 t:30.1s +tttg: c266/333 lr:0.000097 t:30.3s +tttg: c267/333 lr:0.000094 t:30.4s +tttg: c268/333 lr:0.000092 t:30.5s +tttg: c269/333 lr:0.000089 t:30.6s +tttg: c270/333 lr:0.000086 t:30.7s +tttg: c271/333 lr:0.000084 t:30.8s +tttg: c272/333 lr:0.000081 t:30.9s +tttg: c273/333 lr:0.000078 t:31.1s +tttg: c274/333 lr:0.000076 t:31.2s +tttg: c275/333 lr:0.000073 t:31.3s +tttg: c276/333 lr:0.000071 t:31.4s +tttg: c277/333 lr:0.000069 t:31.5s +tttg: c278/333 lr:0.000066 t:31.6s +tttg: c279/333 lr:0.000064 t:31.7s +tttg: 
c280/333 lr:0.000062 t:31.8s +tttg: c281/333 lr:0.000059 t:32.0s +tttg: c282/333 lr:0.000057 t:32.1s +tttg: c283/333 lr:0.000055 t:32.2s +tttg: c284/333 lr:0.000053 t:32.3s +tttg: c285/333 lr:0.000051 t:32.4s +tttg: c286/333 lr:0.000049 t:32.5s +tttg: c287/333 lr:0.000047 t:32.6s +tttg: c288/333 lr:0.000045 t:32.8s +tttg: c289/333 lr:0.000043 t:32.9s +tttg: c290/333 lr:0.000041 t:33.0s +tttg: c291/333 lr:0.000039 t:33.1s +tttg: c292/333 lr:0.000037 t:33.2s +tttg: c293/333 lr:0.000035 t:33.3s +tttg: c294/333 lr:0.000034 t:33.4s +tttg: c295/333 lr:0.000032 t:33.5s +tttg: c296/333 lr:0.000030 t:33.7s +tttg: c297/333 lr:0.000029 t:33.8s +tttg: c298/333 lr:0.000027 t:33.9s +tttg: c299/333 lr:0.000026 t:34.0s +tttg: c300/333 lr:0.000024 t:34.1s +tttg: c301/333 lr:0.000023 t:34.2s +tttg: c302/333 lr:0.000021 t:34.3s +tttg: c303/333 lr:0.000020 t:34.5s +tttg: c304/333 lr:0.000019 t:34.6s +tttg: c305/333 lr:0.000017 t:34.7s +tttg: c306/333 lr:0.000016 t:34.8s +tttg: c307/333 lr:0.000015 t:34.9s +tttg: c308/333 lr:0.000014 t:35.0s +tttg: c309/333 lr:0.000013 t:35.1s +tttg: c310/333 lr:0.000012 t:35.2s +tttg: c311/333 lr:0.000011 t:35.4s +tttg: c312/333 lr:0.000010 t:35.5s +tttg: c313/333 lr:0.000009 t:35.6s +tttg: c314/333 lr:0.000008 t:35.7s +tttg: c315/333 lr:0.000007 t:35.8s +tttg: c316/333 lr:0.000006 t:35.9s +tttg: c317/333 lr:0.000006 t:36.0s +tttg: c318/333 lr:0.000005 t:36.2s +tttg: c319/333 lr:0.000004 t:36.3s +tttg: c320/333 lr:0.000004 t:36.4s +tttg: c321/333 lr:0.000003 t:36.5s +tttg: c322/333 lr:0.000003 t:36.6s +tttg: c323/333 lr:0.000002 t:36.7s +tttg: c324/333 lr:0.000002 t:36.8s +tttg: c325/333 lr:0.000001 t:36.9s +tttg: c326/333 lr:0.000001 t:37.1s +tttg: c327/333 lr:0.000001 t:37.2s +tttg: c328/333 lr:0.000001 t:37.3s +tttg: c329/333 lr:0.000000 t:37.4s +tttg: c330/333 lr:0.000000 t:37.5s +tttg: c331/333 lr:0.000000 t:37.6s +tttg: c332/333 lr:0.000000 t:37.7s +ttpr: phase:1/1 t:1688.2s +ttp: b750/782 bl:2.8367 bb:1.0702 rl:2.7026 rb:1.0766 dl:2638-2688 gd:1 +ttp: b749/782 bl:2.8363 bb:1.0912 rl:2.7046 rb:1.0768 dl:2580-2638 gd:1 +ttp: b748/782 bl:2.8065 bb:1.0747 rl:2.7060 rb:1.0768 dl:2539-2578 gd:1 +ttp: b747/782 bl:2.7875 bb:1.0602 rl:2.7072 rb:1.0765 dl:2501-2538 gd:1 +ttp: b746/782 bl:2.6789 bb:1.0548 rl:2.7068 rb:1.0762 dl:2459-2501 gd:1 +ttp: b745/782 bl:2.7910 bb:1.0910 rl:2.7079 rb:1.0764 dl:2421-2458 gd:1 +ttp: b744/782 bl:2.6576 bb:1.0588 rl:2.7072 rb:1.0762 dl:2388-2419 gd:1 +ttp: b743/782 bl:2.7154 bb:1.0454 rl:2.7073 rb:1.0758 dl:2355-2388 gd:1 +ttp: b742/782 bl:2.7877 bb:1.0678 rl:2.7083 rb:1.0757 dl:2319-2353 gd:1 +ttp: b741/782 bl:2.8088 bb:1.1057 rl:2.7095 rb:1.0761 dl:2286-2319 gd:1 +ttp: b740/782 bl:2.7383 bb:1.0338 rl:2.7098 rb:1.0756 dl:2254-2285 gd:1 +ttp: b739/782 bl:2.8270 bb:1.0734 rl:2.7111 rb:1.0755 dl:2227-2253 gd:1 +ttp: b738/782 bl:2.7501 bb:1.0551 rl:2.7116 rb:1.0753 dl:2194-2227 gd:1 +ttp: b737/782 bl:2.8020 bb:1.0684 rl:2.7125 rb:1.0752 dl:2165-2193 gd:1 +ttp: b736/782 bl:2.6761 bb:1.0431 rl:2.7122 rb:1.0749 dl:2140-2165 gd:1 +ttp: b735/782 bl:2.8340 bb:1.0792 rl:2.7134 rb:1.0749 dl:2116-2140 gd:1 +ttp: b734/782 bl:2.7765 bb:1.0588 rl:2.7140 rb:1.0748 dl:2091-2115 gd:1 +ttp: b733/782 bl:2.7595 bb:1.0530 rl:2.7145 rb:1.0745 dl:2062-2090 gd:1 +ttp: b732/782 bl:2.8234 bb:1.0989 rl:2.7155 rb:1.0748 dl:2041-2062 gd:1 +ttp: b731/782 bl:2.7770 bb:1.0599 rl:2.7161 rb:1.0746 dl:2017-2041 gd:1 +ttp: b730/782 bl:2.7708 bb:1.0908 rl:2.7166 rb:1.0748 dl:1995-2016 gd:1 +ttp: b729/782 bl:2.7234 bb:1.0379 rl:2.7167 rb:1.0744 dl:1978-1994 gd:1 +ttp: b728/782 
bl:2.7598 bb:1.0689 rl:2.7170 rb:1.0744 dl:1960-1977 gd:1 +ttp: b727/782 bl:2.7748 bb:1.0563 rl:2.7175 rb:1.0742 dl:1936-1960 gd:1 +ttp: b726/782 bl:2.7996 bb:1.0654 rl:2.7182 rb:1.0742 dl:1915-1936 gd:1 +ttp: b725/782 bl:2.7624 bb:1.0707 rl:2.7186 rb:1.0741 dl:1900-1915 gd:1 +ttp: b724/782 bl:2.7554 bb:1.0533 rl:2.7189 rb:1.0739 dl:1885-1900 gd:1 +ttp: b723/782 bl:2.7832 bb:1.0618 rl:2.7194 rb:1.0738 dl:1861-1885 gd:1 +ttp: b722/782 bl:2.7739 bb:1.0606 rl:2.7199 rb:1.0737 dl:1846-1861 gd:1 +ttp: b721/782 bl:2.7512 bb:1.0269 rl:2.7201 rb:1.0734 dl:1832-1846 gd:1 +ttp: b720/782 bl:2.8252 bb:1.0791 rl:2.7209 rb:1.0734 dl:1816-1832 gd:1 +ttp: b719/782 bl:2.6828 bb:1.0287 rl:2.7206 rb:1.0731 dl:1793-1816 gd:1 +ttp: b718/782 bl:2.7803 bb:1.0717 rl:2.7211 rb:1.0730 dl:1773-1792 gd:1 +ttp: b717/782 bl:2.7916 bb:1.0514 rl:2.7216 rb:1.0729 dl:1754-1773 gd:1 +ttp: b716/782 bl:2.8090 bb:1.0366 rl:2.7222 rb:1.0726 dl:1739-1754 gd:1 +ttp: b715/782 bl:2.6452 bb:1.0394 rl:2.7217 rb:1.0724 dl:1725-1739 gd:1 +ttp: b714/782 bl:2.8127 bb:1.0706 rl:2.7223 rb:1.0724 dl:1711-1725 gd:1 +ttp: b713/782 bl:2.8343 bb:1.0457 rl:2.7230 rb:1.0722 dl:1697-1711 gd:1 +ttp: b712/782 bl:2.8326 bb:1.0784 rl:2.7238 rb:1.0722 dl:1684-1697 gd:1 +ttp: b711/782 bl:2.7762 bb:1.0454 rl:2.7241 rb:1.0720 dl:1673-1683 gd:1 +ttp: b710/782 bl:2.7609 bb:1.0704 rl:2.7244 rb:1.0720 dl:1661-1673 gd:1 +ttp: b709/782 bl:2.7850 bb:1.0582 rl:2.7248 rb:1.0719 dl:1649-1661 gd:1 +ttp: b708/782 bl:2.7259 bb:1.0475 rl:2.7248 rb:1.0718 dl:1639-1649 gd:1 +ttp: b707/782 bl:2.7735 bb:1.0842 rl:2.7251 rb:1.0718 dl:1627-1638 gd:1 +ttp: b706/782 bl:2.7176 bb:1.0447 rl:2.7250 rb:1.0717 dl:1617-1627 gd:1 +ttp: b705/782 bl:2.7863 bb:1.0732 rl:2.7254 rb:1.0717 dl:1606-1617 gd:1 +ttp: b704/782 bl:2.7473 bb:1.0246 rl:2.7255 rb:1.0714 dl:1595-1606 gd:1 +ttp: b703/782 bl:2.9202 bb:1.1046 rl:2.7267 rb:1.0716 dl:1582-1594 gd:1 +ttp: b702/782 bl:2.8051 bb:1.0670 rl:2.7272 rb:1.0715 dl:1572-1581 gd:1 +ttp: b701/782 bl:2.7571 bb:1.0485 rl:2.7273 rb:1.0714 dl:1562-1572 gd:1 +ttp: b700/782 bl:2.6743 bb:1.0438 rl:2.7270 rb:1.0713 dl:1552-1562 gd:1 +ttp: b699/782 bl:2.8201 bb:1.0436 rl:2.7276 rb:1.0711 dl:1543-1552 gd:1 +ttp: b698/782 bl:2.7839 bb:1.0318 rl:2.7279 rb:1.0709 dl:1534-1543 gd:1 +ttp: b697/782 bl:2.7639 bb:1.0413 rl:2.7281 rb:1.0707 dl:1522-1534 gd:1 +ttp: b696/782 bl:2.8098 bb:1.0740 rl:2.7285 rb:1.0707 dl:1513-1522 gd:1 +ttp: b695/782 bl:2.7836 bb:1.0792 rl:2.7288 rb:1.0707 dl:1504-1513 gd:1 +ttp: b694/782 bl:2.7629 bb:1.0664 rl:2.7290 rb:1.0707 dl:1494-1504 gd:1 +ttp: b693/782 bl:2.8173 bb:1.1052 rl:2.7295 rb:1.0709 dl:1485-1494 gd:1 +ttp: b692/782 bl:2.7675 bb:1.0500 rl:2.7297 rb:1.0708 dl:1477-1484 gd:1 +ttp: b691/782 bl:2.6958 bb:1.0409 rl:2.7295 rb:1.0706 dl:1467-1476 gd:1 +ttp: b690/782 bl:2.8382 bb:1.0635 rl:2.7300 rb:1.0706 dl:1458-1467 gd:1 +ttp: b689/782 bl:2.7798 bb:1.0636 rl:2.7303 rb:1.0706 dl:1450-1458 gd:1 +ttp: b688/782 bl:2.7484 bb:1.0485 rl:2.7304 rb:1.0704 dl:1441-1450 gd:1 +ttp: b687/782 bl:2.7185 bb:1.0498 rl:2.7303 rb:1.0703 dl:1432-1441 gd:1 +ttp: b686/782 bl:2.8049 bb:1.0538 rl:2.7307 rb:1.0703 dl:1422-1432 gd:1 +ttp: b685/782 bl:2.7760 bb:1.0640 rl:2.7309 rb:1.0702 dl:1414-1422 gd:1 +ttp: b684/782 bl:2.7973 bb:1.0754 rl:2.7312 rb:1.0703 dl:1407-1414 gd:1 +ttp: b683/782 bl:2.7776 bb:1.0694 rl:2.7315 rb:1.0702 dl:1400-1406 gd:1 +ttp: b682/782 bl:2.8081 bb:1.0727 rl:2.7318 rb:1.0703 dl:1393-1400 gd:1 +ttp: b681/782 bl:2.8186 bb:1.0701 rl:2.7322 rb:1.0703 dl:1383-1393 gd:1 +ttp: b680/782 bl:2.8021 bb:1.0541 rl:2.7325 rb:1.0702 
dl:1375-1383 gd:1 +ttp: b679/782 bl:2.8522 bb:1.0866 rl:2.7331 rb:1.0703 dl:1368-1374 gd:1 +ttp: b678/782 bl:2.7870 bb:1.0491 rl:2.7333 rb:1.0702 dl:1361-1368 gd:1 +ttp: b677/782 bl:2.8706 bb:1.1128 rl:2.7339 rb:1.0704 dl:1353-1360 gd:1 +ttp: b676/782 bl:2.7916 bb:1.0666 rl:2.7342 rb:1.0703 dl:1347-1353 gd:1 +ttp: b675/782 bl:2.8406 bb:1.0664 rl:2.7347 rb:1.0703 dl:1341-1347 gd:1 +ttp: b674/782 bl:2.7860 bb:1.0572 rl:2.7349 rb:1.0703 dl:1334-1341 gd:1 +ttp: b673/782 bl:2.8155 bb:1.0573 rl:2.7352 rb:1.0702 dl:1327-1334 gd:1 +ttp: b672/782 bl:2.9053 bb:1.1084 rl:2.7359 rb:1.0704 dl:1321-1327 gd:1 +ttp: b671/782 bl:2.8837 bb:1.1176 rl:2.7366 rb:1.0706 dl:1316-1321 gd:1 +ttp: b670/782 bl:2.8286 bb:1.0576 rl:2.7370 rb:1.0705 dl:1308-1315 gd:1 +ttp: b669/782 bl:2.7811 bb:1.0546 rl:2.7371 rb:1.0704 dl:1301-1308 gd:1 +ttp: b668/782 bl:2.7948 bb:1.0593 rl:2.7374 rb:1.0704 dl:1295-1301 gd:1 +ttp: b667/782 bl:2.8211 bb:1.1051 rl:2.7377 rb:1.0705 dl:1288-1295 gd:1 +ttp: b666/782 bl:2.8211 bb:1.0603 rl:2.7381 rb:1.0705 dl:1282-1288 gd:1 +ttp: b665/782 bl:2.7388 bb:1.0321 rl:2.7381 rb:1.0703 dl:1275-1282 gd:1 +ttp: b664/782 bl:2.7021 bb:1.0418 rl:2.7379 rb:1.0702 dl:1270-1275 gd:1 +ttp: b663/782 bl:2.7959 bb:1.0612 rl:2.7381 rb:1.0702 dl:1264-1269 gd:1 +ttp: b662/782 bl:2.8090 bb:1.0719 rl:2.7384 rb:1.0702 dl:1258-1263 gd:1 +ttp: b661/782 bl:2.7172 bb:1.0187 rl:2.7383 rb:1.0700 dl:1251-1258 gd:1 +ttp: b660/782 bl:2.8542 bb:1.0921 rl:2.7388 rb:1.0701 dl:1245-1250 gd:1 +ttp: b659/782 bl:2.7138 bb:1.0219 rl:2.7387 rb:1.0699 dl:1239-1245 gd:1 +ttp: b658/782 bl:2.8130 bb:1.0767 rl:2.7390 rb:1.0699 dl:1234-1239 gd:1 +ttp: b657/782 bl:2.7838 bb:1.0454 rl:2.7391 rb:1.0698 dl:1227-1234 gd:1 +ttp: b656/782 bl:2.7476 bb:1.0373 rl:2.7392 rb:1.0697 dl:1220-1227 gd:1 +ttp: b655/782 bl:2.6845 bb:1.0211 rl:2.7390 rb:1.0695 dl:1215-1220 gd:1 +ttp: b654/782 bl:2.7321 bb:1.0371 rl:2.7389 rb:1.0694 dl:1209-1215 gd:1 +ttp: b653/782 bl:2.7574 bb:1.0345 rl:2.7390 rb:1.0693 dl:1203-1209 gd:1 +ttp: b652/782 bl:2.7978 bb:1.0720 rl:2.7392 rb:1.0693 dl:1198-1203 gd:1 +ttp: b651/782 bl:2.7209 bb:1.0450 rl:2.7391 rb:1.0692 dl:1193-1198 gd:1 +ttp: b650/782 bl:2.7931 bb:1.0755 rl:2.7393 rb:1.0692 dl:1188-1193 gd:1 +ttp: b649/782 bl:2.8111 bb:1.0603 rl:2.7396 rb:1.0692 dl:1183-1188 gd:1 +ttp: b648/782 bl:2.7476 bb:1.0415 rl:2.7396 rb:1.0691 dl:1177-1182 gd:1 +ttp: b647/782 bl:2.7555 bb:1.0490 rl:2.7397 rb:1.0690 dl:1171-1177 gd:1 +ttp: b646/782 bl:2.7671 bb:1.0715 rl:2.7398 rb:1.0690 dl:1166-1171 gd:1 +ttp: b645/782 bl:2.7961 bb:1.0940 rl:2.7400 rb:1.0691 dl:1160-1166 gd:1 +ttp: b644/782 bl:2.7312 bb:1.0305 rl:2.7399 rb:1.0690 dl:1155-1160 gd:1 +ttp: b643/782 bl:2.7933 bb:1.0649 rl:2.7401 rb:1.0689 dl:1150-1155 gd:1 +ttp: b642/782 bl:2.7797 bb:1.0814 rl:2.7402 rb:1.0690 dl:1144-1150 gd:1 +ttp: b641/782 bl:2.7706 bb:1.0431 rl:2.7403 rb:1.0689 dl:1140-1144 gd:1 +ttp: b640/782 bl:2.7867 bb:1.0846 rl:2.7405 rb:1.0689 dl:1134-1140 gd:1 +ttp: b639/782 bl:2.8563 bb:1.0820 rl:2.7409 rb:1.0690 dl:1129-1134 gd:1 +ttp: b638/782 bl:2.8430 bb:1.0486 rl:2.7412 rb:1.0689 dl:1123-1129 gd:1 +ttp: b637/782 bl:2.8047 bb:1.0806 rl:2.7414 rb:1.0690 dl:1120-1123 gd:1 +ttp: b636/782 bl:2.7606 bb:1.0709 rl:2.7414 rb:1.0690 dl:1116-1120 gd:1 +ttp: b635/782 bl:2.7416 bb:1.0613 rl:2.7414 rb:1.0689 dl:1111-1116 gd:1 +ttp: b634/782 bl:2.7011 bb:1.0428 rl:2.7413 rb:1.0689 dl:1105-1111 gd:1 +ttp: b633/782 bl:2.8315 bb:1.1048 rl:2.7416 rb:1.0690 dl:1101-1105 gd:1 +ttp: b632/782 bl:2.7384 bb:1.0286 rl:2.7416 rb:1.0688 dl:1096-1101 gd:1 +ttp: b631/782 bl:2.7658 
bb:1.0619 rl:2.7417 rb:1.0688 dl:1092-1096 gd:1 +ttp: b630/782 bl:2.8339 bb:1.0612 rl:2.7419 rb:1.0688 dl:1087-1092 gd:1 +ttp: b629/782 bl:2.7275 bb:1.0450 rl:2.7419 rb:1.0687 dl:1082-1086 gd:1 +ttp: b628/782 bl:2.7724 bb:1.0484 rl:2.7420 rb:1.0687 dl:1078-1082 gd:1 +ttp: b627/782 bl:2.7337 bb:1.0348 rl:2.7420 rb:1.0686 dl:1073-1077 gd:1 +ttp: b626/782 bl:2.8171 bb:1.0468 rl:2.7422 rb:1.0685 dl:1068-1073 gd:1 +ttp: b625/782 bl:2.6690 bb:1.0027 rl:2.7420 rb:1.0683 dl:1064-1068 gd:1 +ttp: b624/782 bl:2.7930 bb:1.0747 rl:2.7421 rb:1.0683 dl:1060-1064 gd:1 +ttp: b623/782 bl:2.7915 bb:1.0744 rl:2.7423 rb:1.0683 dl:1055-1060 gd:1 +ttp: b622/782 bl:2.8466 bb:1.0772 rl:2.7426 rb:1.0684 dl:1050-1055 gd:1 +ttp: b621/782 bl:2.8434 bb:1.0890 rl:2.7428 rb:1.0684 dl:1046-1050 gd:1 +ttp: b620/782 bl:2.7817 bb:1.0425 rl:2.7429 rb:1.0683 dl:1041-1046 gd:1 +ttp: b619/782 bl:2.7957 bb:1.0591 rl:2.7431 rb:1.0683 dl:1037-1041 gd:1 +ttp: b618/782 bl:2.7362 bb:1.0490 rl:2.7431 rb:1.0683 dl:1031-1037 gd:1 +ttp: b617/782 bl:2.7421 bb:1.0378 rl:2.7431 rb:1.0682 dl:1027-1031 gd:1 +ttp: b616/782 bl:2.8537 bb:1.0882 rl:2.7434 rb:1.0682 dl:1024-1027 gd:1 +ttp: b615/782 bl:2.8397 bb:1.0662 rl:2.7436 rb:1.0682 dl:1020-1023 gd:1 +ttp: b614/782 bl:2.7886 bb:1.0666 rl:2.7438 rb:1.0682 dl:1016-1020 gd:1 +ttp: b613/782 bl:2.8243 bb:1.0630 rl:2.7440 rb:1.0682 dl:1012-1016 gd:1 +ttp: b612/782 bl:2.8247 bb:1.0434 rl:2.7442 rb:1.0681 dl:1007-1012 gd:1 +ttp: b611/782 bl:2.7652 bb:1.0704 rl:2.7442 rb:1.0681 dl:1004-1007 gd:1 +ttp: b610/782 bl:2.8329 bb:1.0636 rl:2.7445 rb:1.0681 dl:999-1004 gd:1 +ttp: b609/782 bl:2.7927 bb:1.0601 rl:2.7446 rb:1.0681 dl:994-999 gd:1 +ttp: b608/782 bl:2.7367 bb:1.0329 rl:2.7446 rb:1.0680 dl:990-994 gd:1 +ttp: b607/782 bl:2.6933 bb:1.0380 rl:2.7444 rb:1.0679 dl:986-990 gd:1 +ttp: b606/782 bl:2.8168 bb:1.0838 rl:2.7446 rb:1.0680 dl:982-986 gd:1 +ttp: b605/782 bl:2.7417 bb:1.0576 rl:2.7446 rb:1.0680 dl:978-982 gd:1 +ttp: b604/782 bl:2.7271 bb:1.0367 rl:2.7446 rb:1.0679 dl:974-978 gd:1 +ttp: b603/782 bl:2.8344 bb:1.0857 rl:2.7448 rb:1.0679 dl:971-974 gd:1 +ttp: b602/782 bl:2.7805 bb:1.0400 rl:2.7449 rb:1.0678 dl:966-971 gd:1 +ttp: b601/782 bl:2.7693 bb:1.0643 rl:2.7450 rb:1.0678 dl:963-966 gd:1 +ttp: b600/782 bl:2.7948 bb:1.0609 rl:2.7451 rb:1.0678 dl:958-963 gd:1 +ttp: b599/782 bl:2.7377 bb:1.0515 rl:2.7451 rb:1.0678 dl:954-958 gd:1 +ttp: b598/782 bl:2.8097 bb:1.0702 rl:2.7452 rb:1.0678 dl:950-954 gd:1 +ttp: b597/782 bl:2.7746 bb:1.0418 rl:2.7453 rb:1.0677 dl:947-950 gd:1 +ttp: b596/782 bl:2.7756 bb:1.0630 rl:2.7454 rb:1.0677 dl:943-947 gd:1 +ttp: b595/782 bl:2.7337 bb:1.0569 rl:2.7453 rb:1.0677 dl:940-943 gd:1 +ttp: b594/782 bl:2.9075 bb:1.1043 rl:2.7457 rb:1.0678 dl:937-940 gd:1 +ttp: b593/782 bl:2.7993 bb:1.0469 rl:2.7458 rb:1.0677 dl:933-937 gd:1 +ttp: b592/782 bl:2.7887 bb:1.0499 rl:2.7459 rb:1.0677 dl:930-933 gd:1 +ttp: b591/782 bl:2.6682 bb:1.0082 rl:2.7458 rb:1.0675 dl:927-930 gd:1 +ttp: b590/782 bl:2.7337 bb:1.0288 rl:2.7457 rb:1.0674 dl:924-927 gd:1 +ttp: b589/782 bl:2.7544 bb:1.0544 rl:2.7457 rb:1.0674 dl:921-924 gd:1 +ttp: b588/782 bl:2.7452 bb:1.0473 rl:2.7457 rb:1.0674 dl:917-921 gd:1 +ttp: b587/782 bl:2.7797 bb:1.0614 rl:2.7458 rb:1.0674 dl:914-917 gd:1 +ttp: b586/782 bl:2.7257 bb:1.0142 rl:2.7458 rb:1.0672 dl:911-914 gd:1 +ttp: b585/782 bl:2.7673 bb:1.0670 rl:2.7458 rb:1.0672 dl:908-911 gd:1 +ttp: b584/782 bl:2.7677 bb:1.0399 rl:2.7459 rb:1.0672 dl:904-907 gd:1 +ttp: b583/782 bl:2.8000 bb:1.0922 rl:2.7460 rb:1.0672 dl:901-904 gd:1 +ttp: b582/782 bl:2.8633 bb:1.0921 rl:2.7463 rb:1.0673 
dl:897-901 gd:1 +ttp: b581/782 bl:2.7248 bb:1.0165 rl:2.7462 rb:1.0672 dl:894-897 gd:1 +ttp: b580/782 bl:2.7276 bb:1.0363 rl:2.7462 rb:1.0671 dl:891-894 gd:1 +ttp: b579/782 bl:2.6357 bb:1.0045 rl:2.7459 rb:1.0670 dl:888-891 gd:1 +ttp: b578/782 bl:2.8079 bb:1.0698 rl:2.7461 rb:1.0670 dl:884-887 gd:1 +ttp: b577/782 bl:2.7537 bb:1.0416 rl:2.7461 rb:1.0669 dl:880-884 gd:1 +ttp: b576/782 bl:2.7779 bb:1.0462 rl:2.7461 rb:1.0669 dl:877-880 gd:1 +ttp: b575/782 bl:2.7964 bb:1.0529 rl:2.7463 rb:1.0668 dl:874-877 gd:1 +ttp: b574/782 bl:2.7853 bb:1.0404 rl:2.7463 rb:1.0668 dl:871-874 gd:1 +ttp: b573/782 bl:2.9310 bb:1.0727 rl:2.7467 rb:1.0668 dl:868-871 gd:1 +ttp: b572/782 bl:2.9431 bb:1.1200 rl:2.7471 rb:1.0669 dl:865-868 gd:1 +ttp: b571/782 bl:2.7067 bb:1.0325 rl:2.7470 rb:1.0668 dl:862-865 gd:1 +ttp: b570/782 bl:2.7726 bb:1.0790 rl:2.7471 rb:1.0668 dl:858-862 gd:1 +ttp: b569/782 bl:2.7612 bb:1.0550 rl:2.7471 rb:1.0668 dl:855-858 gd:1 +ttp: b568/782 bl:2.7992 bb:1.0559 rl:2.7472 rb:1.0668 dl:852-855 gd:1 +ttp: b567/782 bl:2.6722 bb:1.0292 rl:2.7471 rb:1.0667 dl:849-852 gd:1 +ttp: b566/782 bl:2.7184 bb:1.0331 rl:2.7470 rb:1.0667 dl:846-849 gd:1 +ttp: b565/782 bl:2.7736 bb:1.0628 rl:2.7471 rb:1.0666 dl:843-846 gd:1 +ttp: b564/782 bl:2.8631 bb:1.1077 rl:2.7473 rb:1.0667 dl:840-843 gd:1 +ttp: b563/782 bl:2.8029 bb:1.0633 rl:2.7474 rb:1.0667 dl:837-840 gd:1 +ttp: b562/782 bl:2.7132 bb:1.0255 rl:2.7473 rb:1.0666 dl:834-837 gd:1 +ttp: b561/782 bl:2.7125 bb:1.0638 rl:2.7473 rb:1.0666 dl:831-834 gd:1 +ttp: b560/782 bl:2.8166 bb:1.0908 rl:2.7474 rb:1.0667 dl:828-831 gd:1 +ttp: b559/782 bl:2.7537 bb:1.0466 rl:2.7474 rb:1.0666 dl:824-827 gd:1 +ttp: b558/782 bl:2.7006 bb:1.0228 rl:2.7473 rb:1.0666 dl:821-824 gd:1 +ttp: b557/782 bl:2.8017 bb:1.0446 rl:2.7474 rb:1.0665 dl:818-821 gd:1 +ttp: b556/782 bl:2.8382 bb:1.0851 rl:2.7476 rb:1.0665 dl:815-818 gd:1 +ttp: b555/782 bl:2.7608 bb:1.0536 rl:2.7476 rb:1.0665 dl:812-815 gd:1 +ttp: b554/782 bl:2.7364 bb:1.0300 rl:2.7476 rb:1.0665 dl:809-812 gd:1 +ttp: b553/782 bl:2.7666 bb:1.0600 rl:2.7476 rb:1.0664 dl:806-809 gd:1 +ttp: b552/782 bl:2.8036 bb:1.0448 rl:2.7478 rb:1.0664 dl:804-806 gd:1 +ttp: b551/782 bl:2.8248 bb:1.0647 rl:2.7479 rb:1.0664 dl:801-804 gd:1 +ttp: b550/782 bl:2.8053 bb:1.0764 rl:2.7480 rb:1.0664 dl:798-801 gd:1 +ttp: b549/782 bl:2.7672 bb:1.0647 rl:2.7480 rb:1.0664 dl:795-798 gd:1 +ttp: b548/782 bl:2.7622 bb:1.0474 rl:2.7481 rb:1.0664 dl:793-795 gd:1 +ttp: b547/782 bl:2.7339 bb:1.0325 rl:2.7480 rb:1.0663 dl:790-793 gd:1 +ttp: b546/782 bl:2.8261 bb:1.0730 rl:2.7482 rb:1.0663 dl:788-790 gd:1 +ttp: b545/782 bl:2.7881 bb:1.0543 rl:2.7482 rb:1.0663 dl:785-788 gd:1 +ttp: b544/782 bl:2.7550 bb:1.0433 rl:2.7483 rb:1.0663 dl:782-785 gd:1 +ttp: b543/782 bl:2.7875 bb:1.0464 rl:2.7483 rb:1.0662 dl:779-782 gd:1 +ttp: b542/782 bl:2.8345 bb:1.0737 rl:2.7485 rb:1.0662 dl:777-779 gd:1 +ttp: b541/782 bl:2.8020 bb:1.0605 rl:2.7486 rb:1.0662 dl:774-776 gd:1 +ttp: b540/782 bl:2.7012 bb:1.0191 rl:2.7485 rb:1.0661 dl:771-774 gd:1 +ttp: b539/782 bl:2.7309 bb:1.0456 rl:2.7485 rb:1.0661 dl:769-771 gd:1 +ttp: b538/782 bl:2.6854 bb:1.0386 rl:2.7484 rb:1.0661 dl:767-769 gd:1 +ttp: b537/782 bl:2.7145 bb:1.0264 rl:2.7483 rb:1.0660 dl:764-767 gd:1 +ttp: b536/782 bl:2.7868 bb:1.0749 rl:2.7484 rb:1.0660 dl:762-764 gd:1 +ttp: b535/782 bl:2.7893 bb:1.0576 rl:2.7484 rb:1.0660 dl:759-762 gd:1 +ttp: b534/782 bl:2.8194 bb:1.0725 rl:2.7485 rb:1.0660 dl:757-759 gd:1 +ttp: b533/782 bl:2.7717 bb:1.0352 rl:2.7486 rb:1.0659 dl:754-757 gd:1 +ttp: b532/782 bl:2.8174 bb:1.0579 rl:2.7487 rb:1.0659 
dl:752-754 gd:1 +ttp: b531/782 bl:2.7754 bb:1.0527 rl:2.7487 rb:1.0659 dl:750-752 gd:1 +ttp: b530/782 bl:2.8058 bb:1.0386 rl:2.7488 rb:1.0659 dl:747-750 gd:1 +ttp: b529/782 bl:2.7776 bb:1.0579 rl:2.7489 rb:1.0658 dl:745-747 gd:1 +ttp: b528/782 bl:2.7532 bb:1.0314 rl:2.7489 rb:1.0658 dl:742-745 gd:1 +ttp: b527/782 bl:2.7436 bb:1.0426 rl:2.7489 rb:1.0658 dl:739-742 gd:1 +ttp: b526/782 bl:2.7683 bb:1.0571 rl:2.7489 rb:1.0657 dl:737-739 gd:1 +ttp: b525/782 bl:2.7855 bb:1.0717 rl:2.7490 rb:1.0657 dl:735-737 gd:1 +ttp: b524/782 bl:2.8138 bb:1.0515 rl:2.7491 rb:1.0657 dl:732-735 gd:1 +ttp: b523/782 bl:2.8129 bb:1.0564 rl:2.7492 rb:1.0657 dl:730-732 gd:1 +ttp: b522/782 bl:2.8237 bb:1.0854 rl:2.7493 rb:1.0657 dl:727-730 gd:1 +ttp: b521/782 bl:2.7704 bb:1.0512 rl:2.7493 rb:1.0657 dl:725-727 gd:1 +ttp: b520/782 bl:2.7890 bb:1.0570 rl:2.7494 rb:1.0657 dl:723-725 gd:1 +ttp: b519/782 bl:2.7287 bb:1.0348 rl:2.7494 rb:1.0657 dl:720-723 gd:1 +ttp: b518/782 bl:2.7293 bb:1.0512 rl:2.7493 rb:1.0656 dl:717-720 gd:1 +ttp: b517/782 bl:2.7780 bb:1.0515 rl:2.7494 rb:1.0656 dl:715-717 gd:1 +ttp: b516/782 bl:2.8609 bb:1.0771 rl:2.7496 rb:1.0656 dl:713-715 gd:1 +ttp: b515/782 bl:2.7878 bb:1.0745 rl:2.7496 rb:1.0656 dl:710-713 gd:1 +ttp: b514/782 bl:2.9118 bb:1.0982 rl:2.7499 rb:1.0657 dl:707-710 gd:1 +ttp: b513/782 bl:2.7376 bb:1.0133 rl:2.7498 rb:1.0656 dl:705-707 gd:1 +ttp: b512/782 bl:2.7863 bb:1.0578 rl:2.7499 rb:1.0656 dl:703-705 gd:1 +ttp: b511/782 bl:2.7693 bb:1.0459 rl:2.7499 rb:1.0656 dl:700-703 gd:1 +ttp: b510/782 bl:2.7573 bb:1.0198 rl:2.7499 rb:1.0655 dl:698-700 gd:1 +ttp: b509/782 bl:2.7485 bb:1.0696 rl:2.7499 rb:1.0655 dl:695-698 gd:1 +ttp: b508/782 bl:2.7584 bb:1.0308 rl:2.7499 rb:1.0654 dl:693-695 gd:1 +ttp: b507/782 bl:2.7568 bb:1.0407 rl:2.7500 rb:1.0654 dl:690-693 gd:1 +ttp: b506/782 bl:2.8107 bb:1.0767 rl:2.7500 rb:1.0654 dl:688-690 gd:1 +ttp: b505/782 bl:2.7819 bb:1.0629 rl:2.7501 rb:1.0654 dl:686-688 gd:1 +ttp: b504/782 bl:2.8660 bb:1.0982 rl:2.7503 rb:1.0655 dl:685-686 gd:1 +ttp: b503/782 bl:2.8289 bb:1.0773 rl:2.7504 rb:1.0655 dl:683-685 gd:1 +ttp: b502/782 bl:2.8296 bb:1.0625 rl:2.7505 rb:1.0655 dl:680-682 gd:1 +ttp: b501/782 bl:2.7905 bb:1.0395 rl:2.7505 rb:1.0654 dl:677-680 gd:1 +ttp: b500/782 bl:2.8373 bb:1.0838 rl:2.7507 rb:1.0655 dl:675-677 gd:1 +ttp: b499/782 bl:2.7875 bb:1.0520 rl:2.7507 rb:1.0655 dl:673-675 gd:1 +ttp: b498/782 bl:2.6785 bb:1.0369 rl:2.7506 rb:1.0654 dl:671-673 gd:1 +ttp: b497/782 bl:2.8341 bb:1.0806 rl:2.7507 rb:1.0654 dl:668-671 gd:1 +ttp: b496/782 bl:2.8344 bb:1.0505 rl:2.7509 rb:1.0654 dl:666-668 gd:1 +ttp: b495/782 bl:2.7658 bb:1.0560 rl:2.7509 rb:1.0654 dl:664-666 gd:1 +ttp: b494/782 bl:2.7949 bb:1.0536 rl:2.7509 rb:1.0654 dl:661-664 gd:1 +ttp: b493/782 bl:2.8466 bb:1.1163 rl:2.7511 rb:1.0654 dl:659-661 gd:1 +ttp: b492/782 bl:2.8105 bb:1.0569 rl:2.7512 rb:1.0654 dl:657-659 gd:1 +ttp: b491/782 bl:2.7387 bb:1.0319 rl:2.7511 rb:1.0654 dl:655-657 gd:1 +ttp: b490/782 bl:2.8551 bb:1.0910 rl:2.7513 rb:1.0654 dl:653-655 gd:1 +ttp: b489/782 bl:2.7967 bb:1.0814 rl:2.7513 rb:1.0654 dl:651-653 gd:1 +ttp: b488/782 bl:2.8224 bb:1.0518 rl:2.7514 rb:1.0654 dl:649-651 gd:1 +ttp: b487/782 bl:2.8065 bb:1.0722 rl:2.7515 rb:1.0654 dl:647-649 gd:1 +ttp: b486/782 bl:2.7935 bb:1.0603 rl:2.7516 rb:1.0654 dl:645-646 gd:1 +ttp: b485/782 bl:2.7876 bb:1.0491 rl:2.7516 rb:1.0654 dl:643-645 gd:1 +ttp: b484/782 bl:2.8050 bb:1.0705 rl:2.7517 rb:1.0654 dl:641-643 gd:1 +ttp: b483/782 bl:2.7493 bb:1.0514 rl:2.7517 rb:1.0654 dl:639-641 gd:1 +ttp: b482/782 bl:2.7575 bb:1.0822 rl:2.7517 rb:1.0654 
dl:637-639 gd:1 +ttp: b481/782 bl:2.7978 bb:1.1000 rl:2.7518 rb:1.0655 dl:635-637 gd:1 +ttp: b480/782 bl:2.7969 bb:1.0559 rl:2.7518 rb:1.0654 dl:632-635 gd:1 +ttp: b479/782 bl:2.7096 bb:1.0342 rl:2.7518 rb:1.0654 dl:630-632 gd:1 +ttp: b478/782 bl:2.7957 bb:1.0528 rl:2.7518 rb:1.0654 dl:628-630 gd:1 +ttp: b477/782 bl:2.7747 bb:1.0541 rl:2.7518 rb:1.0654 dl:626-628 gd:1 +ttp: b476/782 bl:2.7550 bb:1.0522 rl:2.7518 rb:1.0654 dl:624-626 gd:1 +ttp: b475/782 bl:2.7261 bb:1.0220 rl:2.7518 rb:1.0653 dl:622-623 gd:1 +ttp: b474/782 bl:2.7593 bb:1.0521 rl:2.7518 rb:1.0653 dl:620-622 gd:1 +ttp: b473/782 bl:2.8361 bb:1.0791 rl:2.7519 rb:1.0653 dl:618-620 gd:1 +ttp: b472/782 bl:2.8029 bb:1.0715 rl:2.7520 rb:1.0653 dl:616-618 gd:1 +ttp: b471/782 bl:2.8460 bb:1.0723 rl:2.7521 rb:1.0653 dl:614-616 gd:1 +ttp: b470/782 bl:2.8708 bb:1.0967 rl:2.7523 rb:1.0654 dl:611-613 gd:1 +ttp: b469/782 bl:2.8018 bb:1.1141 rl:2.7523 rb:1.0654 dl:610-611 gd:1 +ttp: b468/782 bl:2.7929 bb:1.0602 rl:2.7524 rb:1.0654 dl:608-610 gd:1 +ttp: b467/782 bl:2.7969 bb:1.0566 rl:2.7524 rb:1.0654 dl:606-608 gd:1 +ttp: b466/782 bl:2.8045 bb:1.0662 rl:2.7525 rb:1.0654 dl:604-606 gd:1 +ttp: b465/782 bl:2.8100 bb:1.0600 rl:2.7526 rb:1.0654 dl:602-604 gd:1 +ttp: b464/782 bl:2.7105 bb:1.0742 rl:2.7525 rb:1.0654 dl:600-602 gd:1 +ttp: b463/782 bl:2.8056 bb:1.0770 rl:2.7526 rb:1.0654 dl:599-600 gd:1 +ttp: b462/782 bl:2.8689 bb:1.0689 rl:2.7527 rb:1.0654 dl:597-599 gd:1 +ttp: b461/782 bl:2.7768 bb:1.0589 rl:2.7527 rb:1.0654 dl:595-597 gd:1 +ttp: b460/782 bl:2.7988 bb:1.0616 rl:2.7528 rb:1.0654 dl:593-595 gd:1 +ttp: b459/782 bl:2.7437 bb:1.0411 rl:2.7528 rb:1.0654 dl:591-593 gd:1 +ttp: b458/782 bl:2.8164 bb:1.0676 rl:2.7529 rb:1.0654 dl:589-591 gd:1 +ttp: b457/782 bl:2.7641 bb:1.0495 rl:2.7529 rb:1.0654 dl:587-589 gd:1 +ttp: b456/782 bl:2.8144 bb:1.0688 rl:2.7529 rb:1.0654 dl:586-587 gd:1 +ttp: b455/782 bl:2.8026 bb:1.0747 rl:2.7530 rb:1.0654 dl:584-586 gd:1 +ttp: b454/782 bl:2.8366 bb:1.0741 rl:2.7531 rb:1.0654 dl:582-584 gd:1 +ttp: b453/782 bl:2.7548 bb:1.0571 rl:2.7531 rb:1.0654 dl:580-582 gd:1 +ttp: b452/782 bl:2.7422 bb:1.0578 rl:2.7531 rb:1.0654 dl:579-580 gd:1 +ttp: b451/782 bl:2.7772 bb:1.0639 rl:2.7531 rb:1.0654 dl:576-579 gd:1 +ttp: b450/782 bl:2.7610 bb:1.0305 rl:2.7531 rb:1.0653 dl:575-576 gd:1 +ttp: b449/782 bl:2.7981 bb:1.0532 rl:2.7532 rb:1.0653 dl:573-575 gd:1 +ttp: b448/782 bl:2.7298 bb:1.0371 rl:2.7531 rb:1.0653 dl:571-573 gd:1 +ttp: b447/782 bl:2.8324 bb:1.0892 rl:2.7532 rb:1.0653 dl:569-571 gd:1 +ttp: b446/782 bl:2.8284 bb:1.0917 rl:2.7533 rb:1.0653 dl:568-569 gd:1 +ttp: b445/782 bl:2.7691 bb:1.0647 rl:2.7533 rb:1.0653 dl:566-568 gd:1 +ttp: b444/782 bl:2.6709 bb:1.0120 rl:2.7532 rb:1.0653 dl:564-566 gd:1 +ttp: b443/782 bl:2.7797 bb:1.0588 rl:2.7533 rb:1.0653 dl:562-564 gd:1 +ttp: b442/782 bl:2.8195 bb:1.0589 rl:2.7534 rb:1.0653 dl:560-562 gd:1 +ttp: b441/782 bl:2.7095 bb:1.0430 rl:2.7533 rb:1.0652 dl:559-560 gd:1 +ttp: b440/782 bl:2.8646 bb:1.0937 rl:2.7534 rb:1.0653 dl:556-559 gd:1 +ttp: b439/782 bl:2.7512 bb:1.0427 rl:2.7534 rb:1.0652 dl:555-556 gd:1 +ttp: b438/782 bl:2.7181 bb:1.0575 rl:2.7534 rb:1.0652 dl:553-555 gd:1 +ttp: b437/782 bl:2.8756 bb:1.0611 rl:2.7535 rb:1.0652 dl:551-553 gd:1 +ttp: b436/782 bl:2.8449 bb:1.0673 rl:2.7536 rb:1.0652 dl:549-551 gd:1 +ttp: b435/782 bl:2.7314 bb:1.0517 rl:2.7536 rb:1.0652 dl:547-549 gd:1 +ttp: b434/782 bl:2.7283 bb:1.0425 rl:2.7536 rb:1.0652 dl:545-547 gd:1 +ttp: b433/782 bl:2.7761 bb:1.0654 rl:2.7536 rb:1.0652 dl:544-545 gd:1 +ttp: b432/782 bl:2.7639 bb:1.0515 rl:2.7536 rb:1.0652 
dl:542-544 gd:1 +ttp: b431/782 bl:2.7529 bb:1.0626 rl:2.7536 rb:1.0652 dl:540-542 gd:1 +ttp: b430/782 bl:2.7586 bb:1.0473 rl:2.7536 rb:1.0652 dl:539-540 gd:1 +ttp: b429/782 bl:2.7573 bb:1.0821 rl:2.7536 rb:1.0652 dl:537-539 gd:1 +ttp: b428/782 bl:2.8257 bb:1.0691 rl:2.7537 rb:1.0652 dl:535-537 gd:1 +ttp: b427/782 bl:2.7557 bb:1.0647 rl:2.7537 rb:1.0652 dl:533-535 gd:1 +ttp: b426/782 bl:2.7283 bb:1.0677 rl:2.7537 rb:1.0652 dl:532-533 gd:1 +ttp: b425/782 bl:2.7629 bb:1.0512 rl:2.7537 rb:1.0652 dl:530-532 gd:1 +ttp: b424/782 bl:2.7928 bb:1.0794 rl:2.7537 rb:1.0652 dl:528-530 gd:1 +ttp: b423/782 bl:2.7456 bb:1.0312 rl:2.7537 rb:1.0651 dl:526-528 gd:1 +ttp: b422/782 bl:2.7300 bb:1.0410 rl:2.7537 rb:1.0651 dl:524-526 gd:1 +ttp: b421/782 bl:2.7907 bb:1.0527 rl:2.7537 rb:1.0651 dl:523-524 gd:1 +ttp: b420/782 bl:2.7794 bb:1.0585 rl:2.7537 rb:1.0651 dl:521-522 gd:1 +ttp: b419/782 bl:2.8027 bb:1.0416 rl:2.7538 rb:1.0651 dl:519-521 gd:1 +ttp: b418/782 bl:2.8136 bb:1.0732 rl:2.7538 rb:1.0651 dl:517-519 gd:1 +ttp: b417/782 bl:2.8188 bb:1.0571 rl:2.7539 rb:1.0651 dl:516-517 gd:1 +ttp: b416/782 bl:2.7616 bb:1.0366 rl:2.7539 rb:1.0651 dl:514-516 gd:1 +ttp: b415/782 bl:2.8508 bb:1.0831 rl:2.7540 rb:1.0651 dl:513-514 gd:1 +ttp: b414/782 bl:2.8172 bb:1.0860 rl:2.7541 rb:1.0651 dl:511-513 gd:1 +ttp: b413/782 bl:2.6495 bb:0.9990 rl:2.7540 rb:1.0650 dl:510-511 gd:1 +ttp: b412/782 bl:2.7056 bb:1.0508 rl:2.7539 rb:1.0650 dl:508-510 gd:1 +ttp: b411/782 bl:2.8205 bb:1.0753 rl:2.7540 rb:1.0650 dl:507-508 gd:1 +ttp: b410/782 bl:2.7775 bb:1.0547 rl:2.7540 rb:1.0650 dl:505-507 gd:1 +ttp: b409/782 bl:2.7116 bb:1.0476 rl:2.7540 rb:1.0650 dl:503-505 gd:1 +ttp: b408/782 bl:2.8378 bb:1.0855 rl:2.7541 rb:1.0650 dl:501-503 gd:1 +ttp: b407/782 bl:2.7793 bb:1.0580 rl:2.7541 rb:1.0650 dl:500-501 gd:1 +ttp: b406/782 bl:2.8389 bb:1.1053 rl:2.7542 rb:1.0650 dl:498-500 gd:1 +ttp: b405/782 bl:2.8244 bb:1.0675 rl:2.7542 rb:1.0650 dl:497-498 gd:1 +ttp: b404/782 bl:2.7820 bb:1.0675 rl:2.7542 rb:1.0650 dl:495-497 gd:1 +ttp: b403/782 bl:2.8173 bb:1.0528 rl:2.7543 rb:1.0650 dl:493-495 gd:1 +ttp: b402/782 bl:2.7533 bb:1.0374 rl:2.7543 rb:1.0650 dl:492-493 gd:1 +ttp: b401/782 bl:2.7409 bb:1.0610 rl:2.7543 rb:1.0650 dl:490-492 gd:1 +ttp: b400/782 bl:2.7936 bb:1.0656 rl:2.7543 rb:1.0650 dl:489-490 gd:1 +ttp: b399/782 bl:2.7467 bb:1.0404 rl:2.7543 rb:1.0650 dl:487-489 gd:1 +ttp: b398/782 bl:2.8817 bb:1.0945 rl:2.7544 rb:1.0650 dl:486-487 gd:1 +ttp: b397/782 bl:2.8948 bb:1.0997 rl:2.7546 rb:1.0650 dl:484-486 gd:1 +ttp: b396/782 bl:2.7543 bb:1.0540 rl:2.7546 rb:1.0650 dl:482-484 gd:1 +ttp: b395/782 bl:2.7367 bb:1.0450 rl:2.7546 rb:1.0650 dl:481-482 gd:1 +ttp: b394/782 bl:2.9003 bb:1.1185 rl:2.7547 rb:1.0651 dl:479-481 gd:1 +ttp: b393/782 bl:2.8592 bb:1.0889 rl:2.7548 rb:1.0651 dl:478-479 gd:1 +ttp: b392/782 bl:2.8014 bb:1.0817 rl:2.7548 rb:1.0651 dl:476-478 gd:1 +ttp: b391/782 bl:2.8124 bb:1.0953 rl:2.7549 rb:1.0651 dl:475-476 gd:1 +ttp: b390/782 bl:2.8129 bb:1.0911 rl:2.7549 rb:1.0651 dl:473-475 gd:1 +ttp: b389/782 bl:2.7960 bb:1.0650 rl:2.7550 rb:1.0651 dl:471-473 gd:1 +ttp: b388/782 bl:2.7880 bb:1.0697 rl:2.7550 rb:1.0652 dl:470-471 gd:1 +ttp: b387/782 bl:2.8365 bb:1.0737 rl:2.7551 rb:1.0652 dl:468-470 gd:1 +ttp: b386/782 bl:2.7256 bb:1.0648 rl:2.7550 rb:1.0652 dl:467-468 gd:1 +ttp: b385/782 bl:2.8900 bb:1.1012 rl:2.7551 rb:1.0652 dl:466-467 gd:1 +ttp: b384/782 bl:2.8464 bb:1.0920 rl:2.7552 rb:1.0652 dl:464-466 gd:1 +ttp: b383/782 bl:2.8362 bb:1.0862 rl:2.7553 rb:1.0652 dl:463-464 gd:1 +ttp: b382/782 bl:2.9131 bb:1.1340 rl:2.7554 rb:1.0653 
dl:461-463 gd:1 +ttp: b381/782 bl:2.9060 bb:1.0912 rl:2.7556 rb:1.0653 dl:460-461 gd:1 +ttp: b380/782 bl:2.8478 bb:1.0783 rl:2.7556 rb:1.0653 dl:459-460 gd:1 +ttp: b379/782 bl:2.7662 bb:1.0592 rl:2.7556 rb:1.0653 dl:457-459 gd:1 +ttp: b378/782 bl:2.8231 bb:1.0984 rl:2.7557 rb:1.0653 dl:456-457 gd:1 +ttp: b377/782 bl:2.8022 bb:1.0865 rl:2.7557 rb:1.0654 dl:454-455 gd:1 +ttp: b376/782 bl:2.7201 bb:1.0446 rl:2.7557 rb:1.0653 dl:453-454 gd:1 +ttp: b375/782 bl:2.8128 bb:1.1085 rl:2.7558 rb:1.0654 dl:452-453 gd:1 +ttp: b374/782 bl:2.7493 bb:1.0683 rl:2.7558 rb:1.0654 dl:450-452 gd:1 +ttp: b373/782 bl:2.7606 bb:1.0775 rl:2.7558 rb:1.0654 dl:449-450 gd:1 +ttp: b372/782 bl:2.8396 bb:1.0705 rl:2.7558 rb:1.0654 dl:447-449 gd:1 +ttp: b371/782 bl:2.8029 bb:1.0713 rl:2.7559 rb:1.0654 dl:446-447 gd:1 +ttp: b370/782 bl:2.6803 bb:1.0428 rl:2.7558 rb:1.0654 dl:444-446 gd:1 +ttp: b369/782 bl:2.9294 bb:1.0874 rl:2.7559 rb:1.0654 dl:443-444 gd:1 +ttp: b368/782 bl:2.8534 bb:1.0887 rl:2.7560 rb:1.0654 dl:441-443 gd:1 +ttp: b367/782 bl:2.8338 bb:1.0643 rl:2.7561 rb:1.0654 dl:440-441 gd:1 +ttp: b366/782 bl:2.8812 bb:1.1279 rl:2.7562 rb:1.0655 dl:439-440 gd:1 +ttp: b365/782 bl:2.7747 bb:1.0814 rl:2.7562 rb:1.0655 dl:437-439 gd:1 +ttp: b364/782 bl:2.7488 bb:1.0722 rl:2.7562 rb:1.0655 dl:436-437 gd:1 +ttp: b363/782 bl:2.7438 bb:1.0941 rl:2.7562 rb:1.0655 dl:434-436 gd:1 +ttp: b362/782 bl:2.8172 bb:1.0652 rl:2.7562 rb:1.0655 dl:433-434 gd:1 +ttp: b361/782 bl:2.8189 bb:1.0778 rl:2.7563 rb:1.0655 dl:432-433 gd:1 +ttp: b360/782 bl:2.8416 bb:1.0836 rl:2.7564 rb:1.0655 dl:430-432 gd:1 +ttp: b359/782 bl:2.7987 bb:1.0817 rl:2.7564 rb:1.0656 dl:429-430 gd:1 +ttp: b358/782 bl:2.8234 bb:1.0913 rl:2.7564 rb:1.0656 dl:427-429 gd:1 +ttp: b357/782 bl:2.8594 bb:1.0819 rl:2.7565 rb:1.0656 dl:426-427 gd:1 +ttp: b356/782 bl:2.6937 bb:1.0466 rl:2.7565 rb:1.0656 dl:424-426 gd:1 +ttp: b355/782 bl:2.7071 bb:1.0667 rl:2.7564 rb:1.0656 dl:423-424 gd:1 +ttp: b354/782 bl:2.7895 bb:1.0822 rl:2.7565 rb:1.0656 dl:422-423 gd:1 +ttp: b353/782 bl:2.8033 bb:1.0985 rl:2.7565 rb:1.0656 dl:420-422 gd:1 +ttp: b352/782 bl:2.7579 bb:1.0963 rl:2.7565 rb:1.0656 dl:419-420 gd:1 +ttp: b351/782 bl:2.8420 bb:1.0938 rl:2.7566 rb:1.0657 dl:418-419 gd:1 +ttp: b350/782 bl:2.7386 bb:1.0622 rl:2.7565 rb:1.0656 dl:417-418 gd:1 +ttp: b349/782 bl:2.9113 bb:1.1062 rl:2.7567 rb:1.0657 dl:415-417 gd:1 +ttp: b348/782 bl:2.8071 bb:1.0668 rl:2.7567 rb:1.0657 dl:414-415 gd:1 +ttp: b347/782 bl:2.8671 bb:1.0928 rl:2.7568 rb:1.0657 dl:413-414 gd:1 +ttp: b346/782 bl:2.8515 bb:1.0882 rl:2.7569 rb:1.0657 dl:412-413 gd:1 +ttp: b345/782 bl:2.8700 bb:1.1130 rl:2.7569 rb:1.0658 dl:410-412 gd:1 +ttp: b344/782 bl:2.8924 bb:1.1087 rl:2.7570 rb:1.0658 dl:408-410 gd:1 +ttp: b343/782 bl:2.8058 bb:1.0706 rl:2.7571 rb:1.0658 dl:407-408 gd:1 +ttp: b342/782 bl:2.8709 bb:1.1045 rl:2.7572 rb:1.0658 dl:406-407 gd:1 +ttp: b341/782 bl:2.8757 bb:1.1009 rl:2.7572 rb:1.0658 dl:404-406 gd:1 +ttp: b340/782 bl:2.8220 bb:1.0916 rl:2.7573 rb:1.0659 dl:403-404 gd:1 +ttp: b339/782 bl:2.8280 bb:1.0739 rl:2.7573 rb:1.0659 dl:402-403 gd:1 +ttp: b338/782 bl:2.8456 bb:1.1099 rl:2.7574 rb:1.0659 dl:400-402 gd:1 +ttp: b337/782 bl:2.8318 bb:1.0782 rl:2.7575 rb:1.0659 dl:399-400 gd:1 +ttp: b336/782 bl:2.9486 bb:1.1652 rl:2.7576 rb:1.0660 dl:398-399 gd:1 +ttp: b335/782 bl:2.7186 bb:1.0896 rl:2.7576 rb:1.0660 dl:396-398 gd:1 +ttp: b334/782 bl:2.8659 bb:1.1028 rl:2.7576 rb:1.0660 dl:395-396 gd:1 +ttp: b333/782 bl:2.9032 bb:1.1307 rl:2.7577 rb:1.0661 dl:394-395 gd:1 +ttp: b332/782 bl:2.8230 bb:1.0962 rl:2.7578 rb:1.0661 
dl:393-394 gd:1 +ttp: b331/782 bl:2.7839 bb:1.0687 rl:2.7578 rb:1.0661 dl:392-393 gd:1 +ttp: b330/782 bl:2.8712 bb:1.0946 rl:2.7579 rb:1.0661 dl:390-392 gd:1 +ttp: b329/782 bl:2.8372 bb:1.1067 rl:2.7579 rb:1.0661 dl:389-390 gd:1 +ttp: b328/782 bl:2.7917 bb:1.0825 rl:2.7580 rb:1.0661 dl:388-389 gd:1 +ttp: b327/782 bl:2.7814 bb:1.0798 rl:2.7580 rb:1.0662 dl:387-388 gd:1 +ttp: b326/782 bl:2.8551 bb:1.1284 rl:2.7580 rb:1.0662 dl:385-387 gd:1 +ttp: b325/782 bl:2.8558 bb:1.0970 rl:2.7581 rb:1.0662 dl:384-385 gd:1 +ttp: b324/782 bl:2.7718 bb:1.0572 rl:2.7581 rb:1.0662 dl:382-384 gd:1 +ttp: b323/782 bl:2.8156 bb:1.0473 rl:2.7582 rb:1.0662 dl:381-382 gd:1 +ttp: b322/782 bl:2.7625 bb:1.0795 rl:2.7582 rb:1.0662 dl:380-381 gd:1 +ttp: b321/782 bl:2.8085 bb:1.1036 rl:2.7582 rb:1.0662 dl:378-380 gd:1 +ttp: b320/782 bl:2.7587 bb:1.0762 rl:2.7582 rb:1.0662 dl:377-378 gd:1 +ttp: b319/782 bl:2.8348 bb:1.1121 rl:2.7582 rb:1.0663 dl:376-377 gd:1 +ttp: b318/782 bl:2.8085 bb:1.0653 rl:2.7583 rb:1.0663 dl:374-376 gd:1 +ttp: b317/782 bl:2.8779 bb:1.1125 rl:2.7584 rb:1.0663 dl:373-374 gd:1 +ttp: b316/782 bl:2.7824 bb:1.0942 rl:2.7584 rb:1.0663 dl:371-373 gd:1 +ttp: b315/782 bl:2.7140 bb:1.0671 rl:2.7583 rb:1.0663 dl:370-371 gd:1 +ttp: b314/782 bl:2.8063 bb:1.0666 rl:2.7584 rb:1.0663 dl:369-370 gd:1 +ttp: b313/782 bl:2.8282 bb:1.0892 rl:2.7584 rb:1.0663 dl:368-369 gd:1 +ttp: b312/782 bl:2.7382 bb:1.0689 rl:2.7584 rb:1.0663 dl:367-368 gd:1 +ttp: b311/782 bl:2.8565 bb:1.0943 rl:2.7585 rb:1.0664 dl:365-367 gd:1 +ttp: b310/782 bl:2.7978 bb:1.0839 rl:2.7585 rb:1.0664 dl:364-365 gd:1 +ttp: b309/782 bl:2.8314 bb:1.1047 rl:2.7585 rb:1.0664 dl:363-364 gd:1 +ttp: b308/782 bl:2.7998 bb:1.0877 rl:2.7586 rb:1.0664 dl:362-363 gd:1 +ttp: b307/782 bl:2.9003 bb:1.1087 rl:2.7587 rb:1.0664 dl:361-362 gd:1 +ttp: b306/782 bl:2.8803 bb:1.1395 rl:2.7587 rb:1.0665 dl:359-361 gd:1 +ttp: b305/782 bl:2.8613 bb:1.0855 rl:2.7588 rb:1.0665 dl:358-359 gd:1 +ttp: b304/782 bl:2.9041 bb:1.1310 rl:2.7589 rb:1.0665 dl:357-358 gd:1 +ttp: b303/782 bl:2.8110 bb:1.0890 rl:2.7589 rb:1.0665 dl:355-357 gd:1 +ttp: b302/782 bl:2.8363 bb:1.1000 rl:2.7590 rb:1.0666 dl:354-355 gd:1 +ttp: b301/782 bl:2.7948 bb:1.0868 rl:2.7590 rb:1.0666 dl:353-354 gd:1 +ttp: b300/782 bl:2.8600 bb:1.0901 rl:2.7591 rb:1.0666 dl:352-353 gd:1 +ttp: b299/782 bl:2.9039 bb:1.1260 rl:2.7591 rb:1.0666 dl:351-352 gd:1 +ttp: b298/782 bl:2.8486 bb:1.1026 rl:2.7592 rb:1.0666 dl:349-351 gd:1 +ttp: b297/782 bl:2.7993 bb:1.0605 rl:2.7592 rb:1.0666 dl:348-349 gd:1 +ttp: b296/782 bl:2.8094 bb:1.0865 rl:2.7593 rb:1.0667 dl:347-348 gd:1 +ttp: b295/782 bl:2.8421 bb:1.1206 rl:2.7593 rb:1.0667 dl:345-347 gd:1 +ttp: b293/782 bl:2.7669 bb:1.0689 rl:2.7593 rb:1.0667 dl:343-345 gd:1 +ttp: b294/782 bl:2.8472 bb:1.1010 rl:2.7594 rb:1.0667 dl:345-345 gd:1 +ttp: b292/782 bl:2.7940 bb:1.0826 rl:2.7594 rb:1.0667 dl:342-343 gd:1 +ttp: b291/782 bl:2.9564 bb:1.1161 rl:2.7595 rb:1.0667 dl:341-342 gd:1 +ttp: b290/782 bl:2.8717 bb:1.0880 rl:2.7596 rb:1.0668 dl:340-341 gd:1 +ttp: b289/782 bl:2.8391 bb:1.1239 rl:2.7596 rb:1.0668 dl:339-340 gd:1 +ttp: b288/782 bl:2.8168 bb:1.1059 rl:2.7596 rb:1.0668 dl:337-339 gd:1 +ttp: b287/782 bl:2.8595 bb:1.1155 rl:2.7597 rb:1.0668 dl:336-337 gd:1 +ttp: b286/782 bl:2.8982 bb:1.1009 rl:2.7598 rb:1.0669 dl:335-336 gd:1 +ttp: b285/782 bl:2.8835 bb:1.1285 rl:2.7599 rb:1.0669 dl:334-335 gd:1 +ttp: b284/782 bl:2.8808 bb:1.0861 rl:2.7599 rb:1.0669 dl:333-334 gd:1 +ttp: b283/782 bl:2.7990 bb:1.0737 rl:2.7600 rb:1.0669 dl:332-333 gd:1 +ttp: b282/782 bl:2.8265 bb:1.1256 rl:2.7600 rb:1.0669 
dl:331-332 gd:1 +ttp: b281/782 bl:2.9300 bb:1.1552 rl:2.7601 rb:1.0670 dl:329-330 gd:1 +ttp: b279/782 bl:2.8614 bb:1.0937 rl:2.7601 rb:1.0670 dl:327-329 gd:1 +ttp: b280/782 bl:2.8110 bb:1.0910 rl:2.7602 rb:1.0670 dl:329-329 gd:1 +ttp: b278/782 bl:2.8930 bb:1.1407 rl:2.7602 rb:1.0671 dl:326-327 gd:1 +ttp: b277/782 bl:2.8108 bb:1.1070 rl:2.7603 rb:1.0671 dl:325-326 gd:1 +ttp: b276/782 bl:2.8533 bb:1.1059 rl:2.7603 rb:1.0671 dl:324-325 gd:1 +ttp: b275/782 bl:2.7604 bb:1.0676 rl:2.7603 rb:1.0671 dl:323-324 gd:1 +ttp: b274/782 bl:2.8125 bb:1.0919 rl:2.7604 rb:1.0671 dl:322-323 gd:1 +ttp: b273/782 bl:2.7762 bb:1.0634 rl:2.7604 rb:1.0671 dl:321-322 gd:1 +ttp: b272/782 bl:2.8637 bb:1.1109 rl:2.7604 rb:1.0671 dl:320-321 gd:1 +ttp: b271/782 bl:2.7777 bb:1.0705 rl:2.7604 rb:1.0671 dl:319-320 gd:1 +ttp: b270/782 bl:2.7752 bb:1.0891 rl:2.7604 rb:1.0672 dl:318-319 gd:1 +ttp: b269/782 bl:2.9264 bb:1.1280 rl:2.7605 rb:1.0672 dl:316-318 gd:1 +ttp: b268/782 bl:2.8697 bb:1.1033 rl:2.7606 rb:1.0672 dl:315-316 gd:1 +ttp: b267/782 bl:2.8605 bb:1.0968 rl:2.7606 rb:1.0672 dl:314-315 gd:1 +ttp: b266/782 bl:2.8550 bb:1.0979 rl:2.7607 rb:1.0672 dl:313-314 gd:1 +ttp: b265/782 bl:2.8429 bb:1.0943 rl:2.7607 rb:1.0673 dl:312-313 gd:1 +ttp: b264/782 bl:2.8971 bb:1.1467 rl:2.7608 rb:1.0673 dl:311-312 gd:1 +ttp: b263/782 bl:2.8262 bb:1.1008 rl:2.7608 rb:1.0673 dl:310-311 gd:1 +ttp: b262/782 bl:2.8670 bb:1.1195 rl:2.7609 rb:1.0673 dl:309-310 gd:1 +ttp: b261/782 bl:2.8686 bb:1.1220 rl:2.7610 rb:1.0674 dl:308-309 gd:1 +ttp: b260/782 bl:2.8308 bb:1.1042 rl:2.7610 rb:1.0674 dl:306-307 gd:1 +ttp: b259/782 bl:2.8658 bb:1.1429 rl:2.7610 rb:1.0674 dl:305-306 gd:1 +ttp: b258/782 bl:2.9584 bb:1.1666 rl:2.7612 rb:1.0675 dl:304-305 gd:1 +ttp: b257/782 bl:2.9248 bb:1.1136 rl:2.7612 rb:1.0675 dl:302-304 gd:1 +ttp: b256/782 bl:2.8950 bb:1.1349 rl:2.7613 rb:1.0675 dl:301-302 gd:1 +ttp: b255/782 bl:2.8599 bb:1.1286 rl:2.7614 rb:1.0676 dl:300-301 gd:1 +ttp: b254/782 bl:2.9015 bb:1.1429 rl:2.7614 rb:1.0676 dl:299-300 gd:1 +ttp: b253/782 bl:2.7563 bb:1.0824 rl:2.7614 rb:1.0676 dl:298-299 gd:1 +ttp: b252/782 bl:2.8977 bb:1.1291 rl:2.7615 rb:1.0676 dl:297-298 gd:1 +ttp: b251/782 bl:2.8788 bb:1.1103 rl:2.7616 rb:1.0677 dl:296-297 gd:1 +ttp: b250/782 bl:2.8823 bb:1.1454 rl:2.7616 rb:1.0677 dl:295-296 gd:1 +ttp: b249/782 bl:2.8906 bb:1.1513 rl:2.7617 rb:1.0677 dl:294-295 gd:1 +ttp: b248/782 bl:2.8900 bb:1.1029 rl:2.7617 rb:1.0678 dl:293-294 gd:1 +ttp: b247/782 bl:2.7905 bb:1.0782 rl:2.7618 rb:1.0678 dl:292-293 gd:1 +ttp: b246/782 bl:2.9043 bb:1.1376 rl:2.7618 rb:1.0678 dl:291-292 gd:1 +ttp: b245/782 bl:2.8774 bb:1.1049 rl:2.7619 rb:1.0678 dl:290-291 gd:1 +ttp: b244/782 bl:2.9522 bb:1.1579 rl:2.7620 rb:1.0679 dl:289-290 gd:1 +ttp: b243/782 bl:2.8266 bb:1.1024 rl:2.7620 rb:1.0679 dl:288-289 gd:1 +ttp: b242/782 bl:2.9060 bb:1.1111 rl:2.7621 rb:1.0679 dl:287-288 gd:1 +ttp: b241/782 bl:2.9111 bb:1.1277 rl:2.7621 rb:1.0679 dl:286-287 gd:1 +ttp: b240/782 bl:2.9088 bb:1.1545 rl:2.7622 rb:1.0680 dl:285-286 gd:1 +ttp: b239/782 bl:2.8783 bb:1.1288 rl:2.7623 rb:1.0680 dl:284-285 gd:1 +ttp: b238/782 bl:2.8880 bb:1.1456 rl:2.7623 rb:1.0680 dl:283-284 gd:1 +ttp: b237/782 bl:2.9175 bb:1.1470 rl:2.7624 rb:1.0681 dl:282-283 gd:1 +ttp: b236/782 bl:2.8528 bb:1.1088 rl:2.7625 rb:1.0681 dl:281-282 gd:1 +ttp: b235/782 bl:2.9332 bb:1.1149 rl:2.7625 rb:1.0681 dl:280-281 gd:1 +ttp: b234/782 bl:2.9200 bb:1.1575 rl:2.7626 rb:1.0682 dl:279-280 gd:1 +ttp: b233/782 bl:2.8599 bb:1.1237 rl:2.7627 rb:1.0682 dl:278-279 gd:1 +ttp: b232/782 bl:2.9362 bb:1.1357 rl:2.7627 rb:1.0682 
dl:277-278 gd:1 +ttp: b231/782 bl:2.8301 bb:1.1038 rl:2.7628 rb:1.0682 dl:276-277 gd:1 +ttp: b230/782 bl:2.9087 bb:1.1131 rl:2.7628 rb:1.0683 dl:275-276 gd:1 +ttp: b229/782 bl:2.9004 bb:1.1409 rl:2.7629 rb:1.0683 dl:274-275 gd:1 +ttp: b228/782 bl:2.8762 bb:1.1382 rl:2.7629 rb:1.0683 dl:273-274 gd:1 +ttp: b227/782 bl:2.8052 bb:1.0879 rl:2.7630 rb:1.0683 dl:272-273 gd:1 +ttp: b226/782 bl:2.9496 bb:1.1475 rl:2.7631 rb:1.0684 dl:271-272 gd:1 +ttp: b225/782 bl:2.8845 bb:1.1234 rl:2.7631 rb:1.0684 dl:270-271 gd:1 +ttp: b224/782 bl:2.8168 bb:1.1065 rl:2.7631 rb:1.0684 dl:269-270 gd:1 +ttp: b223/782 bl:2.8263 bb:1.0881 rl:2.7632 rb:1.0684 dl:268-269 gd:1 +ttp: b222/782 bl:2.8752 bb:1.1170 rl:2.7632 rb:1.0684 dl:267-268 gd:1 +ttp: b221/782 bl:2.8477 bb:1.1429 rl:2.7632 rb:1.0685 dl:266-267 gd:1 +ttp: b220/782 bl:2.8663 bb:1.1092 rl:2.7633 rb:1.0685 dl:265-266 gd:1 +ttp: b219/782 bl:2.9094 bb:1.1351 rl:2.7634 rb:1.0685 dl:264-265 gd:1 +ttp: b218/782 bl:2.7365 bb:1.1004 rl:2.7633 rb:1.0685 dl:263-264 gd:1 +ttp: b217/782 bl:2.8851 bb:1.1301 rl:2.7634 rb:1.0686 dl:262-263 gd:1 +ttp: b216/782 bl:2.9336 bb:1.1163 rl:2.7635 rb:1.0686 dl:261-262 gd:1 +ttp: b215/782 bl:2.8528 bb:1.1447 rl:2.7635 rb:1.0686 dl:260-261 gd:1 +ttp: b214/782 bl:2.9384 bb:1.1304 rl:2.7636 rb:1.0686 dl:259-260 gd:1 +ttp: b213/782 bl:3.0137 bb:1.1759 rl:2.7637 rb:1.0687 dl:258-259 gd:1 +ttp: b212/782 bl:2.9336 bb:1.1482 rl:2.7638 rb:1.0687 dl:257-258 gd:1 +ttp: b211/782 bl:2.8921 bb:1.1522 rl:2.7638 rb:1.0687 dl:256-257 gd:1 +ttp: b210/782 bl:2.8564 bb:1.1241 rl:2.7639 rb:1.0688 dl:255-256 gd:1 +ttp: b209/782 bl:2.9176 bb:1.1550 rl:2.7639 rb:1.0688 dl:254-255 gd:1 +ttp: b207/782 bl:2.8449 bb:1.1191 rl:2.7640 rb:1.0688 dl:253-254 gd:1 +ttp: b208/782 bl:2.8323 bb:1.1183 rl:2.7640 rb:1.0688 dl:254-254 gd:1 +ttp: b206/782 bl:2.8875 bb:1.1177 rl:2.7640 rb:1.0689 dl:252-253 gd:1 +ttp: b205/782 bl:2.8452 bb:1.1101 rl:2.7641 rb:1.0689 dl:251-252 gd:1 +ttp: b204/782 bl:2.9148 bb:1.1339 rl:2.7641 rb:1.0689 dl:250-251 gd:1 +ttp: b203/782 bl:2.7795 bb:1.0919 rl:2.7641 rb:1.0689 dl:249-250 gd:1 +ttp: b202/782 bl:2.8633 bb:1.1318 rl:2.7642 rb:1.0689 dl:248-249 gd:1 +ttp: b201/782 bl:2.8735 bb:1.1200 rl:2.7642 rb:1.0690 dl:247-248 gd:1 +ttp: b199/782 bl:2.9487 bb:1.1302 rl:2.7643 rb:1.0690 dl:246-247 gd:1 +ttp: b200/782 bl:2.8489 bb:1.0950 rl:2.7643 rb:1.0690 dl:247-247 gd:1 +ttp: b198/782 bl:2.9832 bb:1.1537 rl:2.7644 rb:1.0690 dl:245-246 gd:1 +ttp: b197/782 bl:2.8555 bb:1.1261 rl:2.7645 rb:1.0691 dl:244-245 gd:1 +ttp: b196/782 bl:2.9175 bb:1.1690 rl:2.7645 rb:1.0691 dl:243-244 gd:1 +ttp: b195/782 bl:2.8562 bb:1.1179 rl:2.7646 rb:1.0691 dl:242-243 gd:1 +ttp: b194/782 bl:2.9039 bb:1.1021 rl:2.7646 rb:1.0691 dl:241-242 gd:1 +ttp: b193/782 bl:2.8890 bb:1.1640 rl:2.7647 rb:1.0692 dl:240-241 gd:1 +ttp: b192/782 bl:2.9123 bb:1.1479 rl:2.7647 rb:1.0692 dl:239-240 gd:1 +ttp: b191/782 bl:2.9383 bb:1.1474 rl:2.7648 rb:1.0692 dl:238-239 gd:1 +ttp: b190/782 bl:2.8806 bb:1.0950 rl:2.7648 rb:1.0692 dl:237-238 gd:1 +ttp: b188/782 bl:2.9065 bb:1.1514 rl:2.7649 rb:1.0693 dl:236-237 gd:1 +ttp: b189/782 bl:2.9636 bb:1.2028 rl:2.7650 rb:1.0693 dl:237-237 gd:1 +ttp: b187/782 bl:2.9038 bb:1.1197 rl:2.7650 rb:1.0693 dl:235-236 gd:1 +ttp: b186/782 bl:2.9404 bb:1.1748 rl:2.7651 rb:1.0694 dl:234-235 gd:1 +ttp: b185/782 bl:2.8631 bb:1.1238 rl:2.7651 rb:1.0694 dl:233-234 gd:1 +ttp: b184/782 bl:2.9037 bb:1.1530 rl:2.7652 rb:1.0694 dl:232-233 gd:1 +ttp: b183/782 bl:2.8678 bb:1.1447 rl:2.7652 rb:1.0695 dl:231-232 gd:1 +ttp: b182/782 bl:2.8507 bb:1.1340 rl:2.7653 rb:1.0695 
dl:230-231 gd:1 +ttp: b180/782 bl:2.9126 bb:1.1358 rl:2.7653 rb:1.0695 dl:229-230 gd:1 +ttp: b181/782 bl:2.8875 bb:1.1602 rl:2.7654 rb:1.0695 dl:230-230 gd:1 +ttp: b179/782 bl:2.9512 bb:1.1528 rl:2.7654 rb:1.0696 dl:228-229 gd:1 +ttp: b178/782 bl:2.8540 bb:1.1383 rl:2.7655 rb:1.0696 dl:227-228 gd:1 +ttp: b177/782 bl:2.9339 bb:1.1512 rl:2.7655 rb:1.0696 dl:226-227 gd:1 +ttp: b176/782 bl:2.8232 bb:1.1076 rl:2.7655 rb:1.0696 dl:225-226 gd:1 +ttp: b175/782 bl:2.8435 bb:1.1146 rl:2.7656 rb:1.0697 dl:225-225 gd:1 +ttp: b173/782 bl:2.9627 bb:1.1518 rl:2.7656 rb:1.0697 dl:223-224 gd:1 +ttp: b174/782 bl:2.9722 bb:1.1539 rl:2.7657 rb:1.0697 dl:224-224 gd:1 +ttp: b172/782 bl:3.0149 bb:1.1857 rl:2.7658 rb:1.0698 dl:222-223 gd:1 +ttp: b171/782 bl:2.8916 bb:1.1122 rl:2.7659 rb:1.0698 dl:221-222 gd:1 +ttp: b170/782 bl:2.9972 bb:1.1720 rl:2.7659 rb:1.0698 dl:220-221 gd:1 +ttp: b169/782 bl:2.9078 bb:1.1618 rl:2.7660 rb:1.0698 dl:219-220 gd:1 +ttp: b168/782 bl:2.9271 bb:1.1471 rl:2.7661 rb:1.0699 dl:218-219 gd:1 +ttp: b166/782 bl:2.9661 bb:1.1435 rl:2.7661 rb:1.0699 dl:217-218 gd:1 +ttp: b167/782 bl:2.9621 bb:1.1841 rl:2.7662 rb:1.0699 dl:218-218 gd:1 +ttp: b165/782 bl:2.9359 bb:1.1617 rl:2.7663 rb:1.0700 dl:216-217 gd:1 +ttp: b164/782 bl:2.9638 bb:1.1461 rl:2.7663 rb:1.0700 dl:215-216 gd:1 +ttp: b163/782 bl:2.8795 bb:1.1302 rl:2.7664 rb:1.0700 dl:214-215 gd:1 +ttp: b162/782 bl:2.9684 bb:1.1518 rl:2.7664 rb:1.0700 dl:213-214 gd:1 +ttp: b161/782 bl:2.9610 bb:1.1783 rl:2.7665 rb:1.0701 dl:212-213 gd:1 +ttp: b159/782 bl:2.9999 bb:1.1818 rl:2.7666 rb:1.0701 dl:211-212 gd:1 +ttp: b160/782 bl:2.8733 bb:1.1293 rl:2.7666 rb:1.0701 dl:212-212 gd:1 +ttp: b158/782 bl:2.8861 bb:1.1425 rl:2.7667 rb:1.0702 dl:210-211 gd:1 +ttp: b157/782 bl:2.8250 bb:1.1135 rl:2.7667 rb:1.0702 dl:209-210 gd:1 +ttp: b156/782 bl:2.8976 bb:1.1111 rl:2.7667 rb:1.0702 dl:208-209 gd:1 +ttp: b155/782 bl:2.8807 bb:1.1322 rl:2.7668 rb:1.0702 dl:207-208 gd:1 +ttp: b153/782 bl:3.0185 bb:1.1644 rl:2.7668 rb:1.0702 dl:206-207 gd:1 +ttp: b154/782 bl:2.9928 bb:1.1584 rl:2.7669 rb:1.0703 dl:207-207 gd:1 +ttp: b152/782 bl:2.8908 bb:1.1279 rl:2.7670 rb:1.0703 dl:205-206 gd:1 +ttp: b151/782 bl:2.7967 bb:1.1022 rl:2.7670 rb:1.0703 dl:204-205 gd:1 +ttp: b149/782 bl:2.9727 bb:1.1720 rl:2.7670 rb:1.0703 dl:203-204 gd:1 +ttp: b150/782 bl:2.9524 bb:1.1605 rl:2.7671 rb:1.0704 dl:204-204 gd:1 +ttp: b148/782 bl:2.9861 bb:1.1606 rl:2.7672 rb:1.0704 dl:202-203 gd:1 +ttp: b147/782 bl:2.9252 bb:1.1573 rl:2.7672 rb:1.0704 dl:201-202 gd:1 +ttp: b146/782 bl:2.9026 bb:1.1523 rl:2.7673 rb:1.0704 dl:200-201 gd:1 +ttp: b144/782 bl:2.8337 bb:1.1271 rl:2.7673 rb:1.0705 dl:199-200 gd:1 +ttp: b145/782 bl:2.8897 bb:1.1338 rl:2.7673 rb:1.0705 dl:200-200 gd:1 +ttp: b143/782 bl:3.0285 bb:1.1996 rl:2.7674 rb:1.0705 dl:198-199 gd:1 +ttp: b142/782 bl:2.9663 bb:1.1629 rl:2.7675 rb:1.0706 dl:197-198 gd:1 +ttp: b141/782 bl:2.9042 bb:1.1449 rl:2.7675 rb:1.0706 dl:196-197 gd:1 +ttp: b140/782 bl:2.9616 bb:1.1694 rl:2.7676 rb:1.0706 dl:195-196 gd:1 +ttp: b138/782 bl:2.9146 bb:1.1601 rl:2.7676 rb:1.0706 dl:194-195 gd:1 +ttp: b139/782 bl:2.9809 bb:1.1537 rl:2.7677 rb:1.0707 dl:195-195 gd:1 +ttp: b137/782 bl:2.9612 bb:1.1933 rl:2.7678 rb:1.0707 dl:193-194 gd:1 +ttp: b136/782 bl:2.9703 bb:1.1850 rl:2.7678 rb:1.0707 dl:192-193 gd:1 +ttp: b135/782 bl:2.9307 bb:1.1418 rl:2.7679 rb:1.0708 dl:191-192 gd:1 +ttp: b134/782 bl:3.0319 bb:1.2126 rl:2.7680 rb:1.0708 dl:190-191 gd:1 +ttp: b133/782 bl:3.0205 bb:1.1932 rl:2.7680 rb:1.0708 dl:189-190 gd:1 +ttp: b131/782 bl:3.0394 bb:1.2080 rl:2.7681 rb:1.0709 
dl:188-189 gd:1 +ttp: b132/782 bl:2.9521 bb:1.1361 rl:2.7682 rb:1.0709 dl:189-189 gd:1 +ttp: b130/782 bl:3.1484 bb:1.2376 rl:2.7683 rb:1.0709 dl:187-188 gd:1 +ttp: b128/782 bl:2.8394 bb:1.0901 rl:2.7683 rb:1.0710 dl:186-187 gd:1 +ttp: b129/782 bl:2.9418 bb:1.1810 rl:2.7684 rb:1.0710 dl:187-187 gd:1 +ttp: b127/782 bl:2.9039 bb:1.1479 rl:2.7684 rb:1.0710 dl:185-186 gd:1 +ttp: b125/782 bl:2.9939 bb:1.1864 rl:2.7685 rb:1.0710 dl:184-185 gd:1 +ttp: b126/782 bl:2.9402 bb:1.1946 rl:2.7685 rb:1.0711 dl:185-185 gd:1 +ttp: b124/782 bl:2.8710 bb:1.1484 rl:2.7686 rb:1.0711 dl:183-184 gd:1 +ttp: b123/782 bl:2.9624 bb:1.1836 rl:2.7686 rb:1.0711 dl:182-183 gd:1 +ttp: b122/782 bl:2.8973 bb:1.1592 rl:2.7686 rb:1.0712 dl:181-182 gd:1 +ttp: b120/782 bl:2.9737 bb:1.1684 rl:2.7687 rb:1.0712 dl:180-181 gd:1 +ttp: b121/782 bl:2.8486 bb:1.1286 rl:2.7687 rb:1.0712 dl:181-181 gd:1 +ttp: b119/782 bl:2.8133 bb:1.0894 rl:2.7687 rb:1.0712 dl:179-180 gd:1 +ttp: b118/782 bl:2.9633 bb:1.1577 rl:2.7688 rb:1.0712 dl:178-179 gd:1 +ttp: b116/782 bl:3.0123 bb:1.1912 rl:2.7689 rb:1.0713 dl:177-178 gd:1 +ttp: b117/782 bl:2.8606 bb:1.1466 rl:2.7689 rb:1.0713 dl:178-178 gd:1 +ttp: b115/782 bl:2.8644 bb:1.1558 rl:2.7689 rb:1.0713 dl:176-177 gd:1 +ttp: b113/782 bl:3.0303 bb:1.1915 rl:2.7690 rb:1.0713 dl:175-176 gd:1 +ttp: b114/782 bl:2.9997 bb:1.1876 rl:2.7691 rb:1.0714 dl:176-176 gd:1 +ttp: b112/782 bl:2.9800 bb:1.1526 rl:2.7691 rb:1.0714 dl:174-175 gd:1 +ttp: b111/782 bl:2.9727 bb:1.1861 rl:2.7692 rb:1.0714 dl:173-174 gd:1 +ttp: b109/782 bl:3.0719 bb:1.2106 rl:2.7693 rb:1.0715 dl:172-173 gd:1 +ttp: b110/782 bl:3.0279 bb:1.1756 rl:2.7693 rb:1.0715 dl:173-173 gd:1 +ttp: b108/782 bl:2.8619 bb:1.0991 rl:2.7694 rb:1.0715 dl:171-172 gd:1 +ttp: b106/782 bl:2.9255 bb:1.1816 rl:2.7694 rb:1.0715 dl:170-171 gd:1 +ttp: b107/782 bl:2.9168 bb:1.1450 rl:2.7694 rb:1.0715 dl:171-171 gd:1 +ttp: b105/782 bl:3.0603 bb:1.2259 rl:2.7695 rb:1.0716 dl:169-170 gd:1 +ttp: b104/782 bl:2.9946 bb:1.1653 rl:2.7696 rb:1.0716 dl:168-169 gd:1 +ttp: b102/782 bl:2.7624 bb:1.1123 rl:2.7696 rb:1.0716 dl:167-168 gd:1 +ttp: b103/782 bl:2.8929 bb:1.1196 rl:2.7696 rb:1.0716 dl:168-168 gd:1 +ttp: b101/782 bl:2.9510 bb:1.1582 rl:2.7697 rb:1.0717 dl:166-167 gd:1 +ttp: b100/782 bl:2.9477 bb:1.1571 rl:2.7697 rb:1.0717 dl:165-166 gd:1 +ttp: b99/782 bl:2.9864 bb:1.1874 rl:2.7698 rb:1.0717 dl:164-165 gd:1 +ttp: b97/782 bl:2.9961 bb:1.1704 rl:2.7698 rb:1.0717 dl:163-164 gd:1 +ttp: b98/782 bl:2.9860 bb:1.1852 rl:2.7699 rb:1.0718 dl:164-164 gd:1 +ttp: b96/782 bl:2.9497 bb:1.1529 rl:2.7699 rb:1.0718 dl:162-163 gd:1 +ttp: b95/782 bl:3.0189 bb:1.2294 rl:2.7700 rb:1.0718 dl:161-162 gd:1 +ttp: b94/782 bl:2.9868 bb:1.1779 rl:2.7700 rb:1.0719 dl:160-161 gd:1 +ttp: b92/782 bl:2.9055 bb:1.1751 rl:2.7701 rb:1.0719 dl:159-160 gd:1 +ttp: b93/782 bl:2.9577 bb:1.1864 rl:2.7701 rb:1.0719 dl:160-160 gd:1 +ttp: b91/782 bl:3.0408 bb:1.2170 rl:2.7702 rb:1.0719 dl:158-159 gd:1 +ttp: b89/782 bl:3.0115 bb:1.2010 rl:2.7703 rb:1.0720 dl:157-158 gd:1 +ttp: b90/782 bl:2.9978 bb:1.1825 rl:2.7703 rb:1.0720 dl:158-158 gd:1 +ttp: b88/782 bl:3.1022 bb:1.2079 rl:2.7704 rb:1.0720 dl:156-157 gd:1 +ttp: b87/782 bl:3.0165 bb:1.2057 rl:2.7705 rb:1.0721 dl:155-156 gd:1 +ttp: b86/782 bl:3.0397 bb:1.2651 rl:2.7705 rb:1.0721 dl:154-155 gd:1 +ttp: b84/782 bl:3.0152 bb:1.2149 rl:2.7706 rb:1.0721 dl:153-154 gd:1 +ttp: b85/782 bl:2.9739 bb:1.1940 rl:2.7706 rb:1.0722 dl:154-154 gd:1 +ttp: b83/782 bl:3.0337 bb:1.2123 rl:2.7707 rb:1.0722 dl:152-153 gd:1 +ttp: b82/782 bl:2.9742 bb:1.1972 rl:2.7707 rb:1.0722 dl:151-152 gd:1 +ttp: 
b80/782 bl:2.9126 bb:1.1930 rl:2.7708 rb:1.0723 dl:150-151 gd:1 +ttp: b81/782 bl:2.9346 bb:1.1671 rl:2.7708 rb:1.0723 dl:151-151 gd:1 +ttp: b79/782 bl:3.0330 bb:1.2042 rl:2.7709 rb:1.0723 dl:149-150 gd:1 +ttp: b78/782 bl:2.9102 bb:1.1291 rl:2.7709 rb:1.0723 dl:148-149 gd:1 +ttp: b76/782 bl:3.0527 bb:1.2247 rl:2.7710 rb:1.0724 dl:147-148 gd:1 +ttp: b77/782 bl:3.0290 bb:1.1704 rl:2.7710 rb:1.0724 dl:148-148 gd:1 +ttp: b75/782 bl:3.0918 bb:1.2139 rl:2.7711 rb:1.0724 dl:146-147 gd:1 +ttp: b74/782 bl:3.1173 bb:1.2742 rl:2.7712 rb:1.0725 dl:145-146 gd:1 +ttp: b73/782 bl:3.0571 bb:1.2094 rl:2.7713 rb:1.0725 dl:144-145 gd:1 +ttp: b71/782 bl:2.9578 bb:1.1540 rl:2.7713 rb:1.0725 dl:143-144 gd:1 +ttp: b72/782 bl:2.9335 bb:1.1921 rl:2.7713 rb:1.0725 dl:144-144 gd:1 +ttp: b70/782 bl:3.0645 bb:1.1645 rl:2.7714 rb:1.0726 dl:142-143 gd:1 +ttp: b68/782 bl:3.1062 bb:1.2067 rl:2.7715 rb:1.0726 dl:141-142 gd:1 +ttp: b69/782 bl:3.1092 bb:1.2336 rl:2.7716 rb:1.0726 dl:142-142 gd:1 +ttp: b67/782 bl:3.0705 bb:1.2411 rl:2.7716 rb:1.0727 dl:140-141 gd:1 +ttp: b66/782 bl:3.1106 bb:1.2742 rl:2.7717 rb:1.0727 dl:139-140 gd:1 +ttp: b64/782 bl:2.9964 bb:1.2420 rl:2.7718 rb:1.0727 dl:138-139 gd:1 +ttp: b65/782 bl:3.0363 bb:1.2191 rl:2.7718 rb:1.0728 dl:139-139 gd:1 +ttp: b63/782 bl:3.0267 bb:1.2207 rl:2.7719 rb:1.0728 dl:137-138 gd:1 +ttp: b62/782 bl:2.9896 bb:1.2093 rl:2.7719 rb:1.0728 dl:136-137 gd:1 +ttp: b61/782 bl:2.9341 bb:1.1471 rl:2.7719 rb:1.0728 dl:135-136 gd:1 +ttp: b60/782 bl:3.0732 bb:1.2333 rl:2.7720 rb:1.0729 dl:134-135 gd:1 +ttp: b58/782 bl:2.9815 bb:1.2298 rl:2.7721 rb:1.0729 dl:133-134 gd:1 +ttp: b59/782 bl:3.0571 bb:1.1941 rl:2.7721 rb:1.0729 dl:134-134 gd:1 +ttp: b57/782 bl:3.0410 bb:1.2258 rl:2.7722 rb:1.0730 dl:132-133 gd:1 +ttp: b56/782 bl:3.0585 bb:1.2054 rl:2.7722 rb:1.0730 dl:131-132 gd:1 +ttp: b55/782 bl:3.0799 bb:1.2369 rl:2.7723 rb:1.0730 dl:130-131 gd:1 +ttp: b53/782 bl:3.1227 bb:1.2311 rl:2.7724 rb:1.0731 dl:129-130 gd:1 +ttp: b54/782 bl:3.1130 bb:1.2747 rl:2.7724 rb:1.0731 dl:130-130 gd:1 +ttp: b52/782 bl:3.0579 bb:1.1965 rl:2.7725 rb:1.0731 dl:128-129 gd:1 +ttp: b51/782 bl:3.0561 bb:1.2216 rl:2.7726 rb:1.0731 dl:127-128 gd:1 +ttp: b50/782 bl:2.9903 bb:1.2275 rl:2.7726 rb:1.0732 dl:126-127 gd:1 +ttp: b48/782 bl:2.9966 bb:1.1715 rl:2.7726 rb:1.0732 dl:125-126 gd:1 +ttp: b49/782 bl:2.9761 bb:1.1741 rl:2.7727 rb:1.0732 dl:126-126 gd:1 +ttp: b47/782 bl:2.9477 bb:1.1780 rl:2.7727 rb:1.0732 dl:124-125 gd:1 +ttp: b46/782 bl:3.1329 bb:1.2251 rl:2.7728 rb:1.0733 dl:123-124 gd:1 +ttp: b45/782 bl:3.0960 bb:1.2386 rl:2.7728 rb:1.0733 dl:122-123 gd:1 +ttp: b43/782 bl:2.9816 bb:1.1857 rl:2.7729 rb:1.0733 dl:121-122 gd:1 +ttp: b44/782 bl:3.1549 bb:1.2294 rl:2.7730 rb:1.0733 dl:122-122 gd:1 +ttp: b42/782 bl:3.1152 bb:1.2471 rl:2.7730 rb:1.0734 dl:120-121 gd:1 +ttp: b41/782 bl:3.1459 bb:1.2857 rl:2.7731 rb:1.0734 dl:119-120 gd:1 +ttp: b39/782 bl:3.1431 bb:1.2421 rl:2.7732 rb:1.0734 dl:118-119 gd:1 +ttp: b40/782 bl:3.0125 bb:1.2117 rl:2.7732 rb:1.0735 dl:119-119 gd:1 +ttp: b38/782 bl:3.0464 bb:1.2159 rl:2.7733 rb:1.0735 dl:117-118 gd:1 +ttp: b37/782 bl:3.0921 bb:1.2139 rl:2.7733 rb:1.0735 dl:116-117 gd:1 +ttp: b36/782 bl:2.9953 bb:1.2249 rl:2.7734 rb:1.0735 dl:115-116 gd:1 +ttp: b34/782 bl:3.0880 bb:1.2501 rl:2.7734 rb:1.0736 dl:114-115 gd:1 +ttp: b35/782 bl:3.0184 bb:1.1986 rl:2.7735 rb:1.0736 dl:115-115 gd:1 +ttp: b33/782 bl:3.0927 bb:1.2107 rl:2.7735 rb:1.0736 dl:113-114 gd:1 +ttp: b32/782 bl:3.0295 bb:1.2111 rl:2.7736 rb:1.0736 dl:112-113 gd:1 +ttp: b31/782 bl:3.1922 bb:1.2650 rl:2.7736 rb:1.0737 
dl:111-112 gd:1 +ttp: b30/782 bl:3.1216 bb:1.2504 rl:2.7737 rb:1.0737 dl:110-111 gd:1 +ttp: b29/782 bl:3.0659 bb:1.2495 rl:2.7737 rb:1.0737 dl:109-110 gd:1 +ttp: b28/782 bl:3.0203 bb:1.2162 rl:2.7738 rb:1.0738 dl:108-109 gd:1 +ttp: b27/782 bl:3.1015 bb:1.2382 rl:2.7738 rb:1.0738 dl:107-108 gd:1 +ttp: b25/782 bl:3.3075 bb:1.3108 rl:2.7739 rb:1.0738 dl:106-107 gd:1 +ttp: b26/782 bl:3.0819 bb:1.2565 rl:2.7740 rb:1.0739 dl:107-107 gd:1 +ttp: b24/782 bl:3.0709 bb:1.2149 rl:2.7740 rb:1.0739 dl:105-106 gd:1 +ttp: b23/782 bl:3.1466 bb:1.2541 rl:2.7741 rb:1.0739 dl:104-105 gd:1 +ttp: b22/782 bl:3.1661 bb:1.2342 rl:2.7742 rb:1.0739 dl:103-104 gd:1 +ttp: b21/782 bl:3.2055 bb:1.2464 rl:2.7742 rb:1.0740 dl:102-103 gd:1 +ttp: b20/782 bl:3.1298 bb:1.2658 rl:2.7743 rb:1.0740 dl:101-102 gd:1 +ttp: b19/782 bl:3.1407 bb:1.2267 rl:2.7743 rb:1.0740 dl:100-101 gd:1 +ttp: b18/782 bl:3.1293 bb:1.2676 rl:2.7744 rb:1.0740 dl:99-100 gd:1 +ttp: b17/782 bl:3.1242 bb:1.2384 rl:2.7745 rb:1.0741 dl:98-99 gd:1 +ttp: b16/782 bl:3.0432 bb:1.2134 rl:2.7745 rb:1.0741 dl:97-98 gd:1 +ttp: b15/782 bl:3.2464 bb:1.2420 rl:2.7746 rb:1.0741 dl:95-97 gd:1 +ttp: b14/782 bl:3.1309 bb:1.2314 rl:2.7746 rb:1.0741 dl:94-95 gd:1 +ttp: b13/782 bl:3.1594 bb:1.2716 rl:2.7747 rb:1.0742 dl:93-94 gd:1 +ttp: b12/782 bl:3.1861 bb:1.2419 rl:2.7747 rb:1.0742 dl:92-93 gd:1 +ttp: b11/782 bl:3.2414 bb:1.2667 rl:2.7748 rb:1.0742 dl:90-92 gd:1 +ttp: b10/782 bl:3.1184 bb:1.2322 rl:2.7748 rb:1.0742 dl:89-90 gd:1 +ttp: b9/782 bl:3.2135 bb:1.2735 rl:2.7749 rb:1.0743 dl:87-89 gd:1 +ttp: b8/782 bl:3.2667 bb:1.2628 rl:2.7750 rb:1.0743 dl:86-87 gd:1 +ttp: b7/782 bl:3.2299 bb:1.2393 rl:2.7750 rb:1.0743 dl:84-86 gd:1 +ttp: b6/782 bl:3.2758 bb:1.2780 rl:2.7751 rb:1.0743 dl:82-84 gd:1 +ttp: b5/782 bl:3.3100 bb:1.2911 rl:2.7752 rb:1.0744 dl:80-82 gd:1 +ttp: b4/782 bl:3.2206 bb:1.2349 rl:2.7752 rb:1.0744 dl:78-80 gd:1 +ttp: b3/782 bl:3.3306 bb:1.2631 rl:2.7753 rb:1.0744 dl:75-78 gd:1 +ttp: b2/782 bl:3.1523 bb:1.1692 rl:2.7753 rb:1.0744 dl:70-75 gd:1 +ttp: b1/782 bl:3.3721 bb:1.2515 rl:2.7754 rb:1.0744 dl:45-70 gd:1 +quantized_ttt_phased val_loss:2.77538874 val_bpb:1.07443947 eval_time:3134722ms +total_eval_time:3134.7s diff --git a/train_h200_seed314.log b/train_h200_seed314.log new file mode 100644 index 0000000000..6779ba7079 --- /dev/null +++ b/train_h200_seed314.log @@ -0,0 +1,1318 @@ +Hyperparameters: + adam_eps: 1e-08 + adam_wd: 0.02 + artifact_dir: + attn_clip_sigmas: 13.0 + attn_out_gate_enabled: False + attn_out_gate_src: proj + beta1: 0.9 + beta2: 0.95 + bigram_blend_enabled: True + bigram_blend_lambda: 0.03 + caseops_enabled: False + compressor: brotli + data_dir: ./data/ + datasets_dir: ./data/datasets/fineweb10B_sp8192 + distributed: False + ema_decay: 0.9965 + embed_bits: 7 + embed_clip_sigmas: 14.0 + embed_lr: 0.6 + embed_wd: 0.085 + enable_looping_at: 0.35 + entropy_weighted_loss: False + eval_seq_len: 2048 + eval_stride: 64 + ewl_max_weight: 3.0 + ewl_min_weight: 0.3 + fused_ce_enabled: True + gate_window: 12 + gated_attn_enabled: False + gated_attn_init_std: 0.01 + gated_attn_quant_gate: False + global_ttt_batch_seqs: 32 + global_ttt_chunk_tokens: 32768 + global_ttt_epochs: 1 + global_ttt_grad_clip: 1.0 + global_ttt_lr: 0.001 + global_ttt_momentum: 0.9 + global_ttt_respect_doc_boundaries: True + global_ttt_warmup_chunks: 0 + global_ttt_warmup_start_lr: 0.0 + gptq_calibration_batches: 16 + gptq_reserve_seconds: 4.0 + grad_accum_steps: 8 + grad_clip_norm: 0.3 + is_main_process: True + iterations: 5000 + leaky_relu_slope: 0.3 + ln_scale: True + 
local_rank: 0 + logfile: logs/d8d5bf92-eaa0-4721-90ab-6a2d83d26700.txt + logit_softcap: 30.0 + loop_end: 5 + loop_start: 3 + lqer_asym_enabled: True + lqer_asym_group: 64 + lqer_enabled: True + lqer_factor_bits: 4 + lqer_rank: 4 + lqer_top_k: 3 + matrix_bits: 6 + matrix_clip_sigmas: 12.85 + matrix_lr: 0.026 + max_wallclock_seconds: 0.0 + min_lr: 0.1 + mlp_clip_sigmas: 11.5 + mlp_mult: 4.0 + model_dim: 512 + model_path: final_model.pt + muon_backend_steps: 5 + muon_momentum: 0.97 + muon_momentum_warmup_start: 0.92 + muon_momentum_warmup_steps: 1500 + muon_row_normalize: True + muon_wd: 0.095 + num_heads: 8 + num_kv_heads: 4 + num_layers: 11 + num_loops: 2 + parallel_final_lane: mean + parallel_start_layer: 8 + phased_ttt_num_phases: 1 + phased_ttt_prefix_docs: 2000 + qk_gain_init: 5.25 + quantized_model_path: final_model.int6.ptz + rank: 0 + rope_base: 10000.0 + rope_dims: 16 + rope_train_seq_len: 2048 + rope_yarn: False + run_id: d8d5bf92-eaa0-4721-90ab-6a2d83d26700 + scalar_lr: 0.02 + seed: 314 + skip_gates_enabled: True + smear_gate_enabled: True + sparse_attn_gate_enabled: True + sparse_attn_gate_init_std: 0.0 + sparse_attn_gate_scale: 1.0 + temp_scale_enabled: False + temp_scale_ent_high: 6.0 + temp_scale_ent_low: 2.0 + temp_scale_high: 1.15 + temp_scale_low: 0.85 + tie_embeddings: True + tied_embed_init_std: 0.005 + tied_embed_lr: 0.03 + tokenizer_path: ./data/tokenizers/fineweb_8192_bpe.model + train_batch_tokens: 786432 + train_files: ./data/datasets/fineweb10B_sp8192/fineweb_train_*.bin + train_log_every: 500 + train_seq_len: 2048 + ttt_batch_size: 64 + ttt_beta1: 0.0 + ttt_beta2: 0.999 + ttt_chunk_size: 48 + ttt_enabled: True + ttt_eval_batches: + ttt_eval_seq_len: 2048 + ttt_grad_steps: 1 + ttt_k_lora: True + ttt_lora_lr: 0.0001 + ttt_lora_rank: 96 + ttt_mlp_lora: True + ttt_o_lora: True + ttt_optimizer: adam + ttt_weight_decay: 1.0 + val_batch_tokens: 524288 + val_bytes_files: ./data/datasets/fineweb10B_sp8192/fineweb_val_bytes_*.bin + val_doc_fraction: 1.0 + val_files: ./data/datasets/fineweb10B_sp8192/fineweb_val_*.bin + val_loss_every: 5000 + vocab_size: 8192 + warmdown_frac: 0.75 + warmup_steps: 20 + world_size: 1 + xsa_last_n: 11 +train_shards: 5 +val_tokens: 40540160 +model_params:35945671 +warmup_cu_buckets:64,128,192,256 iters_each:3 +warmup_step: 1/20 +warmup_step: 2/20 +warmup_step: 3/20 +warmup_step: 4/20 +warmup_step: 5/20 +warmup_step: 6/20 +warmup_step: 10/20 +warmup_step: 20/20 +loop_warmup:enabled encoder:[0, 1, 2, 3, 4, 5, 3, 4] decoder:[5, 3, 4, 5, 6, 7, 8, 9, 10] +loop_warmup_step: 1/20 +loop_warmup_step: 2/20 +loop_warmup_step: 3/20 +loop_warmup_step: 4/20 +loop_warmup_step: 5/20 +loop_warmup_step: 6/20 +loop_warmup_step: 10/20 +loop_warmup_step: 20/20 +bigram_blend:enabled lambda=0.03 +0/5000 val_loss: 7.9757 val_bpb: 3.0875 +1/5000 train_loss: 9.0069 train_time: 0.0m tok/s: 1293926 +2/5000 train_loss: 12.4125 train_time: 0.0m tok/s: 1234958 +3/5000 train_loss: 11.1948 train_time: 0.0m tok/s: 1208906 +4/5000 train_loss: 9.6848 train_time: 0.0m tok/s: 1197792 +5/5000 train_loss: 8.4887 train_time: 0.1m tok/s: 1192290 +500/5000 train_loss: 3.3151 train_time: 5.7m tok/s: 1156763 +1000/5000 train_loss: 3.2201 train_time: 11.3m tok/s: 1155853 +1500/5000 train_loss: 3.2322 train_time: 17.0m tok/s: 1155481 +layer_loop:enabled step:1750 frac:0.350 encoder:[0, 1, 2, 3, 4, 5, 3, 4] decoder:[5, 3, 4, 5, 6, 7, 8, 9, 10] +2000/5000 train_loss: 3.0781 train_time: 24.1m tok/s: 1087134 +2500/5000 train_loss: 3.0215 train_time: 32.7m tok/s: 1003143 +3000/5000 train_loss: 
2.9499 train_time: 41.2m tok/s: 955318 +3500/5000 train_loss: 2.8876 train_time: 49.7m tok/s: 923886 +4000/5000 train_loss: 2.7554 train_time: 58.1m tok/s: 901717 +4500/5000 train_loss: 2.7691 train_time: 66.6m tok/s: 885132 +5000/5000 train_loss: 2.8088 train_time: 75.2m tok/s: 871959 +bigram_blend:enabled lambda=0.03 +5000/5000 val_loss: 2.7864 val_bpb: 1.0787 +peak memory allocated: 42423 MiB reserved: 59798 MiB +ema:applying EMA weights +bigram_blend:enabled lambda=0.03 +diagnostic pre-quantization post-ema val_loss:2.75113786 val_bpb:1.06502265 eval_time:49948ms +Serialized model: 135417533 bytes +Code size (uncompressed): 164068 bytes +Code size (compressed): 32949 bytes +GPTQ:collecting Hessians from calibration data... +GPTQ:collected 67 Hessians in 3.4s +Quantized weights: + gptq (int6): blocks.attn.c_k.weight, blocks.attn.c_q.weight, blocks.attn.c_v.weight, blocks.attn.proj.weight, blocks.mlp.fc.weight, blocks.mlp.proj.weight + gptq (int6)+lqer_asym: blocks.mlp.fc.weight + gptq (int7)+lqer_asym: tok_emb.weight + passthrough (float16): blocks.attn.attn_gate_w, blocks.attn.q_gain, blocks.attn_scale, blocks.mlp_scale, blocks.resid_mix, parallel_post_lambdas, parallel_resid_lambdas, skip_gates, skip_weights, smear_gate.weight, smear_lambda +Serialized model quantized+brotli: 16115062 bytes +Total submission size quantized+brotli: 16148011 bytes +bigram_blend:enabled lambda=0.03 +diagnostic quantized val_loss:2.77666730 val_bpb:1.07490562 eval_time:33188ms +ttt_lora:warming up compile (random tokens, no val data) +ttt_lora:compile warmup done (2.8s) + +beginning TTT eval timer +ttt_phased: total_docs:50000 prefix_docs:2000 suffix_docs:48000 num_phases:1 boundaries:[2000] +ttp: b782/782 bl:2.5601 bb:1.0336 rl:2.5601 rb:1.0336 dl:26524-79464 gd:0 +ttp: b781/782 bl:2.5664 bb:1.0595 rl:2.5641 rb:1.0500 dl:14510-25988 gd:0 +ttp: b780/782 bl:2.6243 bb:1.0767 rl:2.5821 rb:1.0580 dl:11071-14414 gd:0 +ttp: b779/782 bl:2.6509 bb:1.0797 rl:2.5953 rb:1.0622 dl:9037-11049 gd:0 +ttp: b778/782 bl:2.7932 bb:1.1174 rl:2.6230 rb:1.0701 dl:7961-8997 gd:0 +ttp: b777/782 bl:2.7328 bb:1.0931 rl:2.6352 rb:1.0727 dl:7190-7938 gd:0 +ttp: b776/782 bl:2.7258 bb:1.0906 rl:2.6434 rb:1.0743 dl:6364-7180 gd:0 +ttp: b775/782 bl:2.6922 bb:1.0658 rl:2.6471 rb:1.0736 dl:5853-6355 gd:0 +ttp: b774/782 bl:2.7339 bb:1.0797 rl:2.6528 rb:1.0740 dl:5552-5852 gd:0 +ttp: b773/782 bl:2.6577 bb:1.0784 rl:2.6531 rb:1.0743 dl:5203-5550 gd:0 +ttp: b772/782 bl:2.7691 bb:1.1076 rl:2.6592 rb:1.0761 dl:4937-5193 gd:0 +ttp: b771/782 bl:2.7682 bb:1.0823 rl:2.6643 rb:1.0764 dl:4701-4937 gd:0 +ttp: b770/782 bl:2.6653 bb:1.0547 rl:2.6644 rb:1.0754 dl:4479-4698 gd:0 +ttp: b769/782 bl:2.7772 bb:1.0991 rl:2.6689 rb:1.0764 dl:4307-4479 gd:0 +ttp: b768/782 bl:2.7129 bb:1.0887 rl:2.6705 rb:1.0768 dl:4128-4306 gd:0 +ttp: b767/782 bl:2.7602 bb:1.1020 rl:2.6735 rb:1.0777 dl:3963-4123 gd:0 +ttp: b766/782 bl:2.6503 bb:1.0376 rl:2.6728 rb:1.0764 dl:3846-3962 gd:0 +ttp: b765/782 bl:2.7885 bb:1.0951 rl:2.6763 rb:1.0770 dl:3743-3845 gd:0 +ttp: b764/782 bl:2.7715 bb:1.1012 rl:2.6790 rb:1.0777 dl:3639-3742 gd:0 +ttp: b763/782 bl:2.8004 bb:1.1050 rl:2.6822 rb:1.0784 dl:3536-3637 gd:0 +ttp: b762/782 bl:2.8307 bb:1.0774 rl:2.6860 rb:1.0784 dl:3431-3533 gd:0 +ttp: b761/782 bl:2.7551 bb:1.0657 rl:2.6877 rb:1.0781 dl:3336-3430 gd:0 +ttp: b760/782 bl:2.8490 bb:1.1189 rl:2.6913 rb:1.0790 dl:3255-3334 gd:0 +ttp: b759/782 bl:2.7303 bb:1.1020 rl:2.6922 rb:1.0795 dl:3188-3253 gd:0 +ttp: b758/782 bl:2.8857 bb:1.0888 rl:2.6963 rb:1.0797 dl:3108-3187 gd:0 +ttp: b757/782 
bl:2.6433 bb:1.0216 rl:2.6952 rb:1.0785 dl:3033-3108 gd:0 +ttp: b756/782 bl:2.7869 bb:1.0802 rl:2.6970 rb:1.0786 dl:2973-3032 gd:0 +ttp: b755/782 bl:2.6928 bb:1.0433 rl:2.6969 rb:1.0779 dl:2899-2972 gd:0 +ttp: b754/782 bl:2.6920 bb:1.0563 rl:2.6968 rb:1.0775 dl:2839-2899 gd:0 +ttp: b753/782 bl:2.7509 bb:1.0492 rl:2.6977 rb:1.0770 dl:2795-2838 gd:0 +ttp: b752/782 bl:2.7684 bb:1.0622 rl:2.6989 rb:1.0767 dl:2740-2793 gd:0 +ttp: b751/782 bl:2.7974 bb:1.0739 rl:2.7005 rb:1.0767 dl:2689-2740 gd:0 +ttpp: phase:1/1 pd:2000 gd:2000 t:1653.6s +tttg: c1/333 lr:0.001000 t:0.1s +tttg: c2/333 lr:0.001000 t:0.2s +tttg: c3/333 lr:0.001000 t:0.3s +tttg: c4/333 lr:0.001000 t:0.5s +tttg: c5/333 lr:0.001000 t:0.6s +tttg: c6/333 lr:0.000999 t:0.7s +tttg: c7/333 lr:0.000999 t:0.8s +tttg: c8/333 lr:0.000999 t:0.9s +tttg: c9/333 lr:0.000999 t:1.0s +tttg: c10/333 lr:0.000998 t:1.1s +tttg: c11/333 lr:0.000998 t:1.2s +tttg: c12/333 lr:0.000997 t:1.4s +tttg: c13/333 lr:0.000997 t:1.5s +tttg: c14/333 lr:0.000996 t:1.6s +tttg: c15/333 lr:0.000996 t:1.7s +tttg: c16/333 lr:0.000995 t:1.8s +tttg: c17/333 lr:0.000994 t:1.9s +tttg: c18/333 lr:0.000994 t:2.0s +tttg: c19/333 lr:0.000993 t:2.1s +tttg: c20/333 lr:0.000992 t:2.3s +tttg: c21/333 lr:0.000991 t:2.4s +tttg: c22/333 lr:0.000990 t:2.5s +tttg: c23/333 lr:0.000989 t:2.6s +tttg: c24/333 lr:0.000988 t:2.7s +tttg: c25/333 lr:0.000987 t:2.8s +tttg: c26/333 lr:0.000986 t:2.9s +tttg: c27/333 lr:0.000985 t:3.0s +tttg: c28/333 lr:0.000984 t:3.2s +tttg: c29/333 lr:0.000983 t:3.3s +tttg: c30/333 lr:0.000981 t:3.4s +tttg: c31/333 lr:0.000980 t:3.5s +tttg: c32/333 lr:0.000979 t:3.6s +tttg: c33/333 lr:0.000977 t:3.7s +tttg: c34/333 lr:0.000976 t:3.8s +tttg: c35/333 lr:0.000974 t:3.9s +tttg: c36/333 lr:0.000973 t:4.1s +tttg: c37/333 lr:0.000971 t:4.2s +tttg: c38/333 lr:0.000970 t:4.3s +tttg: c39/333 lr:0.000968 t:4.4s +tttg: c40/333 lr:0.000966 t:4.5s +tttg: c41/333 lr:0.000965 t:4.6s +tttg: c42/333 lr:0.000963 t:4.7s +tttg: c43/333 lr:0.000961 t:4.8s +tttg: c44/333 lr:0.000959 t:5.0s +tttg: c45/333 lr:0.000957 t:5.1s +tttg: c46/333 lr:0.000955 t:5.2s +tttg: c47/333 lr:0.000953 t:5.3s +tttg: c48/333 lr:0.000951 t:5.4s +tttg: c49/333 lr:0.000949 t:5.5s +tttg: c50/333 lr:0.000947 t:5.6s +tttg: c51/333 lr:0.000945 t:5.8s +tttg: c52/333 lr:0.000943 t:5.9s +tttg: c53/333 lr:0.000941 t:6.0s +tttg: c54/333 lr:0.000938 t:6.1s +tttg: c55/333 lr:0.000936 t:6.2s +tttg: c56/333 lr:0.000934 t:6.3s +tttg: c57/333 lr:0.000931 t:6.4s +tttg: c58/333 lr:0.000929 t:6.5s +tttg: c59/333 lr:0.000927 t:6.7s +tttg: c60/333 lr:0.000924 t:6.8s +tttg: c61/333 lr:0.000922 t:6.9s +tttg: c62/333 lr:0.000919 t:7.0s +tttg: c63/333 lr:0.000916 t:7.1s +tttg: c64/333 lr:0.000914 t:7.2s +tttg: c65/333 lr:0.000911 t:7.3s +tttg: c66/333 lr:0.000908 t:7.5s +tttg: c67/333 lr:0.000906 t:7.6s +tttg: c68/333 lr:0.000903 t:7.7s +tttg: c69/333 lr:0.000900 t:7.8s +tttg: c70/333 lr:0.000897 t:7.9s +tttg: c71/333 lr:0.000894 t:8.0s +tttg: c72/333 lr:0.000891 t:8.1s +tttg: c73/333 lr:0.000888 t:8.2s +tttg: c74/333 lr:0.000885 t:8.4s +tttg: c75/333 lr:0.000882 t:8.5s +tttg: c76/333 lr:0.000879 t:8.6s +tttg: c77/333 lr:0.000876 t:8.7s +tttg: c78/333 lr:0.000873 t:8.8s +tttg: c79/333 lr:0.000870 t:8.9s +tttg: c80/333 lr:0.000867 t:9.0s +tttg: c81/333 lr:0.000863 t:9.1s +tttg: c82/333 lr:0.000860 t:9.3s +tttg: c83/333 lr:0.000857 t:9.4s +tttg: c84/333 lr:0.000854 t:9.5s +tttg: c85/333 lr:0.000850 t:9.6s +tttg: c86/333 lr:0.000847 t:9.7s +tttg: c87/333 lr:0.000843 t:9.8s +tttg: c88/333 lr:0.000840 t:9.9s +tttg: c89/333 lr:0.000836 
t:10.0s +tttg: c90/333 lr:0.000833 t:10.2s +tttg: c91/333 lr:0.000829 t:10.3s +tttg: c92/333 lr:0.000826 t:10.4s +tttg: c93/333 lr:0.000822 t:10.5s +tttg: c94/333 lr:0.000819 t:10.6s +tttg: c95/333 lr:0.000815 t:10.7s +tttg: c96/333 lr:0.000811 t:10.8s +tttg: c97/333 lr:0.000807 t:11.0s +tttg: c98/333 lr:0.000804 t:11.1s +tttg: c99/333 lr:0.000800 t:11.2s +tttg: c100/333 lr:0.000796 t:11.3s +tttg: c101/333 lr:0.000792 t:11.4s +tttg: c102/333 lr:0.000789 t:11.5s +tttg: c103/333 lr:0.000785 t:11.6s +tttg: c104/333 lr:0.000781 t:11.7s +tttg: c105/333 lr:0.000777 t:11.9s +tttg: c106/333 lr:0.000773 t:12.0s +tttg: c107/333 lr:0.000769 t:12.1s +tttg: c108/333 lr:0.000765 t:12.2s +tttg: c109/333 lr:0.000761 t:12.3s +tttg: c110/333 lr:0.000757 t:12.4s +tttg: c111/333 lr:0.000753 t:12.5s +tttg: c112/333 lr:0.000749 t:12.7s +tttg: c113/333 lr:0.000745 t:12.8s +tttg: c114/333 lr:0.000740 t:12.9s +tttg: c115/333 lr:0.000736 t:13.0s +tttg: c116/333 lr:0.000732 t:13.1s +tttg: c117/333 lr:0.000728 t:13.2s +tttg: c118/333 lr:0.000724 t:13.3s +tttg: c119/333 lr:0.000719 t:13.4s +tttg: c120/333 lr:0.000715 t:13.6s +tttg: c121/333 lr:0.000711 t:13.7s +tttg: c122/333 lr:0.000707 t:13.8s +tttg: c123/333 lr:0.000702 t:13.9s +tttg: c124/333 lr:0.000698 t:14.0s +tttg: c125/333 lr:0.000694 t:14.1s +tttg: c126/333 lr:0.000689 t:14.2s +tttg: c127/333 lr:0.000685 t:14.4s +tttg: c128/333 lr:0.000680 t:14.5s +tttg: c129/333 lr:0.000676 t:14.6s +tttg: c130/333 lr:0.000672 t:14.7s +tttg: c131/333 lr:0.000667 t:14.8s +tttg: c132/333 lr:0.000663 t:14.9s +tttg: c133/333 lr:0.000658 t:15.1s +tttg: c134/333 lr:0.000654 t:15.2s +tttg: c135/333 lr:0.000649 t:15.3s +tttg: c136/333 lr:0.000645 t:15.4s +tttg: c137/333 lr:0.000640 t:15.5s +tttg: c138/333 lr:0.000635 t:15.6s +tttg: c139/333 lr:0.000631 t:15.7s +tttg: c140/333 lr:0.000626 t:15.9s +tttg: c141/333 lr:0.000622 t:16.0s +tttg: c142/333 lr:0.000617 t:16.1s +tttg: c143/333 lr:0.000613 t:16.2s +tttg: c144/333 lr:0.000608 t:16.3s +tttg: c145/333 lr:0.000603 t:16.4s +tttg: c146/333 lr:0.000599 t:16.5s +tttg: c147/333 lr:0.000594 t:16.6s +tttg: c148/333 lr:0.000589 t:16.8s +tttg: c149/333 lr:0.000585 t:16.9s +tttg: c150/333 lr:0.000580 t:17.0s +tttg: c151/333 lr:0.000575 t:17.1s +tttg: c152/333 lr:0.000571 t:17.2s +tttg: c153/333 lr:0.000566 t:17.3s +tttg: c154/333 lr:0.000561 t:17.4s +tttg: c155/333 lr:0.000557 t:17.5s +tttg: c156/333 lr:0.000552 t:17.7s +tttg: c157/333 lr:0.000547 t:17.8s +tttg: c158/333 lr:0.000543 t:17.9s +tttg: c159/333 lr:0.000538 t:18.0s +tttg: c160/333 lr:0.000533 t:18.1s +tttg: c161/333 lr:0.000528 t:18.2s +tttg: c162/333 lr:0.000524 t:18.3s +tttg: c163/333 lr:0.000519 t:18.5s +tttg: c164/333 lr:0.000514 t:18.6s +tttg: c165/333 lr:0.000509 t:18.7s +tttg: c166/333 lr:0.000505 t:18.8s +tttg: c167/333 lr:0.000500 t:18.9s +tttg: c168/333 lr:0.000495 t:19.0s +tttg: c169/333 lr:0.000491 t:19.1s +tttg: c170/333 lr:0.000486 t:19.2s +tttg: c171/333 lr:0.000481 t:19.4s +tttg: c172/333 lr:0.000476 t:19.5s +tttg: c173/333 lr:0.000472 t:19.6s +tttg: c174/333 lr:0.000467 t:19.7s +tttg: c175/333 lr:0.000462 t:19.8s +tttg: c176/333 lr:0.000457 t:19.9s +tttg: c177/333 lr:0.000453 t:20.0s +tttg: c178/333 lr:0.000448 t:20.1s +tttg: c179/333 lr:0.000443 t:20.3s +tttg: c180/333 lr:0.000439 t:20.4s +tttg: c181/333 lr:0.000434 t:20.5s +tttg: c182/333 lr:0.000429 t:20.6s +tttg: c183/333 lr:0.000425 t:20.7s +tttg: c184/333 lr:0.000420 t:20.8s +tttg: c185/333 lr:0.000415 t:20.9s +tttg: c186/333 lr:0.000411 t:21.1s +tttg: c187/333 lr:0.000406 t:21.2s +tttg: c188/333 lr:0.000401 
t:21.3s +tttg: c189/333 lr:0.000397 t:21.4s +tttg: c190/333 lr:0.000392 t:21.5s +tttg: c191/333 lr:0.000387 t:21.6s +tttg: c192/333 lr:0.000383 t:21.7s +tttg: c193/333 lr:0.000378 t:21.8s +tttg: c194/333 lr:0.000374 t:22.0s +tttg: c195/333 lr:0.000369 t:22.1s +tttg: c196/333 lr:0.000365 t:22.2s +tttg: c197/333 lr:0.000360 t:22.3s +tttg: c198/333 lr:0.000355 t:22.4s +tttg: c199/333 lr:0.000351 t:22.5s +tttg: c200/333 lr:0.000346 t:22.6s +tttg: c201/333 lr:0.000342 t:22.7s +tttg: c202/333 lr:0.000337 t:22.9s +tttg: c203/333 lr:0.000333 t:23.0s +tttg: c204/333 lr:0.000328 t:23.1s +tttg: c205/333 lr:0.000324 t:23.2s +tttg: c206/333 lr:0.000320 t:23.3s +tttg: c207/333 lr:0.000315 t:23.4s +tttg: c208/333 lr:0.000311 t:23.5s +tttg: c209/333 lr:0.000306 t:23.7s +tttg: c210/333 lr:0.000302 t:23.8s +tttg: c211/333 lr:0.000298 t:23.9s +tttg: c212/333 lr:0.000293 t:24.0s +tttg: c213/333 lr:0.000289 t:24.1s +tttg: c214/333 lr:0.000285 t:24.2s +tttg: c215/333 lr:0.000281 t:24.3s +tttg: c216/333 lr:0.000276 t:24.4s +tttg: c217/333 lr:0.000272 t:24.6s +tttg: c218/333 lr:0.000268 t:24.7s +tttg: c219/333 lr:0.000264 t:24.8s +tttg: c220/333 lr:0.000260 t:24.9s +tttg: c221/333 lr:0.000255 t:25.0s +tttg: c222/333 lr:0.000251 t:25.1s +tttg: c223/333 lr:0.000247 t:25.2s +tttg: c224/333 lr:0.000243 t:25.3s +tttg: c225/333 lr:0.000239 t:25.5s +tttg: c226/333 lr:0.000235 t:25.6s +tttg: c227/333 lr:0.000231 t:25.7s +tttg: c228/333 lr:0.000227 t:25.8s +tttg: c229/333 lr:0.000223 t:25.9s +tttg: c230/333 lr:0.000219 t:26.0s +tttg: c231/333 lr:0.000215 t:26.1s +tttg: c232/333 lr:0.000211 t:26.3s +tttg: c233/333 lr:0.000208 t:26.4s +tttg: c234/333 lr:0.000204 t:26.5s +tttg: c235/333 lr:0.000200 t:26.6s +tttg: c236/333 lr:0.000196 t:26.7s +tttg: c237/333 lr:0.000193 t:26.8s +tttg: c238/333 lr:0.000189 t:26.9s +tttg: c239/333 lr:0.000185 t:27.0s +tttg: c240/333 lr:0.000181 t:27.2s +tttg: c241/333 lr:0.000178 t:27.3s +tttg: c242/333 lr:0.000174 t:27.4s +tttg: c243/333 lr:0.000171 t:27.5s +tttg: c244/333 lr:0.000167 t:27.6s +tttg: c245/333 lr:0.000164 t:27.7s +tttg: c246/333 lr:0.000160 t:27.8s +tttg: c247/333 lr:0.000157 t:28.0s +tttg: c248/333 lr:0.000153 t:28.1s +tttg: c249/333 lr:0.000150 t:28.2s +tttg: c250/333 lr:0.000146 t:28.3s +tttg: c251/333 lr:0.000143 t:28.4s +tttg: c252/333 lr:0.000140 t:28.5s +tttg: c253/333 lr:0.000137 t:28.6s +tttg: c254/333 lr:0.000133 t:28.7s +tttg: c255/333 lr:0.000130 t:28.9s +tttg: c256/333 lr:0.000127 t:29.0s +tttg: c257/333 lr:0.000124 t:29.1s +tttg: c258/333 lr:0.000121 t:29.2s +tttg: c259/333 lr:0.000118 t:29.3s +tttg: c260/333 lr:0.000115 t:29.4s +tttg: c261/333 lr:0.000112 t:29.5s +tttg: c262/333 lr:0.000109 t:29.6s +tttg: c263/333 lr:0.000106 t:29.8s +tttg: c264/333 lr:0.000103 t:29.9s +tttg: c265/333 lr:0.000100 t:30.0s +tttg: c266/333 lr:0.000097 t:30.1s +tttg: c267/333 lr:0.000094 t:30.2s +tttg: c268/333 lr:0.000092 t:30.3s +tttg: c269/333 lr:0.000089 t:30.4s +tttg: c270/333 lr:0.000086 t:30.6s +tttg: c271/333 lr:0.000084 t:30.7s +tttg: c272/333 lr:0.000081 t:30.8s +tttg: c273/333 lr:0.000078 t:30.9s +tttg: c274/333 lr:0.000076 t:31.0s +tttg: c275/333 lr:0.000073 t:31.1s +tttg: c276/333 lr:0.000071 t:31.2s +tttg: c277/333 lr:0.000069 t:31.3s +tttg: c278/333 lr:0.000066 t:31.5s +tttg: c279/333 lr:0.000064 t:31.6s +tttg: c280/333 lr:0.000062 t:31.7s +tttg: c281/333 lr:0.000059 t:31.8s +tttg: c282/333 lr:0.000057 t:31.9s +tttg: c283/333 lr:0.000055 t:32.0s +tttg: c284/333 lr:0.000053 t:32.1s +tttg: c285/333 lr:0.000051 t:32.3s +tttg: c286/333 lr:0.000049 t:32.4s +tttg: c287/333 
lr:0.000047 t:32.5s +tttg: c288/333 lr:0.000045 t:32.6s +tttg: c289/333 lr:0.000043 t:32.7s +tttg: c290/333 lr:0.000041 t:32.8s +tttg: c291/333 lr:0.000039 t:32.9s +tttg: c292/333 lr:0.000037 t:33.1s +tttg: c293/333 lr:0.000035 t:33.2s +tttg: c294/333 lr:0.000034 t:33.3s +tttg: c295/333 lr:0.000032 t:33.4s +tttg: c296/333 lr:0.000030 t:33.5s +tttg: c297/333 lr:0.000029 t:33.6s +tttg: c298/333 lr:0.000027 t:33.7s +tttg: c299/333 lr:0.000026 t:33.9s +tttg: c300/333 lr:0.000024 t:34.0s +tttg: c301/333 lr:0.000023 t:34.1s +tttg: c302/333 lr:0.000021 t:34.2s +tttg: c303/333 lr:0.000020 t:34.3s +tttg: c304/333 lr:0.000019 t:34.4s +tttg: c305/333 lr:0.000017 t:34.5s +tttg: c306/333 lr:0.000016 t:34.6s +tttg: c307/333 lr:0.000015 t:34.8s +tttg: c308/333 lr:0.000014 t:34.9s +tttg: c309/333 lr:0.000013 t:35.0s +tttg: c310/333 lr:0.000012 t:35.1s +tttg: c311/333 lr:0.000011 t:35.2s +tttg: c312/333 lr:0.000010 t:35.3s +tttg: c313/333 lr:0.000009 t:35.4s +tttg: c314/333 lr:0.000008 t:35.6s +tttg: c315/333 lr:0.000007 t:35.7s +tttg: c316/333 lr:0.000006 t:35.8s +tttg: c317/333 lr:0.000006 t:35.9s +tttg: c318/333 lr:0.000005 t:36.0s +tttg: c319/333 lr:0.000004 t:36.1s +tttg: c320/333 lr:0.000004 t:36.2s +tttg: c321/333 lr:0.000003 t:36.3s +tttg: c322/333 lr:0.000003 t:36.5s +tttg: c323/333 lr:0.000002 t:36.6s +tttg: c324/333 lr:0.000002 t:36.7s +tttg: c325/333 lr:0.000001 t:36.8s +tttg: c326/333 lr:0.000001 t:36.9s +tttg: c327/333 lr:0.000001 t:37.0s +tttg: c328/333 lr:0.000001 t:37.1s +tttg: c329/333 lr:0.000000 t:37.3s +tttg: c330/333 lr:0.000000 t:37.4s +tttg: c331/333 lr:0.000000 t:37.5s +tttg: c332/333 lr:0.000000 t:37.6s +ttpr: phase:1/1 t:1693.9s +ttp: b750/782 bl:2.8366 bb:1.0701 rl:2.7025 rb:1.0766 dl:2638-2688 gd:1 +ttp: b749/782 bl:2.8362 bb:1.0912 rl:2.7045 rb:1.0768 dl:2580-2638 gd:1 +ttp: b748/782 bl:2.8066 bb:1.0747 rl:2.7060 rb:1.0768 dl:2539-2578 gd:1 +ttp: b747/782 bl:2.7876 bb:1.0602 rl:2.7071 rb:1.0765 dl:2501-2538 gd:1 +ttp: b746/782 bl:2.6787 bb:1.0547 rl:2.7067 rb:1.0762 dl:2459-2501 gd:1 +ttp: b745/782 bl:2.7911 bb:1.0910 rl:2.7079 rb:1.0764 dl:2421-2458 gd:1 +ttp: b744/782 bl:2.6577 bb:1.0588 rl:2.7072 rb:1.0762 dl:2388-2419 gd:1 +ttp: b743/782 bl:2.7153 bb:1.0454 rl:2.7073 rb:1.0758 dl:2355-2388 gd:1 +ttp: b742/782 bl:2.7874 bb:1.0677 rl:2.7083 rb:1.0757 dl:2319-2353 gd:1 +ttp: b741/782 bl:2.8086 bb:1.1056 rl:2.7095 rb:1.0761 dl:2286-2319 gd:1 +ttp: b740/782 bl:2.7383 bb:1.0338 rl:2.7098 rb:1.0756 dl:2254-2285 gd:1 +ttp: b739/782 bl:2.8272 bb:1.0735 rl:2.7111 rb:1.0755 dl:2227-2253 gd:1 +ttp: b738/782 bl:2.7497 bb:1.0549 rl:2.7115 rb:1.0753 dl:2194-2227 gd:1 +ttp: b737/782 bl:2.8018 bb:1.0684 rl:2.7125 rb:1.0752 dl:2165-2193 gd:1 +ttp: b736/782 bl:2.6763 bb:1.0432 rl:2.7121 rb:1.0749 dl:2140-2165 gd:1 +ttp: b735/782 bl:2.8341 bb:1.0793 rl:2.7134 rb:1.0749 dl:2116-2140 gd:1 +ttp: b734/782 bl:2.7766 bb:1.0588 rl:2.7140 rb:1.0748 dl:2091-2115 gd:1 +ttp: b733/782 bl:2.7597 bb:1.0531 rl:2.7145 rb:1.0745 dl:2062-2090 gd:1 +ttp: b732/782 bl:2.8232 bb:1.0988 rl:2.7155 rb:1.0748 dl:2041-2062 gd:1 +ttp: b731/782 bl:2.7771 bb:1.0599 rl:2.7161 rb:1.0746 dl:2017-2041 gd:1 +ttp: b730/782 bl:2.7708 bb:1.0908 rl:2.7166 rb:1.0748 dl:1995-2016 gd:1 +ttp: b729/782 bl:2.7233 bb:1.0378 rl:2.7166 rb:1.0744 dl:1978-1994 gd:1 +ttp: b728/782 bl:2.7598 bb:1.0689 rl:2.7170 rb:1.0744 dl:1960-1977 gd:1 +ttp: b727/782 bl:2.7746 bb:1.0562 rl:2.7175 rb:1.0742 dl:1936-1960 gd:1 +ttp: b726/782 bl:2.7997 bb:1.0655 rl:2.7182 rb:1.0741 dl:1915-1936 gd:1 +ttp: b725/782 bl:2.7625 bb:1.0708 rl:2.7186 rb:1.0741 
dl:1900-1915 gd:1 +ttp: b724/782 bl:2.7555 bb:1.0533 rl:2.7189 rb:1.0739 dl:1885-1900 gd:1 +ttp: b723/782 bl:2.7832 bb:1.0618 rl:2.7194 rb:1.0738 dl:1861-1885 gd:1 +ttp: b722/782 bl:2.7738 bb:1.0605 rl:2.7198 rb:1.0737 dl:1846-1861 gd:1 +ttp: b721/782 bl:2.7510 bb:1.0269 rl:2.7201 rb:1.0733 dl:1832-1846 gd:1 +ttp: b720/782 bl:2.8254 bb:1.0792 rl:2.7209 rb:1.0734 dl:1816-1832 gd:1 +ttp: b719/782 bl:2.6830 bb:1.0288 rl:2.7206 rb:1.0730 dl:1793-1816 gd:1 +ttp: b718/782 bl:2.7804 bb:1.0718 rl:2.7210 rb:1.0730 dl:1773-1792 gd:1 +ttp: b717/782 bl:2.7912 bb:1.0512 rl:2.7215 rb:1.0729 dl:1754-1773 gd:1 +ttp: b716/782 bl:2.8089 bb:1.0366 rl:2.7222 rb:1.0726 dl:1739-1754 gd:1 +ttp: b715/782 bl:2.6453 bb:1.0394 rl:2.7216 rb:1.0724 dl:1725-1739 gd:1 +ttp: b714/782 bl:2.8124 bb:1.0705 rl:2.7223 rb:1.0723 dl:1711-1725 gd:1 +ttp: b713/782 bl:2.8341 bb:1.0456 rl:2.7230 rb:1.0721 dl:1697-1711 gd:1 +ttp: b712/782 bl:2.8325 bb:1.0784 rl:2.7238 rb:1.0722 dl:1684-1697 gd:1 +ttp: b711/782 bl:2.7761 bb:1.0453 rl:2.7241 rb:1.0720 dl:1673-1683 gd:1 +ttp: b710/782 bl:2.7611 bb:1.0705 rl:2.7243 rb:1.0720 dl:1661-1673 gd:1 +ttp: b709/782 bl:2.7852 bb:1.0582 rl:2.7247 rb:1.0719 dl:1649-1661 gd:1 +ttp: b708/782 bl:2.7259 bb:1.0476 rl:2.7247 rb:1.0717 dl:1639-1649 gd:1 +ttp: b707/782 bl:2.7734 bb:1.0841 rl:2.7251 rb:1.0718 dl:1627-1638 gd:1 +ttp: b706/782 bl:2.7174 bb:1.0446 rl:2.7250 rb:1.0717 dl:1617-1627 gd:1 +ttp: b705/782 bl:2.7865 bb:1.0733 rl:2.7254 rb:1.0717 dl:1606-1617 gd:1 +ttp: b704/782 bl:2.7470 bb:1.0245 rl:2.7255 rb:1.0714 dl:1595-1606 gd:1 +ttp: b703/782 bl:2.9201 bb:1.1045 rl:2.7267 rb:1.0716 dl:1582-1594 gd:1 +ttp: b702/782 bl:2.8052 bb:1.0671 rl:2.7271 rb:1.0715 dl:1572-1581 gd:1 +ttp: b701/782 bl:2.7569 bb:1.0485 rl:2.7273 rb:1.0714 dl:1562-1572 gd:1 +ttp: b700/782 bl:2.6744 bb:1.0438 rl:2.7270 rb:1.0712 dl:1552-1562 gd:1 +ttp: b699/782 bl:2.8201 bb:1.0437 rl:2.7275 rb:1.0711 dl:1543-1552 gd:1 +ttp: b698/782 bl:2.7841 bb:1.0319 rl:2.7279 rb:1.0708 dl:1534-1543 gd:1 +ttp: b697/782 bl:2.7641 bb:1.0414 rl:2.7281 rb:1.0707 dl:1522-1534 gd:1 +ttp: b696/782 bl:2.8098 bb:1.0740 rl:2.7285 rb:1.0707 dl:1513-1522 gd:1 +ttp: b695/782 bl:2.7837 bb:1.0792 rl:2.7288 rb:1.0707 dl:1504-1513 gd:1 +ttp: b694/782 bl:2.7629 bb:1.0664 rl:2.7290 rb:1.0707 dl:1494-1504 gd:1 +ttp: b693/782 bl:2.8172 bb:1.1051 rl:2.7294 rb:1.0709 dl:1485-1494 gd:1 +ttp: b692/782 bl:2.7675 bb:1.0501 rl:2.7296 rb:1.0708 dl:1477-1484 gd:1 +ttp: b691/782 bl:2.6958 bb:1.0409 rl:2.7295 rb:1.0706 dl:1467-1476 gd:1 +ttp: b690/782 bl:2.8385 bb:1.0636 rl:2.7300 rb:1.0706 dl:1458-1467 gd:1 +ttp: b689/782 bl:2.7800 bb:1.0636 rl:2.7303 rb:1.0706 dl:1450-1458 gd:1 +ttp: b688/782 bl:2.7480 bb:1.0484 rl:2.7304 rb:1.0704 dl:1441-1450 gd:1 +ttp: b687/782 bl:2.7186 bb:1.0499 rl:2.7303 rb:1.0703 dl:1432-1441 gd:1 +ttp: b686/782 bl:2.8051 bb:1.0539 rl:2.7307 rb:1.0703 dl:1422-1432 gd:1 +ttp: b685/782 bl:2.7760 bb:1.0640 rl:2.7309 rb:1.0702 dl:1414-1422 gd:1 +ttp: b684/782 bl:2.7973 bb:1.0754 rl:2.7312 rb:1.0702 dl:1407-1414 gd:1 +ttp: b683/782 bl:2.7775 bb:1.0694 rl:2.7314 rb:1.0702 dl:1400-1406 gd:1 +ttp: b682/782 bl:2.8080 bb:1.0726 rl:2.7318 rb:1.0703 dl:1393-1400 gd:1 +ttp: b681/782 bl:2.8187 bb:1.0701 rl:2.7322 rb:1.0703 dl:1383-1393 gd:1 +ttp: b680/782 bl:2.8023 bb:1.0542 rl:2.7325 rb:1.0702 dl:1375-1383 gd:1 +ttp: b679/782 bl:2.8521 bb:1.0866 rl:2.7331 rb:1.0703 dl:1368-1374 gd:1 +ttp: b678/782 bl:2.7870 bb:1.0491 rl:2.7333 rb:1.0702 dl:1361-1368 gd:1 +ttp: b677/782 bl:2.8705 bb:1.1128 rl:2.7339 rb:1.0703 dl:1353-1360 gd:1 +ttp: b676/782 bl:2.7913 
bb:1.0665 rl:2.7342 rb:1.0703 dl:1347-1353 gd:1 +ttp: b675/782 bl:2.8408 bb:1.0664 rl:2.7346 rb:1.0703 dl:1341-1347 gd:1 +ttp: b674/782 bl:2.7860 bb:1.0572 rl:2.7349 rb:1.0703 dl:1334-1341 gd:1 +ttp: b673/782 bl:2.8155 bb:1.0573 rl:2.7352 rb:1.0702 dl:1327-1334 gd:1 +ttp: b672/782 bl:2.9050 bb:1.1083 rl:2.7359 rb:1.0704 dl:1321-1327 gd:1 +ttp: b671/782 bl:2.8837 bb:1.1176 rl:2.7366 rb:1.0706 dl:1316-1321 gd:1 +ttp: b670/782 bl:2.8287 bb:1.0577 rl:2.7369 rb:1.0705 dl:1308-1315 gd:1 +ttp: b669/782 bl:2.7813 bb:1.0547 rl:2.7371 rb:1.0704 dl:1301-1308 gd:1 +ttp: b668/782 bl:2.7947 bb:1.0593 rl:2.7374 rb:1.0704 dl:1295-1301 gd:1 +ttp: b667/782 bl:2.8213 bb:1.1052 rl:2.7377 rb:1.0705 dl:1288-1295 gd:1 +ttp: b666/782 bl:2.8212 bb:1.0603 rl:2.7380 rb:1.0705 dl:1282-1288 gd:1 +ttp: b665/782 bl:2.7386 bb:1.0321 rl:2.7380 rb:1.0703 dl:1275-1282 gd:1 +ttp: b664/782 bl:2.7021 bb:1.0418 rl:2.7379 rb:1.0702 dl:1270-1275 gd:1 +ttp: b663/782 bl:2.7960 bb:1.0612 rl:2.7381 rb:1.0702 dl:1264-1269 gd:1 +ttp: b662/782 bl:2.8091 bb:1.0719 rl:2.7384 rb:1.0702 dl:1258-1263 gd:1 +ttp: b661/782 bl:2.7170 bb:1.0187 rl:2.7383 rb:1.0700 dl:1251-1258 gd:1 +ttp: b660/782 bl:2.8545 bb:1.0922 rl:2.7388 rb:1.0701 dl:1245-1250 gd:1 +ttp: b659/782 bl:2.7138 bb:1.0219 rl:2.7387 rb:1.0699 dl:1239-1245 gd:1 +ttp: b658/782 bl:2.8131 bb:1.0767 rl:2.7389 rb:1.0699 dl:1234-1239 gd:1 +ttp: b657/782 bl:2.7839 bb:1.0454 rl:2.7391 rb:1.0698 dl:1227-1234 gd:1 +ttp: b656/782 bl:2.7475 bb:1.0372 rl:2.7391 rb:1.0697 dl:1220-1227 gd:1 +ttp: b655/782 bl:2.6847 bb:1.0212 rl:2.7389 rb:1.0695 dl:1215-1220 gd:1 +ttp: b654/782 bl:2.7320 bb:1.0371 rl:2.7389 rb:1.0694 dl:1209-1215 gd:1 +ttp: b653/782 bl:2.7575 bb:1.0345 rl:2.7390 rb:1.0692 dl:1203-1209 gd:1 +ttp: b652/782 bl:2.7978 bb:1.0720 rl:2.7392 rb:1.0693 dl:1198-1203 gd:1 +ttp: b651/782 bl:2.7208 bb:1.0450 rl:2.7391 rb:1.0692 dl:1193-1198 gd:1 +ttp: b650/782 bl:2.7932 bb:1.0755 rl:2.7393 rb:1.0692 dl:1188-1193 gd:1 +ttp: b649/782 bl:2.8111 bb:1.0603 rl:2.7396 rb:1.0692 dl:1183-1188 gd:1 +ttp: b648/782 bl:2.7475 bb:1.0415 rl:2.7396 rb:1.0691 dl:1177-1182 gd:1 +ttp: b647/782 bl:2.7556 bb:1.0490 rl:2.7397 rb:1.0690 dl:1171-1177 gd:1 +ttp: b646/782 bl:2.7671 bb:1.0715 rl:2.7397 rb:1.0690 dl:1166-1171 gd:1 +ttp: b645/782 bl:2.7962 bb:1.0940 rl:2.7399 rb:1.0691 dl:1160-1166 gd:1 +ttp: b644/782 bl:2.7317 bb:1.0307 rl:2.7399 rb:1.0690 dl:1155-1160 gd:1 +ttp: b643/782 bl:2.7931 bb:1.0649 rl:2.7401 rb:1.0689 dl:1150-1155 gd:1 +ttp: b642/782 bl:2.7800 bb:1.0815 rl:2.7402 rb:1.0690 dl:1144-1150 gd:1 +ttp: b641/782 bl:2.7705 bb:1.0431 rl:2.7403 rb:1.0689 dl:1140-1144 gd:1 +ttp: b640/782 bl:2.7870 bb:1.0847 rl:2.7405 rb:1.0689 dl:1134-1140 gd:1 +ttp: b639/782 bl:2.8563 bb:1.0820 rl:2.7408 rb:1.0690 dl:1129-1134 gd:1 +ttp: b638/782 bl:2.8432 bb:1.0487 rl:2.7412 rb:1.0689 dl:1123-1129 gd:1 +ttp: b637/782 bl:2.8049 bb:1.0807 rl:2.7414 rb:1.0690 dl:1120-1123 gd:1 +ttp: b636/782 bl:2.7608 bb:1.0709 rl:2.7414 rb:1.0690 dl:1116-1120 gd:1 +ttp: b635/782 bl:2.7415 bb:1.0612 rl:2.7414 rb:1.0689 dl:1111-1116 gd:1 +ttp: b634/782 bl:2.7011 bb:1.0428 rl:2.7413 rb:1.0689 dl:1105-1111 gd:1 +ttp: b633/782 bl:2.8318 bb:1.1049 rl:2.7416 rb:1.0690 dl:1101-1105 gd:1 +ttp: b632/782 bl:2.7384 bb:1.0286 rl:2.7416 rb:1.0688 dl:1096-1101 gd:1 +ttp: b631/782 bl:2.7654 bb:1.0618 rl:2.7417 rb:1.0688 dl:1092-1096 gd:1 +ttp: b630/782 bl:2.8337 bb:1.0612 rl:2.7419 rb:1.0688 dl:1087-1092 gd:1 +ttp: b629/782 bl:2.7274 bb:1.0449 rl:2.7419 rb:1.0687 dl:1082-1086 gd:1 +ttp: b628/782 bl:2.7725 bb:1.0485 rl:2.7420 rb:1.0687 dl:1078-1082 gd:1 
+ttp: b627/782 bl:2.7339 bb:1.0349 rl:2.7420 rb:1.0686 dl:1073-1077 gd:1 +ttp: b626/782 bl:2.8169 bb:1.0467 rl:2.7422 rb:1.0685 dl:1068-1073 gd:1 +ttp: b625/782 bl:2.6693 bb:1.0029 rl:2.7420 rb:1.0683 dl:1064-1068 gd:1 +ttp: b624/782 bl:2.7932 bb:1.0747 rl:2.7421 rb:1.0683 dl:1060-1064 gd:1 +ttp: b623/782 bl:2.7915 bb:1.0744 rl:2.7422 rb:1.0683 dl:1055-1060 gd:1 +ttp: b622/782 bl:2.8466 bb:1.0772 rl:2.7425 rb:1.0684 dl:1050-1055 gd:1 +ttp: b621/782 bl:2.8434 bb:1.0890 rl:2.7428 rb:1.0684 dl:1046-1050 gd:1 +ttp: b620/782 bl:2.7816 bb:1.0424 rl:2.7429 rb:1.0683 dl:1041-1046 gd:1 +ttp: b619/782 bl:2.7956 bb:1.0591 rl:2.7431 rb:1.0683 dl:1037-1041 gd:1 +ttp: b618/782 bl:2.7361 bb:1.0489 rl:2.7431 rb:1.0683 dl:1031-1037 gd:1 +ttp: b617/782 bl:2.7420 bb:1.0378 rl:2.7431 rb:1.0682 dl:1027-1031 gd:1 +ttp: b616/782 bl:2.8536 bb:1.0882 rl:2.7434 rb:1.0682 dl:1024-1027 gd:1 +ttp: b615/782 bl:2.8396 bb:1.0662 rl:2.7436 rb:1.0682 dl:1020-1023 gd:1 +ttp: b614/782 bl:2.7890 bb:1.0667 rl:2.7437 rb:1.0682 dl:1016-1020 gd:1 +ttp: b613/782 bl:2.8245 bb:1.0631 rl:2.7440 rb:1.0682 dl:1012-1016 gd:1 +ttp: b612/782 bl:2.8246 bb:1.0433 rl:2.7442 rb:1.0681 dl:1007-1012 gd:1 +ttp: b611/782 bl:2.7651 bb:1.0704 rl:2.7442 rb:1.0681 dl:1004-1007 gd:1 +ttp: b610/782 bl:2.8330 bb:1.0636 rl:2.7445 rb:1.0681 dl:999-1004 gd:1 +ttp: b609/782 bl:2.7929 bb:1.0602 rl:2.7446 rb:1.0681 dl:994-999 gd:1 +ttp: b608/782 bl:2.7369 bb:1.0329 rl:2.7446 rb:1.0680 dl:990-994 gd:1 +ttp: b607/782 bl:2.6934 bb:1.0381 rl:2.7444 rb:1.0679 dl:986-990 gd:1 +ttp: b606/782 bl:2.8169 bb:1.0838 rl:2.7446 rb:1.0680 dl:982-986 gd:1 +ttp: b605/782 bl:2.7417 bb:1.0576 rl:2.7446 rb:1.0679 dl:978-982 gd:1 +ttp: b604/782 bl:2.7269 bb:1.0367 rl:2.7446 rb:1.0679 dl:974-978 gd:1 +ttp: b603/782 bl:2.8344 bb:1.0857 rl:2.7448 rb:1.0679 dl:971-974 gd:1 +ttp: b602/782 bl:2.7802 bb:1.0399 rl:2.7449 rb:1.0678 dl:966-971 gd:1 +ttp: b601/782 bl:2.7694 bb:1.0643 rl:2.7449 rb:1.0678 dl:963-966 gd:1 +ttp: b600/782 bl:2.7950 bb:1.0610 rl:2.7451 rb:1.0678 dl:958-963 gd:1 +ttp: b599/782 bl:2.7380 bb:1.0516 rl:2.7451 rb:1.0678 dl:954-958 gd:1 +ttp: b598/782 bl:2.8097 bb:1.0702 rl:2.7452 rb:1.0678 dl:950-954 gd:1 +ttp: b597/782 bl:2.7744 bb:1.0417 rl:2.7453 rb:1.0677 dl:947-950 gd:1 +ttp: b596/782 bl:2.7756 bb:1.0630 rl:2.7454 rb:1.0677 dl:943-947 gd:1 +ttp: b595/782 bl:2.7336 bb:1.0569 rl:2.7453 rb:1.0677 dl:940-943 gd:1 +ttp: b594/782 bl:2.9069 bb:1.1040 rl:2.7457 rb:1.0678 dl:937-940 gd:1 +ttp: b593/782 bl:2.7991 bb:1.0468 rl:2.7458 rb:1.0677 dl:933-937 gd:1 +ttp: b592/782 bl:2.7892 bb:1.0501 rl:2.7459 rb:1.0677 dl:930-933 gd:1 +ttp: b591/782 bl:2.6679 bb:1.0081 rl:2.7457 rb:1.0675 dl:927-930 gd:1 +ttp: b590/782 bl:2.7337 bb:1.0288 rl:2.7457 rb:1.0674 dl:924-927 gd:1 +ttp: b589/782 bl:2.7546 bb:1.0545 rl:2.7457 rb:1.0674 dl:921-924 gd:1 +ttp: b588/782 bl:2.7452 bb:1.0473 rl:2.7457 rb:1.0674 dl:917-921 gd:1 +ttp: b587/782 bl:2.7796 bb:1.0614 rl:2.7458 rb:1.0673 dl:914-917 gd:1 +ttp: b586/782 bl:2.7258 bb:1.0142 rl:2.7458 rb:1.0672 dl:911-914 gd:1 +ttp: b585/782 bl:2.7670 bb:1.0669 rl:2.7458 rb:1.0672 dl:908-911 gd:1 +ttp: b584/782 bl:2.7676 bb:1.0398 rl:2.7459 rb:1.0672 dl:904-907 gd:1 +ttp: b583/782 bl:2.7997 bb:1.0921 rl:2.7460 rb:1.0672 dl:901-904 gd:1 +ttp: b582/782 bl:2.8633 bb:1.0922 rl:2.7462 rb:1.0673 dl:897-901 gd:1 +ttp: b581/782 bl:2.7249 bb:1.0165 rl:2.7462 rb:1.0672 dl:894-897 gd:1 +ttp: b580/782 bl:2.7276 bb:1.0363 rl:2.7462 rb:1.0671 dl:891-894 gd:1 +ttp: b579/782 bl:2.6359 bb:1.0046 rl:2.7459 rb:1.0669 dl:888-891 gd:1 +ttp: b578/782 bl:2.8079 bb:1.0698 
rl:2.7461 rb:1.0670 dl:884-887 gd:1 +ttp: b577/782 bl:2.7535 bb:1.0414 rl:2.7461 rb:1.0669 dl:880-884 gd:1 +ttp: b576/782 bl:2.7781 bb:1.0463 rl:2.7461 rb:1.0669 dl:877-880 gd:1 +ttp: b575/782 bl:2.7966 bb:1.0530 rl:2.7462 rb:1.0668 dl:874-877 gd:1 +ttp: b574/782 bl:2.7853 bb:1.0404 rl:2.7463 rb:1.0668 dl:871-874 gd:1 +ttp: b573/782 bl:2.9313 bb:1.0728 rl:2.7467 rb:1.0668 dl:868-871 gd:1 +ttp: b572/782 bl:2.9429 bb:1.1200 rl:2.7471 rb:1.0669 dl:865-868 gd:1 +ttp: b571/782 bl:2.7069 bb:1.0326 rl:2.7470 rb:1.0668 dl:862-865 gd:1 +ttp: b570/782 bl:2.7725 bb:1.0790 rl:2.7471 rb:1.0668 dl:858-862 gd:1 +ttp: b569/782 bl:2.7615 bb:1.0551 rl:2.7471 rb:1.0668 dl:855-858 gd:1 +ttp: b568/782 bl:2.7994 bb:1.0560 rl:2.7472 rb:1.0668 dl:852-855 gd:1 +ttp: b567/782 bl:2.6720 bb:1.0292 rl:2.7471 rb:1.0667 dl:849-852 gd:1 +ttp: b566/782 bl:2.7184 bb:1.0331 rl:2.7470 rb:1.0667 dl:846-849 gd:1 +ttp: b565/782 bl:2.7733 bb:1.0627 rl:2.7471 rb:1.0666 dl:843-846 gd:1 +ttp: b564/782 bl:2.8633 bb:1.1078 rl:2.7473 rb:1.0667 dl:840-843 gd:1 +ttp: b563/782 bl:2.8027 bb:1.0632 rl:2.7474 rb:1.0667 dl:837-840 gd:1 +ttp: b562/782 bl:2.7135 bb:1.0256 rl:2.7473 rb:1.0666 dl:834-837 gd:1 +ttp: b561/782 bl:2.7128 bb:1.0639 rl:2.7473 rb:1.0666 dl:831-834 gd:1 +ttp: b560/782 bl:2.8166 bb:1.0908 rl:2.7474 rb:1.0667 dl:828-831 gd:1 +ttp: b559/782 bl:2.7539 bb:1.0466 rl:2.7474 rb:1.0666 dl:824-827 gd:1 +ttp: b558/782 bl:2.7004 bb:1.0228 rl:2.7473 rb:1.0666 dl:821-824 gd:1 +ttp: b557/782 bl:2.8017 bb:1.0446 rl:2.7474 rb:1.0665 dl:818-821 gd:1 +ttp: b556/782 bl:2.8385 bb:1.0852 rl:2.7476 rb:1.0665 dl:815-818 gd:1 +ttp: b555/782 bl:2.7607 bb:1.0536 rl:2.7476 rb:1.0665 dl:812-815 gd:1 +ttp: b554/782 bl:2.7363 bb:1.0300 rl:2.7476 rb:1.0664 dl:809-812 gd:1 +ttp: b553/782 bl:2.7668 bb:1.0600 rl:2.7476 rb:1.0664 dl:806-809 gd:1 +ttp: b552/782 bl:2.8034 bb:1.0448 rl:2.7477 rb:1.0664 dl:804-806 gd:1 +ttp: b551/782 bl:2.8248 bb:1.0647 rl:2.7479 rb:1.0664 dl:801-804 gd:1 +ttp: b550/782 bl:2.8053 bb:1.0764 rl:2.7480 rb:1.0664 dl:798-801 gd:1 +ttp: b549/782 bl:2.7672 bb:1.0647 rl:2.7480 rb:1.0664 dl:795-798 gd:1 +ttp: b548/782 bl:2.7626 bb:1.0475 rl:2.7481 rb:1.0664 dl:793-795 gd:1 +ttp: b547/782 bl:2.7337 bb:1.0324 rl:2.7480 rb:1.0663 dl:790-793 gd:1 +ttp: b546/782 bl:2.8262 bb:1.0731 rl:2.7482 rb:1.0663 dl:788-790 gd:1 +ttp: b545/782 bl:2.7882 bb:1.0543 rl:2.7482 rb:1.0663 dl:785-788 gd:1 +ttp: b544/782 bl:2.7552 bb:1.0434 rl:2.7483 rb:1.0663 dl:782-785 gd:1 +ttp: b543/782 bl:2.7875 bb:1.0464 rl:2.7483 rb:1.0662 dl:779-782 gd:1 +ttp: b542/782 bl:2.8347 bb:1.0738 rl:2.7485 rb:1.0662 dl:777-779 gd:1 +ttp: b541/782 bl:2.8017 bb:1.0604 rl:2.7486 rb:1.0662 dl:774-776 gd:1 +ttp: b540/782 bl:2.7011 bb:1.0191 rl:2.7485 rb:1.0661 dl:771-774 gd:1 +ttp: b539/782 bl:2.7310 bb:1.0456 rl:2.7485 rb:1.0661 dl:769-771 gd:1 +ttp: b538/782 bl:2.6854 bb:1.0386 rl:2.7483 rb:1.0661 dl:767-769 gd:1 +ttp: b537/782 bl:2.7142 bb:1.0263 rl:2.7483 rb:1.0660 dl:764-767 gd:1 +ttp: b536/782 bl:2.7869 bb:1.0750 rl:2.7484 rb:1.0660 dl:762-764 gd:1 +ttp: b535/782 bl:2.7891 bb:1.0575 rl:2.7484 rb:1.0660 dl:759-762 gd:1 +ttp: b534/782 bl:2.8194 bb:1.0725 rl:2.7485 rb:1.0660 dl:757-759 gd:1 +ttp: b533/782 bl:2.7716 bb:1.0352 rl:2.7486 rb:1.0659 dl:754-757 gd:1 +ttp: b532/782 bl:2.8173 bb:1.0579 rl:2.7487 rb:1.0659 dl:752-754 gd:1 +ttp: b531/782 bl:2.7754 bb:1.0527 rl:2.7487 rb:1.0659 dl:750-752 gd:1 +ttp: b530/782 bl:2.8060 bb:1.0387 rl:2.7488 rb:1.0659 dl:747-750 gd:1 +ttp: b529/782 bl:2.7775 bb:1.0579 rl:2.7489 rb:1.0658 dl:745-747 gd:1 +ttp: b528/782 bl:2.7533 bb:1.0314 
rl:2.7489 rb:1.0658 dl:742-745 gd:1 +ttp: b527/782 bl:2.7437 bb:1.0426 rl:2.7489 rb:1.0658 dl:739-742 gd:1 +ttp: b526/782 bl:2.7681 bb:1.0570 rl:2.7489 rb:1.0657 dl:737-739 gd:1 +ttp: b525/782 bl:2.7857 bb:1.0718 rl:2.7490 rb:1.0657 dl:735-737 gd:1 +ttp: b524/782 bl:2.8135 bb:1.0514 rl:2.7491 rb:1.0657 dl:732-735 gd:1 +ttp: b523/782 bl:2.8126 bb:1.0562 rl:2.7492 rb:1.0657 dl:730-732 gd:1 +ttp: b522/782 bl:2.8236 bb:1.0854 rl:2.7493 rb:1.0657 dl:727-730 gd:1 +ttp: b521/782 bl:2.7705 bb:1.0512 rl:2.7493 rb:1.0657 dl:725-727 gd:1 +ttp: b520/782 bl:2.7894 bb:1.0571 rl:2.7494 rb:1.0657 dl:723-725 gd:1 +ttp: b519/782 bl:2.7288 bb:1.0348 rl:2.7494 rb:1.0657 dl:720-723 gd:1 +ttp: b518/782 bl:2.7295 bb:1.0513 rl:2.7493 rb:1.0656 dl:717-720 gd:1 +ttp: b517/782 bl:2.7779 bb:1.0515 rl:2.7494 rb:1.0656 dl:715-717 gd:1 +ttp: b516/782 bl:2.8610 bb:1.0771 rl:2.7495 rb:1.0656 dl:713-715 gd:1 +ttp: b515/782 bl:2.7879 bb:1.0746 rl:2.7496 rb:1.0656 dl:710-713 gd:1 +ttp: b514/782 bl:2.9119 bb:1.0983 rl:2.7499 rb:1.0657 dl:707-710 gd:1 +ttp: b513/782 bl:2.7376 bb:1.0133 rl:2.7498 rb:1.0656 dl:705-707 gd:1 +ttp: b512/782 bl:2.7863 bb:1.0578 rl:2.7499 rb:1.0656 dl:703-705 gd:1 +ttp: b511/782 bl:2.7694 bb:1.0460 rl:2.7499 rb:1.0656 dl:700-703 gd:1 +ttp: b510/782 bl:2.7573 bb:1.0198 rl:2.7499 rb:1.0655 dl:698-700 gd:1 +ttp: b509/782 bl:2.7487 bb:1.0697 rl:2.7499 rb:1.0655 dl:695-698 gd:1 +ttp: b508/782 bl:2.7581 bb:1.0307 rl:2.7499 rb:1.0654 dl:693-695 gd:1 +ttp: b507/782 bl:2.7567 bb:1.0407 rl:2.7500 rb:1.0654 dl:690-693 gd:1 +ttp: b506/782 bl:2.8108 bb:1.0767 rl:2.7500 rb:1.0654 dl:688-690 gd:1 +ttp: b505/782 bl:2.7820 bb:1.0629 rl:2.7501 rb:1.0654 dl:686-688 gd:1 +ttp: b504/782 bl:2.8660 bb:1.0982 rl:2.7503 rb:1.0655 dl:685-686 gd:1 +ttp: b503/782 bl:2.8287 bb:1.0772 rl:2.7504 rb:1.0655 dl:683-685 gd:1 +ttp: b502/782 bl:2.8295 bb:1.0625 rl:2.7505 rb:1.0655 dl:680-682 gd:1 +ttp: b501/782 bl:2.7905 bb:1.0395 rl:2.7505 rb:1.0654 dl:677-680 gd:1 +ttp: b500/782 bl:2.8370 bb:1.0837 rl:2.7507 rb:1.0655 dl:675-677 gd:1 +ttp: b499/782 bl:2.7879 bb:1.0521 rl:2.7507 rb:1.0654 dl:673-675 gd:1 +ttp: b498/782 bl:2.6787 bb:1.0370 rl:2.7506 rb:1.0654 dl:671-673 gd:1 +ttp: b497/782 bl:2.8340 bb:1.0806 rl:2.7507 rb:1.0654 dl:668-671 gd:1 +ttp: b496/782 bl:2.8344 bb:1.0505 rl:2.7509 rb:1.0654 dl:666-668 gd:1 +ttp: b495/782 bl:2.7660 bb:1.0561 rl:2.7509 rb:1.0654 dl:664-666 gd:1 +ttp: b494/782 bl:2.7953 bb:1.0538 rl:2.7509 rb:1.0654 dl:661-664 gd:1 +ttp: b493/782 bl:2.8466 bb:1.1163 rl:2.7511 rb:1.0654 dl:659-661 gd:1 +ttp: b492/782 bl:2.8104 bb:1.0569 rl:2.7512 rb:1.0654 dl:657-659 gd:1 +ttp: b491/782 bl:2.7389 bb:1.0320 rl:2.7511 rb:1.0654 dl:655-657 gd:1 +ttp: b490/782 bl:2.8551 bb:1.0910 rl:2.7513 rb:1.0654 dl:653-655 gd:1 +ttp: b489/782 bl:2.7969 bb:1.0815 rl:2.7513 rb:1.0654 dl:651-653 gd:1 +ttp: b488/782 bl:2.8225 bb:1.0519 rl:2.7514 rb:1.0654 dl:649-651 gd:1 +ttp: b487/782 bl:2.8068 bb:1.0723 rl:2.7515 rb:1.0654 dl:647-649 gd:1 +ttp: b486/782 bl:2.7931 bb:1.0601 rl:2.7516 rb:1.0654 dl:645-646 gd:1 +ttp: b485/782 bl:2.7876 bb:1.0491 rl:2.7516 rb:1.0654 dl:643-645 gd:1 +ttp: b484/782 bl:2.8047 bb:1.0704 rl:2.7517 rb:1.0654 dl:641-643 gd:1 +ttp: b483/782 bl:2.7494 bb:1.0514 rl:2.7517 rb:1.0654 dl:639-641 gd:1 +ttp: b482/782 bl:2.7574 bb:1.0821 rl:2.7517 rb:1.0654 dl:637-639 gd:1 +ttp: b481/782 bl:2.7974 bb:1.0998 rl:2.7517 rb:1.0655 dl:635-637 gd:1 +ttp: b480/782 bl:2.7964 bb:1.0557 rl:2.7518 rb:1.0654 dl:632-635 gd:1 +ttp: b479/782 bl:2.7095 bb:1.0341 rl:2.7518 rb:1.0654 dl:630-632 gd:1 +ttp: b478/782 bl:2.7958 bb:1.0529 
rl:2.7518 rb:1.0654 dl:628-630 gd:1 +ttp: b477/782 bl:2.7746 bb:1.0541 rl:2.7518 rb:1.0654 dl:626-628 gd:1 +ttp: b476/782 bl:2.7556 bb:1.0524 rl:2.7518 rb:1.0654 dl:624-626 gd:1 +ttp: b475/782 bl:2.7262 bb:1.0221 rl:2.7518 rb:1.0653 dl:622-623 gd:1 +ttp: b474/782 bl:2.7592 bb:1.0521 rl:2.7518 rb:1.0653 dl:620-622 gd:1 +ttp: b473/782 bl:2.8356 bb:1.0789 rl:2.7519 rb:1.0653 dl:618-620 gd:1 +ttp: b472/782 bl:2.8027 bb:1.0714 rl:2.7520 rb:1.0653 dl:616-618 gd:1 +ttp: b471/782 bl:2.8462 bb:1.0724 rl:2.7521 rb:1.0653 dl:614-616 gd:1 +ttp: b470/782 bl:2.8707 bb:1.0966 rl:2.7523 rb:1.0654 dl:611-613 gd:1 +ttp: b469/782 bl:2.8019 bb:1.1142 rl:2.7523 rb:1.0654 dl:610-611 gd:1 +ttp: b468/782 bl:2.7932 bb:1.0603 rl:2.7524 rb:1.0654 dl:608-610 gd:1 +ttp: b467/782 bl:2.7972 bb:1.0567 rl:2.7524 rb:1.0654 dl:606-608 gd:1 +ttp: b466/782 bl:2.8047 bb:1.0663 rl:2.7525 rb:1.0654 dl:604-606 gd:1 +ttp: b465/782 bl:2.8102 bb:1.0600 rl:2.7526 rb:1.0654 dl:602-604 gd:1 +ttp: b464/782 bl:2.7101 bb:1.0740 rl:2.7525 rb:1.0654 dl:600-602 gd:1 +ttp: b463/782 bl:2.8052 bb:1.0768 rl:2.7526 rb:1.0654 dl:599-600 gd:1 +ttp: b462/782 bl:2.8687 bb:1.0688 rl:2.7527 rb:1.0654 dl:597-599 gd:1 +ttp: b461/782 bl:2.7766 bb:1.0589 rl:2.7527 rb:1.0654 dl:595-597 gd:1 +ttp: b460/782 bl:2.7990 bb:1.0617 rl:2.7528 rb:1.0654 dl:593-595 gd:1 +ttp: b459/782 bl:2.7436 bb:1.0411 rl:2.7528 rb:1.0654 dl:591-593 gd:1 +ttp: b458/782 bl:2.8167 bb:1.0677 rl:2.7529 rb:1.0654 dl:589-591 gd:1 +ttp: b457/782 bl:2.7641 bb:1.0495 rl:2.7529 rb:1.0654 dl:587-589 gd:1 +ttp: b456/782 bl:2.8141 bb:1.0687 rl:2.7529 rb:1.0654 dl:586-587 gd:1 +ttp: b455/782 bl:2.8027 bb:1.0748 rl:2.7530 rb:1.0654 dl:584-586 gd:1 +ttp: b454/782 bl:2.8367 bb:1.0741 rl:2.7531 rb:1.0654 dl:582-584 gd:1 +ttp: b453/782 bl:2.7545 bb:1.0570 rl:2.7531 rb:1.0654 dl:580-582 gd:1 +ttp: b452/782 bl:2.7424 bb:1.0579 rl:2.7531 rb:1.0654 dl:579-580 gd:1 +ttp: b451/782 bl:2.7772 bb:1.0639 rl:2.7531 rb:1.0654 dl:576-579 gd:1 +ttp: b450/782 bl:2.7612 bb:1.0306 rl:2.7531 rb:1.0653 dl:575-576 gd:1 +ttp: b449/782 bl:2.7984 bb:1.0533 rl:2.7532 rb:1.0653 dl:573-575 gd:1 +ttp: b448/782 bl:2.7295 bb:1.0369 rl:2.7531 rb:1.0653 dl:571-573 gd:1 +ttp: b447/782 bl:2.8323 bb:1.0892 rl:2.7532 rb:1.0653 dl:569-571 gd:1 +ttp: b446/782 bl:2.8285 bb:1.0918 rl:2.7533 rb:1.0653 dl:568-569 gd:1 +ttp: b445/782 bl:2.7690 bb:1.0646 rl:2.7533 rb:1.0653 dl:566-568 gd:1 +ttp: b444/782 bl:2.6709 bb:1.0120 rl:2.7532 rb:1.0653 dl:564-566 gd:1 +ttp: b443/782 bl:2.7797 bb:1.0588 rl:2.7533 rb:1.0653 dl:562-564 gd:1 +ttp: b442/782 bl:2.8189 bb:1.0587 rl:2.7533 rb:1.0653 dl:560-562 gd:1 +ttp: b441/782 bl:2.7098 bb:1.0431 rl:2.7533 rb:1.0652 dl:559-560 gd:1 +ttp: b440/782 bl:2.8649 bb:1.0938 rl:2.7534 rb:1.0653 dl:556-559 gd:1 +ttp: b439/782 bl:2.7513 bb:1.0427 rl:2.7534 rb:1.0652 dl:555-556 gd:1 +ttp: b438/782 bl:2.7182 bb:1.0576 rl:2.7534 rb:1.0652 dl:553-555 gd:1 +ttp: b437/782 bl:2.8757 bb:1.0611 rl:2.7535 rb:1.0652 dl:551-553 gd:1 +ttp: b436/782 bl:2.8448 bb:1.0673 rl:2.7536 rb:1.0652 dl:549-551 gd:1 +ttp: b435/782 bl:2.7312 bb:1.0517 rl:2.7536 rb:1.0652 dl:547-549 gd:1 +ttp: b434/782 bl:2.7284 bb:1.0426 rl:2.7536 rb:1.0652 dl:545-547 gd:1 +ttp: b433/782 bl:2.7764 bb:1.0656 rl:2.7536 rb:1.0652 dl:544-545 gd:1 +ttp: b432/782 bl:2.7641 bb:1.0516 rl:2.7536 rb:1.0652 dl:542-544 gd:1 +ttp: b431/782 bl:2.7531 bb:1.0628 rl:2.7536 rb:1.0652 dl:540-542 gd:1 +ttp: b430/782 bl:2.7589 bb:1.0473 rl:2.7536 rb:1.0652 dl:539-540 gd:1 +ttp: b429/782 bl:2.7574 bb:1.0821 rl:2.7536 rb:1.0652 dl:537-539 gd:1 +ttp: b428/782 bl:2.8258 bb:1.0691 
rl:2.7537 rb:1.0652 dl:535-537 gd:1 +ttp: b427/782 bl:2.7561 bb:1.0649 rl:2.7537 rb:1.0652 dl:533-535 gd:1 +ttp: b426/782 bl:2.7284 bb:1.0677 rl:2.7537 rb:1.0652 dl:532-533 gd:1 +ttp: b425/782 bl:2.7627 bb:1.0511 rl:2.7537 rb:1.0652 dl:530-532 gd:1 +ttp: b424/782 bl:2.7932 bb:1.0795 rl:2.7537 rb:1.0652 dl:528-530 gd:1 +ttp: b423/782 bl:2.7452 bb:1.0311 rl:2.7537 rb:1.0651 dl:526-528 gd:1 +ttp: b422/782 bl:2.7300 bb:1.0410 rl:2.7537 rb:1.0651 dl:524-526 gd:1 +ttp: b421/782 bl:2.7906 bb:1.0527 rl:2.7537 rb:1.0651 dl:523-524 gd:1 +ttp: b420/782 bl:2.7792 bb:1.0584 rl:2.7537 rb:1.0651 dl:521-522 gd:1 +ttp: b419/782 bl:2.8026 bb:1.0416 rl:2.7538 rb:1.0651 dl:519-521 gd:1 +ttp: b418/782 bl:2.8135 bb:1.0732 rl:2.7538 rb:1.0651 dl:517-519 gd:1 +ttp: b417/782 bl:2.8189 bb:1.0571 rl:2.7539 rb:1.0651 dl:516-517 gd:1 +ttp: b416/782 bl:2.7615 bb:1.0366 rl:2.7539 rb:1.0650 dl:514-516 gd:1 +ttp: b415/782 bl:2.8510 bb:1.0832 rl:2.7540 rb:1.0651 dl:513-514 gd:1 +ttp: b414/782 bl:2.8169 bb:1.0859 rl:2.7541 rb:1.0651 dl:511-513 gd:1 +ttp: b413/782 bl:2.6494 bb:0.9990 rl:2.7540 rb:1.0650 dl:510-511 gd:1 +ttp: b412/782 bl:2.7058 bb:1.0508 rl:2.7539 rb:1.0650 dl:508-510 gd:1 +ttp: b411/782 bl:2.8203 bb:1.0753 rl:2.7540 rb:1.0650 dl:507-508 gd:1 +ttp: b410/782 bl:2.7776 bb:1.0547 rl:2.7540 rb:1.0650 dl:505-507 gd:1 +ttp: b409/782 bl:2.7115 bb:1.0476 rl:2.7540 rb:1.0650 dl:503-505 gd:1 +ttp: b408/782 bl:2.8380 bb:1.0856 rl:2.7541 rb:1.0650 dl:501-503 gd:1 +ttp: b407/782 bl:2.7797 bb:1.0582 rl:2.7541 rb:1.0650 dl:500-501 gd:1 +ttp: b406/782 bl:2.8386 bb:1.1052 rl:2.7542 rb:1.0650 dl:498-500 gd:1 +ttp: b405/782 bl:2.8245 bb:1.0676 rl:2.7542 rb:1.0650 dl:497-498 gd:1 +ttp: b404/782 bl:2.7821 bb:1.0676 rl:2.7542 rb:1.0650 dl:495-497 gd:1 +ttp: b403/782 bl:2.8175 bb:1.0529 rl:2.7543 rb:1.0650 dl:493-495 gd:1 +ttp: b402/782 bl:2.7527 bb:1.0371 rl:2.7543 rb:1.0650 dl:492-493 gd:1 +ttp: b401/782 bl:2.7412 bb:1.0611 rl:2.7543 rb:1.0650 dl:490-492 gd:1 +ttp: b400/782 bl:2.7937 bb:1.0656 rl:2.7543 rb:1.0650 dl:489-490 gd:1 +ttp: b399/782 bl:2.7467 bb:1.0404 rl:2.7543 rb:1.0650 dl:487-489 gd:1 +ttp: b398/782 bl:2.8815 bb:1.0944 rl:2.7544 rb:1.0650 dl:486-487 gd:1 +ttp: b397/782 bl:2.8950 bb:1.0998 rl:2.7546 rb:1.0650 dl:484-486 gd:1 +ttp: b396/782 bl:2.7541 bb:1.0539 rl:2.7546 rb:1.0650 dl:482-484 gd:1 +ttp: b395/782 bl:2.7367 bb:1.0450 rl:2.7546 rb:1.0650 dl:481-482 gd:1 +ttp: b394/782 bl:2.9003 bb:1.1185 rl:2.7547 rb:1.0651 dl:479-481 gd:1 +ttp: b393/782 bl:2.8593 bb:1.0889 rl:2.7548 rb:1.0651 dl:478-479 gd:1 +ttp: b392/782 bl:2.8011 bb:1.0816 rl:2.7548 rb:1.0651 dl:476-478 gd:1 +ttp: b391/782 bl:2.8127 bb:1.0954 rl:2.7549 rb:1.0651 dl:475-476 gd:1 +ttp: b390/782 bl:2.8130 bb:1.0911 rl:2.7549 rb:1.0651 dl:473-475 gd:1 +ttp: b389/782 bl:2.7962 bb:1.0651 rl:2.7550 rb:1.0651 dl:471-473 gd:1 +ttp: b388/782 bl:2.7878 bb:1.0697 rl:2.7550 rb:1.0652 dl:470-471 gd:1 +ttp: b387/782 bl:2.8365 bb:1.0737 rl:2.7551 rb:1.0652 dl:468-470 gd:1 +ttp: b386/782 bl:2.7255 bb:1.0648 rl:2.7550 rb:1.0652 dl:467-468 gd:1 +ttp: b385/782 bl:2.8903 bb:1.1013 rl:2.7551 rb:1.0652 dl:466-467 gd:1 +ttp: b384/782 bl:2.8462 bb:1.0919 rl:2.7552 rb:1.0652 dl:464-466 gd:1 +ttp: b383/782 bl:2.8359 bb:1.0861 rl:2.7553 rb:1.0652 dl:463-464 gd:1 +ttp: b382/782 bl:2.9137 bb:1.1342 rl:2.7554 rb:1.0653 dl:461-463 gd:1 +ttp: b381/782 bl:2.9063 bb:1.0913 rl:2.7556 rb:1.0653 dl:460-461 gd:1 +ttp: b380/782 bl:2.8481 bb:1.0784 rl:2.7556 rb:1.0653 dl:459-460 gd:1 +ttp: b379/782 bl:2.7661 bb:1.0592 rl:2.7556 rb:1.0653 dl:457-459 gd:1 +ttp: b378/782 bl:2.8235 bb:1.0986 
rl:2.7557 rb:1.0653 dl:456-457 gd:1 +ttp: b377/782 bl:2.8018 bb:1.0864 rl:2.7557 rb:1.0654 dl:454-455 gd:1 +ttp: b376/782 bl:2.7196 bb:1.0444 rl:2.7557 rb:1.0653 dl:453-454 gd:1 +ttp: b375/782 bl:2.8128 bb:1.1084 rl:2.7558 rb:1.0654 dl:452-453 gd:1 +ttp: b374/782 bl:2.7493 bb:1.0683 rl:2.7558 rb:1.0654 dl:450-452 gd:1 +ttp: b373/782 bl:2.7602 bb:1.0774 rl:2.7558 rb:1.0654 dl:449-450 gd:1 +ttp: b372/782 bl:2.8401 bb:1.0707 rl:2.7558 rb:1.0654 dl:447-449 gd:1 +ttp: b371/782 bl:2.8032 bb:1.0714 rl:2.7559 rb:1.0654 dl:446-447 gd:1 +ttp: b370/782 bl:2.6802 bb:1.0428 rl:2.7558 rb:1.0654 dl:444-446 gd:1 +ttp: b369/782 bl:2.9291 bb:1.0873 rl:2.7559 rb:1.0654 dl:443-444 gd:1 +ttp: b368/782 bl:2.8532 bb:1.0886 rl:2.7560 rb:1.0654 dl:441-443 gd:1 +ttp: b367/782 bl:2.8334 bb:1.0642 rl:2.7561 rb:1.0654 dl:440-441 gd:1 +ttp: b366/782 bl:2.8812 bb:1.1279 rl:2.7562 rb:1.0655 dl:439-440 gd:1 +ttp: b365/782 bl:2.7748 bb:1.0814 rl:2.7562 rb:1.0655 dl:437-439 gd:1 +ttp: b364/782 bl:2.7493 bb:1.0724 rl:2.7562 rb:1.0655 dl:436-437 gd:1 +ttp: b363/782 bl:2.7435 bb:1.0940 rl:2.7562 rb:1.0655 dl:434-436 gd:1 +ttp: b362/782 bl:2.8170 bb:1.0651 rl:2.7562 rb:1.0655 dl:433-434 gd:1 +ttp: b361/782 bl:2.8189 bb:1.0778 rl:2.7563 rb:1.0655 dl:432-433 gd:1 +ttp: b360/782 bl:2.8415 bb:1.0836 rl:2.7564 rb:1.0655 dl:430-432 gd:1 +ttp: b359/782 bl:2.7988 bb:1.0817 rl:2.7564 rb:1.0655 dl:429-430 gd:1 +ttp: b358/782 bl:2.8236 bb:1.0914 rl:2.7564 rb:1.0656 dl:427-429 gd:1 +ttp: b357/782 bl:2.8592 bb:1.0819 rl:2.7565 rb:1.0656 dl:426-427 gd:1 +ttp: b356/782 bl:2.6941 bb:1.0468 rl:2.7565 rb:1.0656 dl:424-426 gd:1 +ttp: b355/782 bl:2.7070 bb:1.0666 rl:2.7564 rb:1.0656 dl:423-424 gd:1 +ttp: b354/782 bl:2.7894 bb:1.0822 rl:2.7565 rb:1.0656 dl:422-423 gd:1 +ttp: b353/782 bl:2.8036 bb:1.0986 rl:2.7565 rb:1.0656 dl:420-422 gd:1 +ttp: b352/782 bl:2.7581 bb:1.0964 rl:2.7565 rb:1.0656 dl:419-420 gd:1 +ttp: b351/782 bl:2.8423 bb:1.0939 rl:2.7566 rb:1.0657 dl:418-419 gd:1 +ttp: b350/782 bl:2.7393 bb:1.0625 rl:2.7565 rb:1.0656 dl:417-418 gd:1 +ttp: b349/782 bl:2.9120 bb:1.1065 rl:2.7567 rb:1.0657 dl:415-417 gd:1 +ttp: b348/782 bl:2.8075 bb:1.0669 rl:2.7567 rb:1.0657 dl:414-415 gd:1 +ttp: b347/782 bl:2.8668 bb:1.0927 rl:2.7568 rb:1.0657 dl:413-414 gd:1 +ttp: b346/782 bl:2.8509 bb:1.0880 rl:2.7569 rb:1.0657 dl:412-413 gd:1 +ttp: b345/782 bl:2.8698 bb:1.1129 rl:2.7569 rb:1.0658 dl:410-412 gd:1 +ttp: b344/782 bl:2.8923 bb:1.1087 rl:2.7570 rb:1.0658 dl:408-410 gd:1 +ttp: b343/782 bl:2.8055 bb:1.0705 rl:2.7571 rb:1.0658 dl:407-408 gd:1 +ttp: b342/782 bl:2.8708 bb:1.1045 rl:2.7572 rb:1.0658 dl:406-407 gd:1 +ttp: b341/782 bl:2.8760 bb:1.1010 rl:2.7572 rb:1.0658 dl:404-406 gd:1 +ttp: b340/782 bl:2.8221 bb:1.0917 rl:2.7573 rb:1.0659 dl:403-404 gd:1 +ttp: b339/782 bl:2.8286 bb:1.0741 rl:2.7573 rb:1.0659 dl:402-403 gd:1 +ttp: b338/782 bl:2.8455 bb:1.1099 rl:2.7574 rb:1.0659 dl:400-402 gd:1 +ttp: b337/782 bl:2.8321 bb:1.0783 rl:2.7575 rb:1.0659 dl:399-400 gd:1 +ttp: b336/782 bl:2.9482 bb:1.1651 rl:2.7576 rb:1.0660 dl:398-399 gd:1 +ttp: b335/782 bl:2.7182 bb:1.0894 rl:2.7576 rb:1.0660 dl:396-398 gd:1 +ttp: b334/782 bl:2.8656 bb:1.1027 rl:2.7576 rb:1.0660 dl:395-396 gd:1 +ttp: b333/782 bl:2.9036 bb:1.1308 rl:2.7577 rb:1.0661 dl:394-395 gd:1 +ttp: b332/782 bl:2.8231 bb:1.0962 rl:2.7578 rb:1.0661 dl:393-394 gd:1 +ttp: b331/782 bl:2.7840 bb:1.0687 rl:2.7578 rb:1.0661 dl:392-393 gd:1 +ttp: b330/782 bl:2.8717 bb:1.0948 rl:2.7579 rb:1.0661 dl:390-392 gd:1 +ttp: b329/782 bl:2.8375 bb:1.1068 rl:2.7579 rb:1.0661 dl:389-390 gd:1 +ttp: b328/782 bl:2.7918 bb:1.0825 
rl:2.7580 rb:1.0661 dl:388-389 gd:1 +ttp: b327/782 bl:2.7810 bb:1.0796 rl:2.7580 rb:1.0662 dl:387-388 gd:1 +ttp: b326/782 bl:2.8553 bb:1.1285 rl:2.7580 rb:1.0662 dl:385-387 gd:1 +ttp: b325/782 bl:2.8557 bb:1.0970 rl:2.7581 rb:1.0662 dl:384-385 gd:1 +ttp: b324/782 bl:2.7717 bb:1.0572 rl:2.7581 rb:1.0662 dl:382-384 gd:1 +ttp: b323/782 bl:2.8160 bb:1.0474 rl:2.7582 rb:1.0662 dl:381-382 gd:1 +ttp: b322/782 bl:2.7625 bb:1.0795 rl:2.7582 rb:1.0662 dl:380-381 gd:1 +ttp: b321/782 bl:2.8088 bb:1.1037 rl:2.7582 rb:1.0662 dl:378-380 gd:1 +ttp: b320/782 bl:2.7591 bb:1.0763 rl:2.7582 rb:1.0662 dl:377-378 gd:1 +ttp: b319/782 bl:2.8351 bb:1.1122 rl:2.7582 rb:1.0663 dl:376-377 gd:1 +ttp: b318/782 bl:2.8085 bb:1.0653 rl:2.7583 rb:1.0663 dl:374-376 gd:1 +ttp: b317/782 bl:2.8781 bb:1.1126 rl:2.7584 rb:1.0663 dl:373-374 gd:1 +ttp: b316/782 bl:2.7825 bb:1.0942 rl:2.7584 rb:1.0663 dl:371-373 gd:1 +ttp: b315/782 bl:2.7143 bb:1.0673 rl:2.7583 rb:1.0663 dl:370-371 gd:1 +ttp: b314/782 bl:2.8063 bb:1.0666 rl:2.7584 rb:1.0663 dl:369-370 gd:1 +ttp: b313/782 bl:2.8281 bb:1.0892 rl:2.7584 rb:1.0663 dl:368-369 gd:1 +ttp: b312/782 bl:2.7386 bb:1.0691 rl:2.7584 rb:1.0663 dl:367-368 gd:1 +ttp: b311/782 bl:2.8560 bb:1.0941 rl:2.7585 rb:1.0664 dl:365-367 gd:1 +ttp: b310/782 bl:2.7975 bb:1.0838 rl:2.7585 rb:1.0664 dl:364-365 gd:1 +ttp: b309/782 bl:2.8315 bb:1.1047 rl:2.7585 rb:1.0664 dl:363-364 gd:1 +ttp: b308/782 bl:2.7995 bb:1.0876 rl:2.7586 rb:1.0664 dl:362-363 gd:1 +ttp: b307/782 bl:2.9011 bb:1.1090 rl:2.7587 rb:1.0664 dl:361-362 gd:1 +ttp: b306/782 bl:2.8806 bb:1.1397 rl:2.7587 rb:1.0665 dl:359-361 gd:1 +ttp: b305/782 bl:2.8607 bb:1.0853 rl:2.7588 rb:1.0665 dl:358-359 gd:1 +ttp: b304/782 bl:2.9040 bb:1.1310 rl:2.7589 rb:1.0665 dl:357-358 gd:1 +ttp: b303/782 bl:2.8113 bb:1.0891 rl:2.7589 rb:1.0665 dl:355-357 gd:1 +ttp: b302/782 bl:2.8363 bb:1.1000 rl:2.7590 rb:1.0666 dl:354-355 gd:1 +ttp: b301/782 bl:2.7947 bb:1.0868 rl:2.7590 rb:1.0666 dl:353-354 gd:1 +ttp: b300/782 bl:2.8600 bb:1.0901 rl:2.7591 rb:1.0666 dl:352-353 gd:1 +ttp: b299/782 bl:2.9037 bb:1.1260 rl:2.7591 rb:1.0666 dl:351-352 gd:1 +ttp: b298/782 bl:2.8486 bb:1.1026 rl:2.7592 rb:1.0666 dl:349-351 gd:1 +ttp: b297/782 bl:2.7997 bb:1.0607 rl:2.7592 rb:1.0666 dl:348-349 gd:1 +ttp: b296/782 bl:2.8090 bb:1.0863 rl:2.7593 rb:1.0667 dl:347-348 gd:1 +ttp: b295/782 bl:2.8419 bb:1.1205 rl:2.7593 rb:1.0667 dl:345-347 gd:1 +ttp: b293/782 bl:2.7665 bb:1.0688 rl:2.7593 rb:1.0667 dl:343-345 gd:1 +ttp: b294/782 bl:2.8478 bb:1.1013 rl:2.7594 rb:1.0667 dl:345-345 gd:1 +ttp: b292/782 bl:2.7938 bb:1.0825 rl:2.7594 rb:1.0667 dl:342-343 gd:1 +ttp: b291/782 bl:2.9563 bb:1.1161 rl:2.7595 rb:1.0667 dl:341-342 gd:1 +ttp: b290/782 bl:2.8713 bb:1.0878 rl:2.7596 rb:1.0668 dl:340-341 gd:1 +ttp: b289/782 bl:2.8385 bb:1.1237 rl:2.7596 rb:1.0668 dl:339-340 gd:1 +ttp: b288/782 bl:2.8165 bb:1.1058 rl:2.7596 rb:1.0668 dl:337-339 gd:1 +ttp: b287/782 bl:2.8598 bb:1.1156 rl:2.7597 rb:1.0668 dl:336-337 gd:1 +ttp: b286/782 bl:2.8983 bb:1.1010 rl:2.7598 rb:1.0669 dl:335-336 gd:1 +ttp: b285/782 bl:2.8835 bb:1.1285 rl:2.7599 rb:1.0669 dl:334-335 gd:1 +ttp: b284/782 bl:2.8811 bb:1.0861 rl:2.7599 rb:1.0669 dl:333-334 gd:1 +ttp: b283/782 bl:2.7992 bb:1.0738 rl:2.7600 rb:1.0669 dl:332-333 gd:1 +ttp: b282/782 bl:2.8263 bb:1.1255 rl:2.7600 rb:1.0669 dl:331-332 gd:1 +ttp: b281/782 bl:2.9306 bb:1.1555 rl:2.7601 rb:1.0670 dl:329-330 gd:1 +ttp: b279/782 bl:2.8615 bb:1.0937 rl:2.7601 rb:1.0670 dl:327-329 gd:1 +ttp: b280/782 bl:2.8107 bb:1.0908 rl:2.7602 rb:1.0670 dl:329-329 gd:1 +ttp: b278/782 bl:2.8929 bb:1.1407 
rl:2.7602 rb:1.0671 dl:326-327 gd:1 +ttp: b277/782 bl:2.8111 bb:1.1072 rl:2.7603 rb:1.0671 dl:325-326 gd:1 +ttp: b276/782 bl:2.8536 bb:1.1060 rl:2.7603 rb:1.0671 dl:324-325 gd:1 +ttp: b275/782 bl:2.7602 bb:1.0675 rl:2.7603 rb:1.0671 dl:323-324 gd:1 +ttp: b274/782 bl:2.8123 bb:1.0918 rl:2.7604 rb:1.0671 dl:322-323 gd:1 +ttp: b273/782 bl:2.7758 bb:1.0632 rl:2.7604 rb:1.0671 dl:321-322 gd:1 +ttp: b272/782 bl:2.8639 bb:1.1110 rl:2.7604 rb:1.0671 dl:320-321 gd:1 +ttp: b271/782 bl:2.7774 bb:1.0704 rl:2.7604 rb:1.0671 dl:319-320 gd:1 +ttp: b270/782 bl:2.7751 bb:1.0891 rl:2.7604 rb:1.0672 dl:318-319 gd:1 +ttp: b269/782 bl:2.9272 bb:1.1283 rl:2.7605 rb:1.0672 dl:316-318 gd:1 +ttp: b268/782 bl:2.8697 bb:1.1033 rl:2.7606 rb:1.0672 dl:315-316 gd:1 +ttp: b267/782 bl:2.8611 bb:1.0971 rl:2.7606 rb:1.0672 dl:314-315 gd:1 +ttp: b266/782 bl:2.8547 bb:1.0978 rl:2.7607 rb:1.0672 dl:313-314 gd:1 +ttp: b265/782 bl:2.8432 bb:1.0944 rl:2.7607 rb:1.0673 dl:312-313 gd:1 +ttp: b264/782 bl:2.8972 bb:1.1467 rl:2.7608 rb:1.0673 dl:311-312 gd:1 +ttp: b263/782 bl:2.8261 bb:1.1007 rl:2.7608 rb:1.0673 dl:310-311 gd:1 +ttp: b262/782 bl:2.8672 bb:1.1196 rl:2.7609 rb:1.0673 dl:309-310 gd:1 +ttp: b261/782 bl:2.8684 bb:1.1220 rl:2.7610 rb:1.0674 dl:308-309 gd:1 +ttp: b260/782 bl:2.8306 bb:1.1041 rl:2.7610 rb:1.0674 dl:306-307 gd:1 +ttp: b259/782 bl:2.8660 bb:1.1429 rl:2.7610 rb:1.0674 dl:305-306 gd:1 +ttp: b258/782 bl:2.9582 bb:1.1664 rl:2.7612 rb:1.0675 dl:304-305 gd:1 +ttp: b257/782 bl:2.9245 bb:1.1135 rl:2.7612 rb:1.0675 dl:302-304 gd:1 +ttp: b256/782 bl:2.8948 bb:1.1347 rl:2.7613 rb:1.0675 dl:301-302 gd:1 +ttp: b255/782 bl:2.8602 bb:1.1287 rl:2.7614 rb:1.0676 dl:300-301 gd:1 +ttp: b254/782 bl:2.9016 bb:1.1429 rl:2.7614 rb:1.0676 dl:299-300 gd:1 +ttp: b253/782 bl:2.7566 bb:1.0825 rl:2.7614 rb:1.0676 dl:298-299 gd:1 +ttp: b252/782 bl:2.8975 bb:1.1290 rl:2.7615 rb:1.0676 dl:297-298 gd:1 +ttp: b251/782 bl:2.8781 bb:1.1101 rl:2.7616 rb:1.0677 dl:296-297 gd:1 +ttp: b250/782 bl:2.8825 bb:1.1454 rl:2.7616 rb:1.0677 dl:295-296 gd:1 +ttp: b249/782 bl:2.8907 bb:1.1514 rl:2.7617 rb:1.0677 dl:294-295 gd:1 +ttp: b248/782 bl:2.8901 bb:1.1029 rl:2.7617 rb:1.0678 dl:293-294 gd:1 +ttp: b247/782 bl:2.7912 bb:1.0785 rl:2.7618 rb:1.0678 dl:292-293 gd:1 +ttp: b246/782 bl:2.9040 bb:1.1375 rl:2.7618 rb:1.0678 dl:291-292 gd:1 +ttp: b245/782 bl:2.8776 bb:1.1050 rl:2.7619 rb:1.0678 dl:290-291 gd:1 +ttp: b244/782 bl:2.9525 bb:1.1580 rl:2.7620 rb:1.0679 dl:289-290 gd:1 +ttp: b243/782 bl:2.8266 bb:1.1024 rl:2.7620 rb:1.0679 dl:288-289 gd:1 +ttp: b242/782 bl:2.9058 bb:1.1110 rl:2.7621 rb:1.0679 dl:287-288 gd:1 +ttp: b241/782 bl:2.9112 bb:1.1277 rl:2.7621 rb:1.0679 dl:286-287 gd:1 +ttp: b240/782 bl:2.9087 bb:1.1544 rl:2.7622 rb:1.0680 dl:285-286 gd:1 +ttp: b239/782 bl:2.8783 bb:1.1288 rl:2.7623 rb:1.0680 dl:284-285 gd:1 +ttp: b238/782 bl:2.8872 bb:1.1453 rl:2.7623 rb:1.0680 dl:283-284 gd:1 +ttp: b237/782 bl:2.9182 bb:1.1473 rl:2.7624 rb:1.0681 dl:282-283 gd:1 +ttp: b236/782 bl:2.8531 bb:1.1089 rl:2.7625 rb:1.0681 dl:281-282 gd:1 +ttp: b235/782 bl:2.9332 bb:1.1150 rl:2.7625 rb:1.0681 dl:280-281 gd:1 +ttp: b234/782 bl:2.9201 bb:1.1576 rl:2.7626 rb:1.0682 dl:279-280 gd:1 +ttp: b233/782 bl:2.8596 bb:1.1235 rl:2.7627 rb:1.0682 dl:278-279 gd:1 +ttp: b232/782 bl:2.9360 bb:1.1356 rl:2.7627 rb:1.0682 dl:277-278 gd:1 +ttp: b231/782 bl:2.8302 bb:1.1038 rl:2.7628 rb:1.0682 dl:276-277 gd:1 +ttp: b230/782 bl:2.9085 bb:1.1130 rl:2.7628 rb:1.0683 dl:275-276 gd:1 +ttp: b229/782 bl:2.8999 bb:1.1407 rl:2.7629 rb:1.0683 dl:274-275 gd:1 +ttp: b228/782 bl:2.8768 bb:1.1384 
rl:2.7629 rb:1.0683 dl:273-274 gd:1 +ttp: b227/782 bl:2.8046 bb:1.0877 rl:2.7630 rb:1.0683 dl:272-273 gd:1 +ttp: b226/782 bl:2.9493 bb:1.1474 rl:2.7631 rb:1.0684 dl:271-272 gd:1 +ttp: b225/782 bl:2.8843 bb:1.1234 rl:2.7631 rb:1.0684 dl:270-271 gd:1 +ttp: b224/782 bl:2.8172 bb:1.1066 rl:2.7631 rb:1.0684 dl:269-270 gd:1 +ttp: b223/782 bl:2.8262 bb:1.0881 rl:2.7632 rb:1.0684 dl:268-269 gd:1 +ttp: b222/782 bl:2.8748 bb:1.1169 rl:2.7632 rb:1.0684 dl:267-268 gd:1 +ttp: b221/782 bl:2.8477 bb:1.1429 rl:2.7632 rb:1.0685 dl:266-267 gd:1 +ttp: b220/782 bl:2.8658 bb:1.1090 rl:2.7633 rb:1.0685 dl:265-266 gd:1 +ttp: b219/782 bl:2.9093 bb:1.1351 rl:2.7634 rb:1.0685 dl:264-265 gd:1 +ttp: b218/782 bl:2.7370 bb:1.1006 rl:2.7633 rb:1.0685 dl:263-264 gd:1 +ttp: b217/782 bl:2.8857 bb:1.1303 rl:2.7634 rb:1.0686 dl:262-263 gd:1 +ttp: b216/782 bl:2.9335 bb:1.1163 rl:2.7635 rb:1.0686 dl:261-262 gd:1 +ttp: b215/782 bl:2.8526 bb:1.1446 rl:2.7635 rb:1.0686 dl:260-261 gd:1 +ttp: b214/782 bl:2.9386 bb:1.1305 rl:2.7636 rb:1.0686 dl:259-260 gd:1 +ttp: b213/782 bl:3.0137 bb:1.1759 rl:2.7637 rb:1.0687 dl:258-259 gd:1 +ttp: b212/782 bl:2.9336 bb:1.1482 rl:2.7638 rb:1.0687 dl:257-258 gd:1 +ttp: b211/782 bl:2.8919 bb:1.1521 rl:2.7638 rb:1.0687 dl:256-257 gd:1 +ttp: b210/782 bl:2.8564 bb:1.1241 rl:2.7639 rb:1.0688 dl:255-256 gd:1 +ttp: b209/782 bl:2.9173 bb:1.1549 rl:2.7639 rb:1.0688 dl:254-255 gd:1 +ttp: b207/782 bl:2.8446 bb:1.1190 rl:2.7640 rb:1.0688 dl:253-254 gd:1 +ttp: b208/782 bl:2.8327 bb:1.1185 rl:2.7640 rb:1.0688 dl:254-254 gd:1 +ttp: b206/782 bl:2.8871 bb:1.1175 rl:2.7640 rb:1.0689 dl:252-253 gd:1 +ttp: b205/782 bl:2.8456 bb:1.1103 rl:2.7641 rb:1.0689 dl:251-252 gd:1 +ttp: b204/782 bl:2.9152 bb:1.1340 rl:2.7641 rb:1.0689 dl:250-251 gd:1 +ttp: b203/782 bl:2.7791 bb:1.0917 rl:2.7641 rb:1.0689 dl:249-250 gd:1 +ttp: b202/782 bl:2.8635 bb:1.1319 rl:2.7642 rb:1.0689 dl:248-249 gd:1 +ttp: b201/782 bl:2.8731 bb:1.1199 rl:2.7642 rb:1.0690 dl:247-248 gd:1 +ttp: b199/782 bl:2.9487 bb:1.1301 rl:2.7643 rb:1.0690 dl:246-247 gd:1 +ttp: b200/782 bl:2.8488 bb:1.0949 rl:2.7643 rb:1.0690 dl:247-247 gd:1 +ttp: b198/782 bl:2.9833 bb:1.1538 rl:2.7644 rb:1.0690 dl:245-246 gd:1 +ttp: b197/782 bl:2.8555 bb:1.1261 rl:2.7645 rb:1.0691 dl:244-245 gd:1 +ttp: b196/782 bl:2.9175 bb:1.1690 rl:2.7645 rb:1.0691 dl:243-244 gd:1 +ttp: b195/782 bl:2.8558 bb:1.1178 rl:2.7646 rb:1.0691 dl:242-243 gd:1 +ttp: b194/782 bl:2.9044 bb:1.1023 rl:2.7646 rb:1.0691 dl:241-242 gd:1 +ttp: b193/782 bl:2.8891 bb:1.1640 rl:2.7647 rb:1.0692 dl:240-241 gd:1 +ttp: b192/782 bl:2.9128 bb:1.1482 rl:2.7647 rb:1.0692 dl:239-240 gd:1 +ttp: b191/782 bl:2.9377 bb:1.1472 rl:2.7648 rb:1.0692 dl:238-239 gd:1 +ttp: b190/782 bl:2.8809 bb:1.0952 rl:2.7648 rb:1.0692 dl:237-238 gd:1 +ttp: b188/782 bl:2.9062 bb:1.1513 rl:2.7649 rb:1.0693 dl:236-237 gd:1 +ttp: b189/782 bl:2.9636 bb:1.2028 rl:2.7650 rb:1.0693 dl:237-237 gd:1 +ttp: b187/782 bl:2.9036 bb:1.1196 rl:2.7650 rb:1.0693 dl:235-236 gd:1 +ttp: b186/782 bl:2.9408 bb:1.1750 rl:2.7651 rb:1.0694 dl:234-235 gd:1 +ttp: b185/782 bl:2.8634 bb:1.1240 rl:2.7651 rb:1.0694 dl:233-234 gd:1 +ttp: b184/782 bl:2.9033 bb:1.1528 rl:2.7652 rb:1.0694 dl:232-233 gd:1 +ttp: b183/782 bl:2.8673 bb:1.1445 rl:2.7652 rb:1.0695 dl:231-232 gd:1 +ttp: b182/782 bl:2.8500 bb:1.1337 rl:2.7653 rb:1.0695 dl:230-231 gd:1 +ttp: b180/782 bl:2.9123 bb:1.1357 rl:2.7653 rb:1.0695 dl:229-230 gd:1 +ttp: b181/782 bl:2.8876 bb:1.1603 rl:2.7654 rb:1.0695 dl:230-230 gd:1 +ttp: b179/782 bl:2.9517 bb:1.1530 rl:2.7654 rb:1.0696 dl:228-229 gd:1 +ttp: b178/782 bl:2.8542 bb:1.1383 
rl:2.7655 rb:1.0696 dl:227-228 gd:1 +ttp: b177/782 bl:2.9341 bb:1.1513 rl:2.7655 rb:1.0696 dl:226-227 gd:1 +ttp: b176/782 bl:2.8237 bb:1.1078 rl:2.7655 rb:1.0696 dl:225-226 gd:1 +ttp: b175/782 bl:2.8433 bb:1.1146 rl:2.7656 rb:1.0697 dl:225-225 gd:1 +ttp: b173/782 bl:2.9633 bb:1.1520 rl:2.7656 rb:1.0697 dl:223-224 gd:1 +ttp: b174/782 bl:2.9724 bb:1.1540 rl:2.7657 rb:1.0697 dl:224-224 gd:1 +ttp: b172/782 bl:3.0151 bb:1.1858 rl:2.7658 rb:1.0698 dl:222-223 gd:1 +ttp: b171/782 bl:2.8913 bb:1.1120 rl:2.7659 rb:1.0698 dl:221-222 gd:1 +ttp: b170/782 bl:2.9964 bb:1.1717 rl:2.7659 rb:1.0698 dl:220-221 gd:1 +ttp: b169/782 bl:2.9075 bb:1.1617 rl:2.7660 rb:1.0698 dl:219-220 gd:1 +ttp: b168/782 bl:2.9271 bb:1.1471 rl:2.7661 rb:1.0699 dl:218-219 gd:1 +ttp: b166/782 bl:2.9669 bb:1.1438 rl:2.7661 rb:1.0699 dl:217-218 gd:1 +ttp: b167/782 bl:2.9623 bb:1.1841 rl:2.7662 rb:1.0699 dl:218-218 gd:1 +ttp: b165/782 bl:2.9368 bb:1.1621 rl:2.7663 rb:1.0700 dl:216-217 gd:1 +ttp: b164/782 bl:2.9636 bb:1.1461 rl:2.7663 rb:1.0700 dl:215-216 gd:1 +ttp: b163/782 bl:2.8796 bb:1.1302 rl:2.7664 rb:1.0700 dl:214-215 gd:1 +ttp: b162/782 bl:2.9684 bb:1.1518 rl:2.7664 rb:1.0700 dl:213-214 gd:1 +ttp: b161/782 bl:2.9606 bb:1.1781 rl:2.7665 rb:1.0701 dl:212-213 gd:1 +ttp: b159/782 bl:2.9998 bb:1.1818 rl:2.7666 rb:1.0701 dl:211-212 gd:1 +ttp: b160/782 bl:2.8733 bb:1.1293 rl:2.7666 rb:1.0701 dl:212-212 gd:1 +ttp: b158/782 bl:2.8867 bb:1.1427 rl:2.7667 rb:1.0702 dl:210-211 gd:1 +ttp: b157/782 bl:2.8246 bb:1.1133 rl:2.7667 rb:1.0702 dl:209-210 gd:1 +ttp: b156/782 bl:2.8973 bb:1.1110 rl:2.7667 rb:1.0702 dl:208-209 gd:1 +ttp: b155/782 bl:2.8799 bb:1.1319 rl:2.7668 rb:1.0702 dl:207-208 gd:1 +ttp: b153/782 bl:3.0185 bb:1.1644 rl:2.7668 rb:1.0702 dl:206-207 gd:1 +ttp: b154/782 bl:2.9924 bb:1.1583 rl:2.7669 rb:1.0703 dl:207-207 gd:1 +ttp: b152/782 bl:2.8907 bb:1.1278 rl:2.7670 rb:1.0703 dl:205-206 gd:1 +ttp: b151/782 bl:2.7967 bb:1.1022 rl:2.7670 rb:1.0703 dl:204-205 gd:1 +ttp: b149/782 bl:2.9726 bb:1.1719 rl:2.7670 rb:1.0703 dl:203-204 gd:1 +ttp: b150/782 bl:2.9518 bb:1.1603 rl:2.7671 rb:1.0704 dl:204-204 gd:1 +ttp: b148/782 bl:2.9857 bb:1.1605 rl:2.7672 rb:1.0704 dl:202-203 gd:1 +ttp: b147/782 bl:2.9255 bb:1.1574 rl:2.7672 rb:1.0704 dl:201-202 gd:1 +ttp: b146/782 bl:2.9025 bb:1.1522 rl:2.7673 rb:1.0704 dl:200-201 gd:1 +ttp: b144/782 bl:2.8339 bb:1.1272 rl:2.7673 rb:1.0705 dl:199-200 gd:1 +ttp: b145/782 bl:2.8897 bb:1.1338 rl:2.7673 rb:1.0705 dl:200-200 gd:1 +ttp: b143/782 bl:3.0282 bb:1.1995 rl:2.7674 rb:1.0705 dl:198-199 gd:1 +ttp: b142/782 bl:2.9668 bb:1.1631 rl:2.7675 rb:1.0706 dl:197-198 gd:1 +ttp: b141/782 bl:2.9042 bb:1.1450 rl:2.7675 rb:1.0706 dl:196-197 gd:1 +ttp: b140/782 bl:2.9620 bb:1.1695 rl:2.7676 rb:1.0706 dl:195-196 gd:1 +ttp: b138/782 bl:2.9142 bb:1.1600 rl:2.7676 rb:1.0706 dl:194-195 gd:1 +ttp: b139/782 bl:2.9809 bb:1.1537 rl:2.7677 rb:1.0707 dl:195-195 gd:1 +ttp: b137/782 bl:2.9617 bb:1.1935 rl:2.7678 rb:1.0707 dl:193-194 gd:1 +ttp: b136/782 bl:2.9698 bb:1.1848 rl:2.7678 rb:1.0707 dl:192-193 gd:1 +ttp: b135/782 bl:2.9310 bb:1.1419 rl:2.7679 rb:1.0708 dl:191-192 gd:1 +ttp: b134/782 bl:3.0322 bb:1.2127 rl:2.7680 rb:1.0708 dl:190-191 gd:1 +ttp: b133/782 bl:3.0204 bb:1.1931 rl:2.7680 rb:1.0708 dl:189-190 gd:1 +ttp: b131/782 bl:3.0396 bb:1.2081 rl:2.7681 rb:1.0709 dl:188-189 gd:1 +ttp: b132/782 bl:2.9519 bb:1.1360 rl:2.7682 rb:1.0709 dl:189-189 gd:1 +ttp: b130/782 bl:3.1484 bb:1.2376 rl:2.7683 rb:1.0709 dl:187-188 gd:1 +ttp: b128/782 bl:2.8394 bb:1.0901 rl:2.7683 rb:1.0710 dl:186-187 gd:1 +ttp: b129/782 bl:2.9427 bb:1.1813 
rl:2.7684 rb:1.0710 dl:187-187 gd:1 +ttp: b127/782 bl:2.9036 bb:1.1478 rl:2.7684 rb:1.0710 dl:185-186 gd:1 +ttp: b125/782 bl:2.9947 bb:1.1868 rl:2.7685 rb:1.0710 dl:184-185 gd:1 +ttp: b126/782 bl:2.9395 bb:1.1944 rl:2.7685 rb:1.0711 dl:185-185 gd:1 +ttp: b124/782 bl:2.8708 bb:1.1483 rl:2.7686 rb:1.0711 dl:183-184 gd:1 +ttp: b123/782 bl:2.9626 bb:1.1836 rl:2.7686 rb:1.0711 dl:182-183 gd:1 +ttp: b122/782 bl:2.8970 bb:1.1591 rl:2.7686 rb:1.0712 dl:181-182 gd:1 +ttp: b120/782 bl:2.9737 bb:1.1685 rl:2.7687 rb:1.0712 dl:180-181 gd:1 +ttp: b121/782 bl:2.8485 bb:1.1285 rl:2.7687 rb:1.0712 dl:181-181 gd:1 +ttp: b119/782 bl:2.8137 bb:1.0896 rl:2.7687 rb:1.0712 dl:179-180 gd:1 +ttp: b118/782 bl:2.9630 bb:1.1576 rl:2.7688 rb:1.0712 dl:178-179 gd:1 +ttp: b116/782 bl:3.0123 bb:1.1912 rl:2.7689 rb:1.0713 dl:177-178 gd:1 +ttp: b117/782 bl:2.8602 bb:1.1464 rl:2.7689 rb:1.0713 dl:178-178 gd:1 +ttp: b115/782 bl:2.8643 bb:1.1558 rl:2.7689 rb:1.0713 dl:176-177 gd:1 +ttp: b113/782 bl:3.0306 bb:1.1916 rl:2.7690 rb:1.0713 dl:175-176 gd:1 +ttp: b114/782 bl:2.9996 bb:1.1875 rl:2.7691 rb:1.0714 dl:176-176 gd:1 +ttp: b112/782 bl:2.9795 bb:1.1524 rl:2.7691 rb:1.0714 dl:174-175 gd:1 +ttp: b111/782 bl:2.9730 bb:1.1862 rl:2.7692 rb:1.0714 dl:173-174 gd:1 +ttp: b109/782 bl:3.0723 bb:1.2107 rl:2.7693 rb:1.0715 dl:172-173 gd:1 +ttp: b110/782 bl:3.0269 bb:1.1752 rl:2.7693 rb:1.0715 dl:173-173 gd:1 +ttp: b108/782 bl:2.8621 bb:1.0992 rl:2.7694 rb:1.0715 dl:171-172 gd:1 +ttp: b106/782 bl:2.9251 bb:1.1815 rl:2.7694 rb:1.0715 dl:170-171 gd:1 +ttp: b107/782 bl:2.9169 bb:1.1450 rl:2.7694 rb:1.0715 dl:171-171 gd:1 +ttp: b105/782 bl:3.0599 bb:1.2257 rl:2.7695 rb:1.0716 dl:169-170 gd:1 +ttp: b104/782 bl:2.9944 bb:1.1652 rl:2.7696 rb:1.0716 dl:168-169 gd:1 +ttp: b102/782 bl:2.7628 bb:1.1125 rl:2.7696 rb:1.0716 dl:167-168 gd:1 +ttp: b103/782 bl:2.8933 bb:1.1198 rl:2.7696 rb:1.0716 dl:168-168 gd:1 +ttp: b101/782 bl:2.9514 bb:1.1584 rl:2.7697 rb:1.0717 dl:166-167 gd:1 +ttp: b100/782 bl:2.9465 bb:1.1566 rl:2.7697 rb:1.0717 dl:165-166 gd:1 +ttp: b99/782 bl:2.9868 bb:1.1876 rl:2.7698 rb:1.0717 dl:164-165 gd:1 +ttp: b97/782 bl:2.9965 bb:1.1706 rl:2.7698 rb:1.0717 dl:163-164 gd:1 +ttp: b98/782 bl:2.9863 bb:1.1853 rl:2.7699 rb:1.0718 dl:164-164 gd:1 +ttp: b96/782 bl:2.9502 bb:1.1531 rl:2.7699 rb:1.0718 dl:162-163 gd:1 +ttp: b95/782 bl:3.0194 bb:1.2296 rl:2.7700 rb:1.0718 dl:161-162 gd:1 +ttp: b94/782 bl:2.9866 bb:1.1778 rl:2.7700 rb:1.0719 dl:160-161 gd:1 +ttp: b92/782 bl:2.9056 bb:1.1752 rl:2.7701 rb:1.0719 dl:159-160 gd:1 +ttp: b93/782 bl:2.9574 bb:1.1863 rl:2.7701 rb:1.0719 dl:160-160 gd:1 +ttp: b91/782 bl:3.0404 bb:1.2169 rl:2.7702 rb:1.0719 dl:158-159 gd:1 +ttp: b89/782 bl:3.0117 bb:1.2011 rl:2.7703 rb:1.0720 dl:157-158 gd:1 +ttp: b90/782 bl:2.9975 bb:1.1823 rl:2.7703 rb:1.0720 dl:158-158 gd:1 +ttp: b88/782 bl:3.1025 bb:1.2080 rl:2.7704 rb:1.0720 dl:156-157 gd:1 +ttp: b87/782 bl:3.0175 bb:1.2061 rl:2.7705 rb:1.0721 dl:155-156 gd:1 +ttp: b86/782 bl:3.0398 bb:1.2652 rl:2.7705 rb:1.0721 dl:154-155 gd:1 +ttp: b84/782 bl:3.0149 bb:1.2147 rl:2.7706 rb:1.0721 dl:153-154 gd:1 +ttp: b85/782 bl:2.9738 bb:1.1940 rl:2.7706 rb:1.0722 dl:154-154 gd:1 +ttp: b83/782 bl:3.0344 bb:1.2126 rl:2.7707 rb:1.0722 dl:152-153 gd:1 +ttp: b82/782 bl:2.9739 bb:1.1971 rl:2.7707 rb:1.0722 dl:151-152 gd:1 +ttp: b80/782 bl:2.9117 bb:1.1927 rl:2.7708 rb:1.0723 dl:150-151 gd:1 +ttp: b81/782 bl:2.9345 bb:1.1670 rl:2.7708 rb:1.0723 dl:151-151 gd:1 +ttp: b79/782 bl:3.0332 bb:1.2042 rl:2.7709 rb:1.0723 dl:149-150 gd:1 +ttp: b78/782 bl:2.9099 bb:1.1290 rl:2.7709 rb:1.0723 
dl:148-149 gd:1 +ttp: b76/782 bl:3.0526 bb:1.2247 rl:2.7710 rb:1.0724 dl:147-148 gd:1 +ttp: b77/782 bl:3.0286 bb:1.1702 rl:2.7710 rb:1.0724 dl:148-148 gd:1 +ttp: b75/782 bl:3.0913 bb:1.2137 rl:2.7711 rb:1.0724 dl:146-147 gd:1 +ttp: b74/782 bl:3.1168 bb:1.2740 rl:2.7712 rb:1.0725 dl:145-146 gd:1 +ttp: b73/782 bl:3.0565 bb:1.2091 rl:2.7713 rb:1.0725 dl:144-145 gd:1 +ttp: b71/782 bl:2.9576 bb:1.1539 rl:2.7713 rb:1.0725 dl:143-144 gd:1 +ttp: b72/782 bl:2.9340 bb:1.1923 rl:2.7713 rb:1.0725 dl:144-144 gd:1 +ttp: b70/782 bl:3.0643 bb:1.1644 rl:2.7714 rb:1.0726 dl:142-143 gd:1 +ttp: b68/782 bl:3.1052 bb:1.2064 rl:2.7715 rb:1.0726 dl:141-142 gd:1 +ttp: b69/782 bl:3.1098 bb:1.2338 rl:2.7716 rb:1.0726 dl:142-142 gd:1 +ttp: b67/782 bl:3.0702 bb:1.2410 rl:2.7716 rb:1.0727 dl:140-141 gd:1 +ttp: b66/782 bl:3.1102 bb:1.2740 rl:2.7717 rb:1.0727 dl:139-140 gd:1 +ttp: b64/782 bl:2.9964 bb:1.2420 rl:2.7718 rb:1.0727 dl:138-139 gd:1 +ttp: b65/782 bl:3.0368 bb:1.2193 rl:2.7718 rb:1.0728 dl:139-139 gd:1 +ttp: b63/782 bl:3.0264 bb:1.2206 rl:2.7719 rb:1.0728 dl:137-138 gd:1 +ttp: b62/782 bl:2.9897 bb:1.2093 rl:2.7719 rb:1.0728 dl:136-137 gd:1 +ttp: b61/782 bl:2.9348 bb:1.1474 rl:2.7719 rb:1.0728 dl:135-136 gd:1 +ttp: b60/782 bl:3.0733 bb:1.2334 rl:2.7720 rb:1.0729 dl:134-135 gd:1 +ttp: b58/782 bl:2.9820 bb:1.2300 rl:2.7721 rb:1.0729 dl:133-134 gd:1 +ttp: b59/782 bl:3.0566 bb:1.1939 rl:2.7721 rb:1.0729 dl:134-134 gd:1 +ttp: b57/782 bl:3.0414 bb:1.2260 rl:2.7722 rb:1.0730 dl:132-133 gd:1 +ttp: b56/782 bl:3.0594 bb:1.2058 rl:2.7722 rb:1.0730 dl:131-132 gd:1 +ttp: b55/782 bl:3.0800 bb:1.2370 rl:2.7723 rb:1.0730 dl:130-131 gd:1 +ttp: b53/782 bl:3.1230 bb:1.2312 rl:2.7724 rb:1.0731 dl:129-130 gd:1 +ttp: b54/782 bl:3.1119 bb:1.2743 rl:2.7724 rb:1.0731 dl:130-130 gd:1 +ttp: b52/782 bl:3.0574 bb:1.1963 rl:2.7725 rb:1.0731 dl:128-129 gd:1 +ttp: b51/782 bl:3.0557 bb:1.2214 rl:2.7726 rb:1.0731 dl:127-128 gd:1 +ttp: b50/782 bl:2.9899 bb:1.2273 rl:2.7726 rb:1.0732 dl:126-127 gd:1 +ttp: b48/782 bl:2.9965 bb:1.1715 rl:2.7726 rb:1.0732 dl:125-126 gd:1 +ttp: b49/782 bl:2.9750 bb:1.1737 rl:2.7727 rb:1.0732 dl:126-126 gd:1 +ttp: b47/782 bl:2.9483 bb:1.1782 rl:2.7727 rb:1.0732 dl:124-125 gd:1 +ttp: b46/782 bl:3.1327 bb:1.2250 rl:2.7728 rb:1.0733 dl:123-124 gd:1 +ttp: b45/782 bl:3.0959 bb:1.2385 rl:2.7728 rb:1.0733 dl:122-123 gd:1 +ttp: b43/782 bl:2.9829 bb:1.1862 rl:2.7729 rb:1.0733 dl:121-122 gd:1 +ttp: b44/782 bl:3.1545 bb:1.2292 rl:2.7730 rb:1.0733 dl:122-122 gd:1 +ttp: b42/782 bl:3.1159 bb:1.2474 rl:2.7730 rb:1.0734 dl:120-121 gd:1 +ttp: b41/782 bl:3.1457 bb:1.2856 rl:2.7731 rb:1.0734 dl:119-120 gd:1 +ttp: b39/782 bl:3.1424 bb:1.2418 rl:2.7732 rb:1.0734 dl:118-119 gd:1 +ttp: b40/782 bl:3.0131 bb:1.2119 rl:2.7732 rb:1.0735 dl:119-119 gd:1 +ttp: b38/782 bl:3.0461 bb:1.2158 rl:2.7733 rb:1.0735 dl:117-118 gd:1 +ttp: b37/782 bl:3.0916 bb:1.2137 rl:2.7733 rb:1.0735 dl:116-117 gd:1 +ttp: b36/782 bl:2.9954 bb:1.2250 rl:2.7734 rb:1.0735 dl:115-116 gd:1 +ttp: b34/782 bl:3.0884 bb:1.2503 rl:2.7734 rb:1.0736 dl:114-115 gd:1 +ttp: b35/782 bl:3.0183 bb:1.1986 rl:2.7735 rb:1.0736 dl:115-115 gd:1 +ttp: b33/782 bl:3.0917 bb:1.2103 rl:2.7735 rb:1.0736 dl:113-114 gd:1 +ttp: b32/782 bl:3.0289 bb:1.2108 rl:2.7736 rb:1.0736 dl:112-113 gd:1 +ttp: b31/782 bl:3.1927 bb:1.2652 rl:2.7736 rb:1.0737 dl:111-112 gd:1 +ttp: b30/782 bl:3.1219 bb:1.2506 rl:2.7737 rb:1.0737 dl:110-111 gd:1 +ttp: b29/782 bl:3.0662 bb:1.2496 rl:2.7737 rb:1.0737 dl:109-110 gd:1 +ttp: b28/782 bl:3.0201 bb:1.2161 rl:2.7738 rb:1.0738 dl:108-109 gd:1 +ttp: b27/782 bl:3.1021 bb:1.2384 
rl:2.7738 rb:1.0738 dl:107-108 gd:1 +ttp: b25/782 bl:3.3073 bb:1.3107 rl:2.7739 rb:1.0738 dl:106-107 gd:1 +ttp: b26/782 bl:3.0816 bb:1.2564 rl:2.7740 rb:1.0739 dl:107-107 gd:1 +ttp: b24/782 bl:3.0706 bb:1.2148 rl:2.7740 rb:1.0739 dl:105-106 gd:1 +ttp: b23/782 bl:3.1468 bb:1.2542 rl:2.7741 rb:1.0739 dl:104-105 gd:1 +ttp: b22/782 bl:3.1651 bb:1.2338 rl:2.7742 rb:1.0739 dl:103-104 gd:1 +ttp: b21/782 bl:3.2062 bb:1.2467 rl:2.7742 rb:1.0740 dl:102-103 gd:1 +ttp: b20/782 bl:3.1298 bb:1.2658 rl:2.7743 rb:1.0740 dl:101-102 gd:1 +ttp: b19/782 bl:3.1403 bb:1.2265 rl:2.7743 rb:1.0740 dl:100-101 gd:1 +ttp: b18/782 bl:3.1301 bb:1.2679 rl:2.7744 rb:1.0740 dl:99-100 gd:1 +ttp: b17/782 bl:3.1235 bb:1.2381 rl:2.7744 rb:1.0741 dl:98-99 gd:1 +ttp: b16/782 bl:3.0436 bb:1.2135 rl:2.7745 rb:1.0741 dl:97-98 gd:1 +ttp: b15/782 bl:3.2469 bb:1.2422 rl:2.7746 rb:1.0741 dl:95-97 gd:1 +ttp: b14/782 bl:3.1289 bb:1.2306 rl:2.7746 rb:1.0741 dl:94-95 gd:1 +ttp: b13/782 bl:3.1606 bb:1.2721 rl:2.7747 rb:1.0742 dl:93-94 gd:1 +ttp: b12/782 bl:3.1862 bb:1.2419 rl:2.7747 rb:1.0742 dl:92-93 gd:1 +ttp: b11/782 bl:3.2417 bb:1.2669 rl:2.7748 rb:1.0742 dl:90-92 gd:1 +ttp: b10/782 bl:3.1175 bb:1.2319 rl:2.7748 rb:1.0742 dl:89-90 gd:1 +ttp: b9/782 bl:3.2139 bb:1.2736 rl:2.7749 rb:1.0743 dl:87-89 gd:1 +ttp: b8/782 bl:3.2665 bb:1.2627 rl:2.7750 rb:1.0743 dl:86-87 gd:1 +ttp: b7/782 bl:3.2287 bb:1.2388 rl:2.7750 rb:1.0743 dl:84-86 gd:1 +ttp: b6/782 bl:3.2769 bb:1.2785 rl:2.7751 rb:1.0743 dl:82-84 gd:1 +ttp: b5/782 bl:3.3096 bb:1.2909 rl:2.7752 rb:1.0744 dl:80-82 gd:1 +ttp: b4/782 bl:3.2215 bb:1.2352 rl:2.7752 rb:1.0744 dl:78-80 gd:1 +ttp: b3/782 bl:3.3301 bb:1.2630 rl:2.7753 rb:1.0744 dl:75-78 gd:1 +ttp: b2/782 bl:3.1534 bb:1.1696 rl:2.7753 rb:1.0744 dl:70-75 gd:1 +ttp: b1/782 bl:3.3717 bb:1.2513 rl:2.7754 rb:1.0744 dl:45-70 gd:1 +quantized_ttt_phased val_loss:2.77538792 val_bpb:1.07443915 eval_time:3147523ms +total_eval_time:3147.5s diff --git a/train_h200_seed42.log b/train_h200_seed42.log new file mode 100644 index 0000000000..960840cca6 --- /dev/null +++ b/train_h200_seed42.log @@ -0,0 +1,1318 @@ +Hyperparameters: + adam_eps: 1e-08 + adam_wd: 0.02 + artifact_dir: + attn_clip_sigmas: 13.0 + attn_out_gate_enabled: False + attn_out_gate_src: proj + beta1: 0.9 + beta2: 0.95 + bigram_blend_enabled: True + bigram_blend_lambda: 0.03 + caseops_enabled: False + compressor: brotli + data_dir: ./data/ + datasets_dir: ./data/datasets/fineweb10B_sp8192 + distributed: False + ema_decay: 0.9965 + embed_bits: 7 + embed_clip_sigmas: 14.0 + embed_lr: 0.6 + embed_wd: 0.085 + enable_looping_at: 0.35 + entropy_weighted_loss: False + eval_seq_len: 2048 + eval_stride: 64 + ewl_max_weight: 3.0 + ewl_min_weight: 0.3 + fused_ce_enabled: True + gate_window: 12 + gated_attn_enabled: False + gated_attn_init_std: 0.01 + gated_attn_quant_gate: False + global_ttt_batch_seqs: 32 + global_ttt_chunk_tokens: 32768 + global_ttt_epochs: 1 + global_ttt_grad_clip: 1.0 + global_ttt_lr: 0.001 + global_ttt_momentum: 0.9 + global_ttt_respect_doc_boundaries: True + global_ttt_warmup_chunks: 0 + global_ttt_warmup_start_lr: 0.0 + gptq_calibration_batches: 16 + gptq_reserve_seconds: 4.0 + grad_accum_steps: 8 + grad_clip_norm: 0.3 + is_main_process: True + iterations: 5000 + leaky_relu_slope: 0.3 + ln_scale: True + local_rank: 0 + logfile: logs/05cc0d77-a468-429a-8e4a-6d9ba565bfa9.txt + logit_softcap: 30.0 + loop_end: 5 + loop_start: 3 + lqer_asym_enabled: True + lqer_asym_group: 64 + lqer_enabled: True + lqer_factor_bits: 4 + lqer_rank: 4 + lqer_top_k: 3 + matrix_bits: 6 + 
matrix_clip_sigmas: 12.85 + matrix_lr: 0.026 + max_wallclock_seconds: 0.0 + min_lr: 0.1 + mlp_clip_sigmas: 11.5 + mlp_mult: 4.0 + model_dim: 512 + model_path: final_model.pt + muon_backend_steps: 5 + muon_momentum: 0.97 + muon_momentum_warmup_start: 0.92 + muon_momentum_warmup_steps: 1500 + muon_row_normalize: True + muon_wd: 0.095 + num_heads: 8 + num_kv_heads: 4 + num_layers: 11 + num_loops: 2 + parallel_final_lane: mean + parallel_start_layer: 8 + phased_ttt_num_phases: 1 + phased_ttt_prefix_docs: 2000 + qk_gain_init: 5.25 + quantized_model_path: final_model.int6.ptz + rank: 0 + rope_base: 10000.0 + rope_dims: 16 + rope_train_seq_len: 2048 + rope_yarn: False + run_id: 05cc0d77-a468-429a-8e4a-6d9ba565bfa9 + scalar_lr: 0.02 + seed: 42 + skip_gates_enabled: True + smear_gate_enabled: True + sparse_attn_gate_enabled: True + sparse_attn_gate_init_std: 0.0 + sparse_attn_gate_scale: 1.0 + temp_scale_enabled: False + temp_scale_ent_high: 6.0 + temp_scale_ent_low: 2.0 + temp_scale_high: 1.15 + temp_scale_low: 0.85 + tie_embeddings: True + tied_embed_init_std: 0.005 + tied_embed_lr: 0.03 + tokenizer_path: ./data/tokenizers/fineweb_8192_bpe.model + train_batch_tokens: 786432 + train_files: ./data/datasets/fineweb10B_sp8192/fineweb_train_*.bin + train_log_every: 500 + train_seq_len: 2048 + ttt_batch_size: 64 + ttt_beta1: 0.0 + ttt_beta2: 0.999 + ttt_chunk_size: 48 + ttt_enabled: True + ttt_eval_batches: + ttt_eval_seq_len: 2048 + ttt_grad_steps: 1 + ttt_k_lora: True + ttt_lora_lr: 0.0001 + ttt_lora_rank: 96 + ttt_mlp_lora: True + ttt_o_lora: True + ttt_optimizer: adam + ttt_weight_decay: 1.0 + val_batch_tokens: 524288 + val_bytes_files: ./data/datasets/fineweb10B_sp8192/fineweb_val_bytes_*.bin + val_doc_fraction: 1.0 + val_files: ./data/datasets/fineweb10B_sp8192/fineweb_val_*.bin + val_loss_every: 5000 + vocab_size: 8192 + warmdown_frac: 0.75 + warmup_steps: 20 + world_size: 1 + xsa_last_n: 11 +train_shards: 5 +val_tokens: 40540160 +model_params:35945671 +warmup_cu_buckets:64,128,192,256 iters_each:3 +warmup_step: 1/20 +warmup_step: 2/20 +warmup_step: 3/20 +warmup_step: 4/20 +warmup_step: 5/20 +warmup_step: 6/20 +warmup_step: 10/20 +warmup_step: 20/20 +loop_warmup:enabled encoder:[0, 1, 2, 3, 4, 5, 3, 4] decoder:[5, 3, 4, 5, 6, 7, 8, 9, 10] +loop_warmup_step: 1/20 +loop_warmup_step: 2/20 +loop_warmup_step: 3/20 +loop_warmup_step: 4/20 +loop_warmup_step: 5/20 +loop_warmup_step: 6/20 +loop_warmup_step: 10/20 +loop_warmup_step: 20/20 +bigram_blend:enabled lambda=0.03 +0/5000 val_loss: 7.9756 val_bpb: 3.0875 +1/5000 train_loss: 9.0069 train_time: 0.0m tok/s: 1328571 +2/5000 train_loss: 12.3981 train_time: 0.0m tok/s: 1258766 +3/5000 train_loss: 11.1555 train_time: 0.0m tok/s: 1224762 +4/5000 train_loss: 9.6100 train_time: 0.0m tok/s: 1210484 +5/5000 train_loss: 8.4556 train_time: 0.1m tok/s: 1204652 +500/5000 train_loss: 3.3133 train_time: 5.6m tok/s: 1166605 +1000/5000 train_loss: 3.2142 train_time: 11.2m tok/s: 1165948 +1500/5000 train_loss: 3.2284 train_time: 16.9m tok/s: 1165761 +layer_loop:enabled step:1750 frac:0.350 encoder:[0, 1, 2, 3, 4, 5, 3, 4] decoder:[5, 3, 4, 5, 6, 7, 8, 9, 10] +2000/5000 train_loss: 3.0753 train_time: 23.9m tok/s: 1097010 +2500/5000 train_loss: 3.0206 train_time: 32.3m tok/s: 1013577 +3000/5000 train_loss: 2.9492 train_time: 40.7m tok/s: 964959 +3500/5000 train_loss: 2.8882 train_time: 49.2m tok/s: 932962 +4000/5000 train_loss: 2.7564 train_time: 57.6m tok/s: 909550 +4500/5000 train_loss: 2.7710 train_time: 66.1m tok/s: 892841 +5000/5000 train_loss: 2.8095 train_time: 
74.5m tok/s: 879933 +bigram_blend:enabled lambda=0.03 +5000/5000 val_loss: 2.7874 val_bpb: 1.0791 +peak memory allocated: 42423 MiB reserved: 59798 MiB +ema:applying EMA weights +bigram_blend:enabled lambda=0.03 +diagnostic pre-quantization post-ema val_loss:2.75201371 val_bpb:1.06536170 eval_time:30271ms +Serialized model: 135417533 bytes +Code size (uncompressed): 164068 bytes +Code size (compressed): 32949 bytes +GPTQ:collecting Hessians from calibration data... +GPTQ:collected 67 Hessians in 5.0s +Quantized weights: + gptq (int6): blocks.attn.c_k.weight, blocks.attn.c_q.weight, blocks.attn.c_v.weight, blocks.attn.proj.weight, blocks.mlp.fc.weight, blocks.mlp.proj.weight + gptq (int6)+lqer_asym: blocks.mlp.fc.weight + gptq (int7)+lqer_asym: tok_emb.weight + passthrough (float16): blocks.attn.attn_gate_w, blocks.attn.q_gain, blocks.attn_scale, blocks.mlp_scale, blocks.resid_mix, parallel_post_lambdas, parallel_resid_lambdas, skip_gates, skip_weights, smear_gate.weight, smear_lambda +Serialized model quantized+brotli: 16118149 bytes +Total submission size quantized+brotli: 16151098 bytes +bigram_blend:enabled lambda=0.03 +diagnostic quantized val_loss:2.77695592 val_bpb:1.07501735 eval_time:35580ms +ttt_lora:warming up compile (random tokens, no val data) +ttt_lora:compile warmup done (5.9s) + +beginning TTT eval timer +ttt_phased: total_docs:50000 prefix_docs:2000 suffix_docs:48000 num_phases:1 boundaries:[2000] +ttp: b782/782 bl:2.5601 bb:1.0336 rl:2.5601 rb:1.0336 dl:26524-79464 gd:0 +ttp: b781/782 bl:2.5664 bb:1.0595 rl:2.5641 rb:1.0500 dl:14510-25988 gd:0 +ttp: b780/782 bl:2.6243 bb:1.0767 rl:2.5821 rb:1.0580 dl:11071-14414 gd:0 +ttp: b779/782 bl:2.6510 bb:1.0798 rl:2.5953 rb:1.0622 dl:9037-11049 gd:0 +ttp: b778/782 bl:2.7933 bb:1.1175 rl:2.6230 rb:1.0701 dl:7961-8997 gd:0 +ttp: b777/782 bl:2.7329 bb:1.0931 rl:2.6352 rb:1.0727 dl:7190-7938 gd:0 +ttp: b776/782 bl:2.7258 bb:1.0906 rl:2.6434 rb:1.0743 dl:6364-7180 gd:0 +ttp: b775/782 bl:2.6922 bb:1.0658 rl:2.6471 rb:1.0736 dl:5853-6355 gd:0 +ttp: b774/782 bl:2.7339 bb:1.0797 rl:2.6528 rb:1.0741 dl:5552-5852 gd:0 +ttp: b773/782 bl:2.6577 bb:1.0784 rl:2.6531 rb:1.0743 dl:5203-5550 gd:0 +ttp: b772/782 bl:2.7691 bb:1.1076 rl:2.6592 rb:1.0761 dl:4937-5193 gd:0 +ttp: b771/782 bl:2.7681 bb:1.0823 rl:2.6644 rb:1.0764 dl:4701-4937 gd:0 +ttp: b770/782 bl:2.6653 bb:1.0547 rl:2.6644 rb:1.0754 dl:4479-4698 gd:0 +ttp: b769/782 bl:2.7772 bb:1.0990 rl:2.6689 rb:1.0764 dl:4307-4479 gd:0 +ttp: b768/782 bl:2.7130 bb:1.0887 rl:2.6705 rb:1.0768 dl:4128-4306 gd:0 +ttp: b767/782 bl:2.7602 bb:1.1021 rl:2.6735 rb:1.0777 dl:3963-4123 gd:0 +ttp: b766/782 bl:2.6505 bb:1.0376 rl:2.6728 rb:1.0764 dl:3846-3962 gd:0 +ttp: b765/782 bl:2.7885 bb:1.0951 rl:2.6763 rb:1.0770 dl:3743-3845 gd:0 +ttp: b764/782 bl:2.7714 bb:1.1012 rl:2.6790 rb:1.0777 dl:3639-3742 gd:0 +ttp: b763/782 bl:2.8003 bb:1.1050 rl:2.6822 rb:1.0784 dl:3536-3637 gd:0 +ttp: b762/782 bl:2.8307 bb:1.0774 rl:2.6860 rb:1.0784 dl:3431-3533 gd:0 +ttp: b761/782 bl:2.7552 bb:1.0658 rl:2.6877 rb:1.0781 dl:3336-3430 gd:0 +ttp: b760/782 bl:2.8490 bb:1.1189 rl:2.6914 rb:1.0790 dl:3255-3334 gd:0 +ttp: b759/782 bl:2.7303 bb:1.1020 rl:2.6922 rb:1.0795 dl:3188-3253 gd:0 +ttp: b758/782 bl:2.8857 bb:1.0888 rl:2.6963 rb:1.0797 dl:3108-3187 gd:0 +ttp: b757/782 bl:2.6431 bb:1.0215 rl:2.6952 rb:1.0785 dl:3033-3108 gd:0 +ttp: b756/782 bl:2.7870 bb:1.0802 rl:2.6970 rb:1.0786 dl:2973-3032 gd:0 +ttp: b755/782 bl:2.6930 bb:1.0434 rl:2.6969 rb:1.0779 dl:2899-2972 gd:0 +ttp: b754/782 bl:2.6921 bb:1.0563 rl:2.6968 rb:1.0775 
dl:2839-2899 gd:0 +ttp: b753/782 bl:2.7510 bb:1.0492 rl:2.6977 rb:1.0770 dl:2795-2838 gd:0 +ttp: b752/782 bl:2.7685 bb:1.0622 rl:2.6989 rb:1.0767 dl:2740-2793 gd:0 +ttp: b751/782 bl:2.7976 bb:1.0740 rl:2.7005 rb:1.0767 dl:2689-2740 gd:0 +ttpp: phase:1/1 pd:2000 gd:2000 t:1653.6s +tttg: c1/333 lr:0.001000 t:0.2s +tttg: c2/333 lr:0.001000 t:0.3s +tttg: c3/333 lr:0.001000 t:0.4s +tttg: c4/333 lr:0.001000 t:0.5s +tttg: c5/333 lr:0.001000 t:0.6s +tttg: c6/333 lr:0.000999 t:0.7s +tttg: c7/333 lr:0.000999 t:0.8s +tttg: c8/333 lr:0.000999 t:0.9s +tttg: c9/333 lr:0.000999 t:1.1s +tttg: c10/333 lr:0.000998 t:1.2s +tttg: c11/333 lr:0.000998 t:1.3s +tttg: c12/333 lr:0.000997 t:1.4s +tttg: c13/333 lr:0.000997 t:1.5s +tttg: c14/333 lr:0.000996 t:1.6s +tttg: c15/333 lr:0.000996 t:1.7s +tttg: c16/333 lr:0.000995 t:1.8s +tttg: c17/333 lr:0.000994 t:2.0s +tttg: c18/333 lr:0.000994 t:2.1s +tttg: c19/333 lr:0.000993 t:2.2s +tttg: c20/333 lr:0.000992 t:2.3s +tttg: c21/333 lr:0.000991 t:2.4s +tttg: c22/333 lr:0.000990 t:2.5s +tttg: c23/333 lr:0.000989 t:2.6s +tttg: c24/333 lr:0.000988 t:2.7s +tttg: c25/333 lr:0.000987 t:2.9s +tttg: c26/333 lr:0.000986 t:3.0s +tttg: c27/333 lr:0.000985 t:3.1s +tttg: c28/333 lr:0.000984 t:3.2s +tttg: c29/333 lr:0.000983 t:3.3s +tttg: c30/333 lr:0.000981 t:3.4s +tttg: c31/333 lr:0.000980 t:3.5s +tttg: c32/333 lr:0.000979 t:3.6s +tttg: c33/333 lr:0.000977 t:3.8s +tttg: c34/333 lr:0.000976 t:3.9s +tttg: c35/333 lr:0.000974 t:4.0s +tttg: c36/333 lr:0.000973 t:4.1s +tttg: c37/333 lr:0.000971 t:4.2s +tttg: c38/333 lr:0.000970 t:4.3s +tttg: c39/333 lr:0.000968 t:4.4s +tttg: c40/333 lr:0.000966 t:4.5s +tttg: c41/333 lr:0.000965 t:4.7s +tttg: c42/333 lr:0.000963 t:4.8s +tttg: c43/333 lr:0.000961 t:4.9s +tttg: c44/333 lr:0.000959 t:5.0s +tttg: c45/333 lr:0.000957 t:5.1s +tttg: c46/333 lr:0.000955 t:5.2s +tttg: c47/333 lr:0.000953 t:5.3s +tttg: c48/333 lr:0.000951 t:5.5s +tttg: c49/333 lr:0.000949 t:5.6s +tttg: c50/333 lr:0.000947 t:5.7s +tttg: c51/333 lr:0.000945 t:5.8s +tttg: c52/333 lr:0.000943 t:5.9s +tttg: c53/333 lr:0.000941 t:6.0s +tttg: c54/333 lr:0.000938 t:6.1s +tttg: c55/333 lr:0.000936 t:6.2s +tttg: c56/333 lr:0.000934 t:6.4s +tttg: c57/333 lr:0.000931 t:6.5s +tttg: c58/333 lr:0.000929 t:6.6s +tttg: c59/333 lr:0.000927 t:6.7s +tttg: c60/333 lr:0.000924 t:6.8s +tttg: c61/333 lr:0.000922 t:6.9s +tttg: c62/333 lr:0.000919 t:7.0s +tttg: c63/333 lr:0.000916 t:7.1s +tttg: c64/333 lr:0.000914 t:7.3s +tttg: c65/333 lr:0.000911 t:7.4s +tttg: c66/333 lr:0.000908 t:7.5s +tttg: c67/333 lr:0.000906 t:7.6s +tttg: c68/333 lr:0.000903 t:7.7s +tttg: c69/333 lr:0.000900 t:7.8s +tttg: c70/333 lr:0.000897 t:7.9s +tttg: c71/333 lr:0.000894 t:8.0s +tttg: c72/333 lr:0.000891 t:8.2s +tttg: c73/333 lr:0.000888 t:8.3s +tttg: c74/333 lr:0.000885 t:8.4s +tttg: c75/333 lr:0.000882 t:8.5s +tttg: c76/333 lr:0.000879 t:8.6s +tttg: c77/333 lr:0.000876 t:8.7s +tttg: c78/333 lr:0.000873 t:8.8s +tttg: c79/333 lr:0.000870 t:9.0s +tttg: c80/333 lr:0.000867 t:9.1s +tttg: c81/333 lr:0.000863 t:9.2s +tttg: c82/333 lr:0.000860 t:9.3s +tttg: c83/333 lr:0.000857 t:9.4s +tttg: c84/333 lr:0.000854 t:9.5s +tttg: c85/333 lr:0.000850 t:9.6s +tttg: c86/333 lr:0.000847 t:9.7s +tttg: c87/333 lr:0.000843 t:9.9s +tttg: c88/333 lr:0.000840 t:10.0s +tttg: c89/333 lr:0.000836 t:10.1s +tttg: c90/333 lr:0.000833 t:10.2s +tttg: c91/333 lr:0.000829 t:10.3s +tttg: c92/333 lr:0.000826 t:10.4s +tttg: c93/333 lr:0.000822 t:10.5s +tttg: c94/333 lr:0.000819 t:10.7s +tttg: c95/333 lr:0.000815 t:10.8s +tttg: c96/333 lr:0.000811 t:10.9s +tttg: 
c97/333 lr:0.000807 t:11.0s +tttg: c98/333 lr:0.000804 t:11.1s +tttg: c99/333 lr:0.000800 t:11.2s +tttg: c100/333 lr:0.000796 t:11.3s +tttg: c101/333 lr:0.000792 t:11.4s +tttg: c102/333 lr:0.000789 t:11.6s +tttg: c103/333 lr:0.000785 t:11.7s +tttg: c104/333 lr:0.000781 t:11.8s +tttg: c105/333 lr:0.000777 t:11.9s +tttg: c106/333 lr:0.000773 t:12.0s +tttg: c107/333 lr:0.000769 t:12.1s +tttg: c108/333 lr:0.000765 t:12.2s +tttg: c109/333 lr:0.000761 t:12.4s +tttg: c110/333 lr:0.000757 t:12.5s +tttg: c111/333 lr:0.000753 t:12.6s +tttg: c112/333 lr:0.000749 t:12.7s +tttg: c113/333 lr:0.000745 t:12.8s +tttg: c114/333 lr:0.000740 t:12.9s +tttg: c115/333 lr:0.000736 t:13.0s +tttg: c116/333 lr:0.000732 t:13.2s +tttg: c117/333 lr:0.000728 t:13.3s +tttg: c118/333 lr:0.000724 t:13.4s +tttg: c119/333 lr:0.000719 t:13.5s +tttg: c120/333 lr:0.000715 t:13.6s +tttg: c121/333 lr:0.000711 t:13.7s +tttg: c122/333 lr:0.000707 t:13.8s +tttg: c123/333 lr:0.000702 t:13.9s +tttg: c124/333 lr:0.000698 t:14.1s +tttg: c125/333 lr:0.000694 t:14.2s +tttg: c126/333 lr:0.000689 t:14.3s +tttg: c127/333 lr:0.000685 t:14.4s +tttg: c128/333 lr:0.000680 t:14.5s +tttg: c129/333 lr:0.000676 t:14.6s +tttg: c130/333 lr:0.000672 t:14.7s +tttg: c131/333 lr:0.000667 t:14.9s +tttg: c132/333 lr:0.000663 t:15.0s +tttg: c133/333 lr:0.000658 t:15.1s +tttg: c134/333 lr:0.000654 t:15.2s +tttg: c135/333 lr:0.000649 t:15.3s +tttg: c136/333 lr:0.000645 t:15.4s +tttg: c137/333 lr:0.000640 t:15.5s +tttg: c138/333 lr:0.000635 t:15.7s +tttg: c139/333 lr:0.000631 t:15.8s +tttg: c140/333 lr:0.000626 t:15.9s +tttg: c141/333 lr:0.000622 t:16.0s +tttg: c142/333 lr:0.000617 t:16.1s +tttg: c143/333 lr:0.000613 t:16.2s +tttg: c144/333 lr:0.000608 t:16.3s +tttg: c145/333 lr:0.000603 t:16.4s +tttg: c146/333 lr:0.000599 t:16.6s +tttg: c147/333 lr:0.000594 t:16.7s +tttg: c148/333 lr:0.000589 t:16.8s +tttg: c149/333 lr:0.000585 t:16.9s +tttg: c150/333 lr:0.000580 t:17.0s +tttg: c151/333 lr:0.000575 t:17.1s +tttg: c152/333 lr:0.000571 t:17.2s +tttg: c153/333 lr:0.000566 t:17.3s +tttg: c154/333 lr:0.000561 t:17.5s +tttg: c155/333 lr:0.000557 t:17.6s +tttg: c156/333 lr:0.000552 t:17.7s +tttg: c157/333 lr:0.000547 t:17.8s +tttg: c158/333 lr:0.000543 t:17.9s +tttg: c159/333 lr:0.000538 t:18.0s +tttg: c160/333 lr:0.000533 t:18.1s +tttg: c161/333 lr:0.000528 t:18.3s +tttg: c162/333 lr:0.000524 t:18.4s +tttg: c163/333 lr:0.000519 t:18.5s +tttg: c164/333 lr:0.000514 t:18.6s +tttg: c165/333 lr:0.000509 t:18.7s +tttg: c166/333 lr:0.000505 t:18.8s +tttg: c167/333 lr:0.000500 t:18.9s +tttg: c168/333 lr:0.000495 t:19.0s +tttg: c169/333 lr:0.000491 t:19.2s +tttg: c170/333 lr:0.000486 t:19.3s +tttg: c171/333 lr:0.000481 t:19.4s +tttg: c172/333 lr:0.000476 t:19.5s +tttg: c173/333 lr:0.000472 t:19.6s +tttg: c174/333 lr:0.000467 t:19.7s +tttg: c175/333 lr:0.000462 t:19.8s +tttg: c176/333 lr:0.000457 t:20.0s +tttg: c177/333 lr:0.000453 t:20.1s +tttg: c178/333 lr:0.000448 t:20.2s +tttg: c179/333 lr:0.000443 t:20.3s +tttg: c180/333 lr:0.000439 t:20.4s +tttg: c181/333 lr:0.000434 t:20.5s +tttg: c182/333 lr:0.000429 t:20.6s +tttg: c183/333 lr:0.000425 t:20.8s +tttg: c184/333 lr:0.000420 t:20.9s +tttg: c185/333 lr:0.000415 t:21.0s +tttg: c186/333 lr:0.000411 t:21.1s +tttg: c187/333 lr:0.000406 t:21.2s +tttg: c188/333 lr:0.000401 t:21.3s +tttg: c189/333 lr:0.000397 t:21.4s +tttg: c190/333 lr:0.000392 t:21.5s +tttg: c191/333 lr:0.000387 t:21.7s +tttg: c192/333 lr:0.000383 t:21.8s +tttg: c193/333 lr:0.000378 t:21.9s +tttg: c194/333 lr:0.000374 t:22.0s +tttg: c195/333 lr:0.000369 t:22.1s 
+tttg: c196/333 lr:0.000365 t:22.2s +tttg: c197/333 lr:0.000360 t:22.3s +tttg: c198/333 lr:0.000355 t:22.5s +tttg: c199/333 lr:0.000351 t:22.6s +tttg: c200/333 lr:0.000346 t:22.7s +tttg: c201/333 lr:0.000342 t:22.8s +tttg: c202/333 lr:0.000337 t:22.9s +tttg: c203/333 lr:0.000333 t:23.0s +tttg: c204/333 lr:0.000328 t:23.1s +tttg: c205/333 lr:0.000324 t:23.2s +tttg: c206/333 lr:0.000320 t:23.4s +tttg: c207/333 lr:0.000315 t:23.5s +tttg: c208/333 lr:0.000311 t:23.6s +tttg: c209/333 lr:0.000306 t:23.7s +tttg: c210/333 lr:0.000302 t:23.8s +tttg: c211/333 lr:0.000298 t:23.9s +tttg: c212/333 lr:0.000293 t:24.0s +tttg: c213/333 lr:0.000289 t:24.2s +tttg: c214/333 lr:0.000285 t:24.3s +tttg: c215/333 lr:0.000281 t:24.4s +tttg: c216/333 lr:0.000276 t:24.5s +tttg: c217/333 lr:0.000272 t:24.6s +tttg: c218/333 lr:0.000268 t:24.7s +tttg: c219/333 lr:0.000264 t:24.8s +tttg: c220/333 lr:0.000260 t:24.9s +tttg: c221/333 lr:0.000255 t:25.1s +tttg: c222/333 lr:0.000251 t:25.2s +tttg: c223/333 lr:0.000247 t:25.3s +tttg: c224/333 lr:0.000243 t:25.4s +tttg: c225/333 lr:0.000239 t:25.5s +tttg: c226/333 lr:0.000235 t:25.6s +tttg: c227/333 lr:0.000231 t:25.7s +tttg: c228/333 lr:0.000227 t:25.9s +tttg: c229/333 lr:0.000223 t:26.0s +tttg: c230/333 lr:0.000219 t:26.1s +tttg: c231/333 lr:0.000215 t:26.2s +tttg: c232/333 lr:0.000211 t:26.3s +tttg: c233/333 lr:0.000208 t:26.4s +tttg: c234/333 lr:0.000204 t:26.5s +tttg: c235/333 lr:0.000200 t:26.7s +tttg: c236/333 lr:0.000196 t:26.8s +tttg: c237/333 lr:0.000193 t:26.9s +tttg: c238/333 lr:0.000189 t:27.0s +tttg: c239/333 lr:0.000185 t:27.1s +tttg: c240/333 lr:0.000181 t:27.2s +tttg: c241/333 lr:0.000178 t:27.3s +tttg: c242/333 lr:0.000174 t:27.4s +tttg: c243/333 lr:0.000171 t:27.6s +tttg: c244/333 lr:0.000167 t:27.7s +tttg: c245/333 lr:0.000164 t:27.8s +tttg: c246/333 lr:0.000160 t:27.9s +tttg: c247/333 lr:0.000157 t:28.0s +tttg: c248/333 lr:0.000153 t:28.1s +tttg: c249/333 lr:0.000150 t:28.2s +tttg: c250/333 lr:0.000146 t:28.4s +tttg: c251/333 lr:0.000143 t:28.5s +tttg: c252/333 lr:0.000140 t:28.6s +tttg: c253/333 lr:0.000137 t:28.7s +tttg: c254/333 lr:0.000133 t:28.8s +tttg: c255/333 lr:0.000130 t:28.9s +tttg: c256/333 lr:0.000127 t:29.0s +tttg: c257/333 lr:0.000124 t:29.2s +tttg: c258/333 lr:0.000121 t:29.3s +tttg: c259/333 lr:0.000118 t:29.4s +tttg: c260/333 lr:0.000115 t:29.5s +tttg: c261/333 lr:0.000112 t:29.6s +tttg: c262/333 lr:0.000109 t:29.7s +tttg: c263/333 lr:0.000106 t:29.8s +tttg: c264/333 lr:0.000103 t:29.9s +tttg: c265/333 lr:0.000100 t:30.1s +tttg: c266/333 lr:0.000097 t:30.2s +tttg: c267/333 lr:0.000094 t:30.3s +tttg: c268/333 lr:0.000092 t:30.4s +tttg: c269/333 lr:0.000089 t:30.5s +tttg: c270/333 lr:0.000086 t:30.6s +tttg: c271/333 lr:0.000084 t:30.7s +tttg: c272/333 lr:0.000081 t:30.9s +tttg: c273/333 lr:0.000078 t:31.0s +tttg: c274/333 lr:0.000076 t:31.1s +tttg: c275/333 lr:0.000073 t:31.2s +tttg: c276/333 lr:0.000071 t:31.3s +tttg: c277/333 lr:0.000069 t:31.4s +tttg: c278/333 lr:0.000066 t:31.5s +tttg: c279/333 lr:0.000064 t:31.6s +tttg: c280/333 lr:0.000062 t:31.8s +tttg: c281/333 lr:0.000059 t:31.9s +tttg: c282/333 lr:0.000057 t:32.0s +tttg: c283/333 lr:0.000055 t:32.1s +tttg: c284/333 lr:0.000053 t:32.2s +tttg: c285/333 lr:0.000051 t:32.3s +tttg: c286/333 lr:0.000049 t:32.4s +tttg: c287/333 lr:0.000047 t:32.6s +tttg: c288/333 lr:0.000045 t:32.7s +tttg: c289/333 lr:0.000043 t:32.8s +tttg: c290/333 lr:0.000041 t:32.9s +tttg: c291/333 lr:0.000039 t:33.0s +tttg: c292/333 lr:0.000037 t:33.1s +tttg: c293/333 lr:0.000035 t:33.2s +tttg: c294/333 
lr:0.000034 t:33.4s +tttg: c295/333 lr:0.000032 t:33.5s +tttg: c296/333 lr:0.000030 t:33.6s +tttg: c297/333 lr:0.000029 t:33.7s +tttg: c298/333 lr:0.000027 t:33.8s +tttg: c299/333 lr:0.000026 t:33.9s +tttg: c300/333 lr:0.000024 t:34.0s +tttg: c301/333 lr:0.000023 t:34.2s +tttg: c302/333 lr:0.000021 t:34.3s +tttg: c303/333 lr:0.000020 t:34.4s +tttg: c304/333 lr:0.000019 t:34.5s +tttg: c305/333 lr:0.000017 t:34.6s +tttg: c306/333 lr:0.000016 t:34.7s +tttg: c307/333 lr:0.000015 t:34.8s +tttg: c308/333 lr:0.000014 t:34.9s +tttg: c309/333 lr:0.000013 t:35.1s +tttg: c310/333 lr:0.000012 t:35.2s +tttg: c311/333 lr:0.000011 t:35.3s +tttg: c312/333 lr:0.000010 t:35.4s +tttg: c313/333 lr:0.000009 t:35.5s +tttg: c314/333 lr:0.000008 t:35.6s +tttg: c315/333 lr:0.000007 t:35.7s +tttg: c316/333 lr:0.000006 t:35.9s +tttg: c317/333 lr:0.000006 t:36.0s +tttg: c318/333 lr:0.000005 t:36.1s +tttg: c319/333 lr:0.000004 t:36.2s +tttg: c320/333 lr:0.000004 t:36.3s +tttg: c321/333 lr:0.000003 t:36.4s +tttg: c322/333 lr:0.000003 t:36.5s +tttg: c323/333 lr:0.000002 t:36.7s +tttg: c324/333 lr:0.000002 t:36.8s +tttg: c325/333 lr:0.000001 t:36.9s +tttg: c326/333 lr:0.000001 t:37.0s +tttg: c327/333 lr:0.000001 t:37.1s +tttg: c328/333 lr:0.000001 t:37.2s +tttg: c329/333 lr:0.000000 t:37.3s +tttg: c330/333 lr:0.000000 t:37.5s +tttg: c331/333 lr:0.000000 t:37.6s +tttg: c332/333 lr:0.000000 t:37.7s +ttpr: phase:1/1 t:1693.4s +ttp: b750/782 bl:2.8363 bb:1.0701 rl:2.7026 rb:1.0766 dl:2638-2688 gd:1 +ttp: b749/782 bl:2.8361 bb:1.0911 rl:2.7045 rb:1.0768 dl:2580-2638 gd:1 +ttp: b748/782 bl:2.8065 bb:1.0747 rl:2.7060 rb:1.0768 dl:2539-2578 gd:1 +ttp: b747/782 bl:2.7875 bb:1.0602 rl:2.7071 rb:1.0765 dl:2501-2538 gd:1 +ttp: b746/782 bl:2.6789 bb:1.0548 rl:2.7068 rb:1.0762 dl:2459-2501 gd:1 +ttp: b745/782 bl:2.7910 bb:1.0910 rl:2.7079 rb:1.0764 dl:2421-2458 gd:1 +ttp: b744/782 bl:2.6578 bb:1.0588 rl:2.7072 rb:1.0762 dl:2388-2419 gd:1 +ttp: b743/782 bl:2.7152 bb:1.0454 rl:2.7073 rb:1.0758 dl:2355-2388 gd:1 +ttp: b742/782 bl:2.7875 bb:1.0677 rl:2.7083 rb:1.0757 dl:2319-2353 gd:1 +ttp: b741/782 bl:2.8088 bb:1.1057 rl:2.7095 rb:1.0761 dl:2286-2319 gd:1 +ttp: b740/782 bl:2.7385 bb:1.0338 rl:2.7098 rb:1.0756 dl:2254-2285 gd:1 +ttp: b739/782 bl:2.8273 bb:1.0735 rl:2.7111 rb:1.0755 dl:2227-2253 gd:1 +ttp: b738/782 bl:2.7497 bb:1.0550 rl:2.7116 rb:1.0753 dl:2194-2227 gd:1 +ttp: b737/782 bl:2.8016 bb:1.0683 rl:2.7125 rb:1.0752 dl:2165-2193 gd:1 +ttp: b736/782 bl:2.6761 bb:1.0431 rl:2.7121 rb:1.0749 dl:2140-2165 gd:1 +ttp: b735/782 bl:2.8339 bb:1.0792 rl:2.7134 rb:1.0749 dl:2116-2140 gd:1 +ttp: b734/782 bl:2.7766 bb:1.0588 rl:2.7140 rb:1.0748 dl:2091-2115 gd:1 +ttp: b733/782 bl:2.7596 bb:1.0531 rl:2.7145 rb:1.0745 dl:2062-2090 gd:1 +ttp: b732/782 bl:2.8234 bb:1.0988 rl:2.7155 rb:1.0748 dl:2041-2062 gd:1 +ttp: b731/782 bl:2.7771 bb:1.0599 rl:2.7161 rb:1.0746 dl:2017-2041 gd:1 +ttp: b730/782 bl:2.7707 bb:1.0908 rl:2.7166 rb:1.0748 dl:1995-2016 gd:1 +ttp: b729/782 bl:2.7236 bb:1.0379 rl:2.7166 rb:1.0744 dl:1978-1994 gd:1 +ttp: b728/782 bl:2.7599 bb:1.0689 rl:2.7170 rb:1.0744 dl:1960-1977 gd:1 +ttp: b727/782 bl:2.7748 bb:1.0563 rl:2.7175 rb:1.0742 dl:1936-1960 gd:1 +ttp: b726/782 bl:2.7998 bb:1.0655 rl:2.7182 rb:1.0741 dl:1915-1936 gd:1 +ttp: b725/782 bl:2.7624 bb:1.0707 rl:2.7186 rb:1.0741 dl:1900-1915 gd:1 +ttp: b724/782 bl:2.7554 bb:1.0533 rl:2.7189 rb:1.0739 dl:1885-1900 gd:1 +ttp: b723/782 bl:2.7832 bb:1.0618 rl:2.7194 rb:1.0738 dl:1861-1885 gd:1 +ttp: b722/782 bl:2.7739 bb:1.0606 rl:2.7198 rb:1.0737 dl:1846-1861 gd:1 +ttp: b721/782 
bl:2.7509 bb:1.0268 rl:2.7201 rb:1.0733 dl:1832-1846 gd:1 +ttp: b720/782 bl:2.8254 bb:1.0792 rl:2.7209 rb:1.0734 dl:1816-1832 gd:1 +ttp: b719/782 bl:2.6829 bb:1.0288 rl:2.7206 rb:1.0730 dl:1793-1816 gd:1 +ttp: b718/782 bl:2.7801 bb:1.0717 rl:2.7210 rb:1.0730 dl:1773-1792 gd:1 +ttp: b717/782 bl:2.7914 bb:1.0513 rl:2.7216 rb:1.0729 dl:1754-1773 gd:1 +ttp: b716/782 bl:2.8088 bb:1.0365 rl:2.7222 rb:1.0726 dl:1739-1754 gd:1 +ttp: b715/782 bl:2.6453 bb:1.0394 rl:2.7216 rb:1.0724 dl:1725-1739 gd:1 +ttp: b714/782 bl:2.8124 bb:1.0705 rl:2.7223 rb:1.0723 dl:1711-1725 gd:1 +ttp: b713/782 bl:2.8341 bb:1.0456 rl:2.7230 rb:1.0722 dl:1697-1711 gd:1 +ttp: b712/782 bl:2.8327 bb:1.0785 rl:2.7238 rb:1.0722 dl:1684-1697 gd:1 +ttp: b711/782 bl:2.7764 bb:1.0455 rl:2.7241 rb:1.0720 dl:1673-1683 gd:1 +ttp: b710/782 bl:2.7610 bb:1.0704 rl:2.7244 rb:1.0720 dl:1661-1673 gd:1 +ttp: b709/782 bl:2.7851 bb:1.0582 rl:2.7247 rb:1.0719 dl:1649-1661 gd:1 +ttp: b708/782 bl:2.7258 bb:1.0475 rl:2.7248 rb:1.0717 dl:1639-1649 gd:1 +ttp: b707/782 bl:2.7734 bb:1.0842 rl:2.7251 rb:1.0718 dl:1627-1638 gd:1 +ttp: b706/782 bl:2.7176 bb:1.0447 rl:2.7250 rb:1.0717 dl:1617-1627 gd:1 +ttp: b705/782 bl:2.7866 bb:1.0734 rl:2.7254 rb:1.0717 dl:1606-1617 gd:1 +ttp: b704/782 bl:2.7470 bb:1.0245 rl:2.7255 rb:1.0714 dl:1595-1606 gd:1 +ttp: b703/782 bl:2.9200 bb:1.1045 rl:2.7267 rb:1.0716 dl:1582-1594 gd:1 +ttp: b702/782 bl:2.8051 bb:1.0670 rl:2.7271 rb:1.0715 dl:1572-1581 gd:1 +ttp: b701/782 bl:2.7568 bb:1.0484 rl:2.7273 rb:1.0714 dl:1562-1572 gd:1 +ttp: b700/782 bl:2.6743 bb:1.0437 rl:2.7270 rb:1.0712 dl:1552-1562 gd:1 +ttp: b699/782 bl:2.8200 bb:1.0436 rl:2.7275 rb:1.0711 dl:1543-1552 gd:1 +ttp: b698/782 bl:2.7842 bb:1.0319 rl:2.7279 rb:1.0708 dl:1534-1543 gd:1 +ttp: b697/782 bl:2.7640 bb:1.0414 rl:2.7281 rb:1.0707 dl:1522-1534 gd:1 +ttp: b696/782 bl:2.8098 bb:1.0740 rl:2.7285 rb:1.0707 dl:1513-1522 gd:1 +ttp: b695/782 bl:2.7835 bb:1.0791 rl:2.7288 rb:1.0707 dl:1504-1513 gd:1 +ttp: b694/782 bl:2.7628 bb:1.0663 rl:2.7290 rb:1.0707 dl:1494-1504 gd:1 +ttp: b693/782 bl:2.8174 bb:1.1052 rl:2.7295 rb:1.0709 dl:1485-1494 gd:1 +ttp: b692/782 bl:2.7674 bb:1.0500 rl:2.7297 rb:1.0708 dl:1477-1484 gd:1 +ttp: b691/782 bl:2.6958 bb:1.0409 rl:2.7295 rb:1.0706 dl:1467-1476 gd:1 +ttp: b690/782 bl:2.8385 bb:1.0636 rl:2.7300 rb:1.0706 dl:1458-1467 gd:1 +ttp: b689/782 bl:2.7799 bb:1.0636 rl:2.7303 rb:1.0706 dl:1450-1458 gd:1 +ttp: b688/782 bl:2.7483 bb:1.0485 rl:2.7304 rb:1.0704 dl:1441-1450 gd:1 +ttp: b687/782 bl:2.7189 bb:1.0500 rl:2.7303 rb:1.0703 dl:1432-1441 gd:1 +ttp: b686/782 bl:2.8052 bb:1.0540 rl:2.7307 rb:1.0703 dl:1422-1432 gd:1 +ttp: b685/782 bl:2.7759 bb:1.0640 rl:2.7309 rb:1.0702 dl:1414-1422 gd:1 +ttp: b684/782 bl:2.7974 bb:1.0754 rl:2.7312 rb:1.0702 dl:1407-1414 gd:1 +ttp: b683/782 bl:2.7776 bb:1.0695 rl:2.7314 rb:1.0702 dl:1400-1406 gd:1 +ttp: b682/782 bl:2.8079 bb:1.0726 rl:2.7318 rb:1.0703 dl:1393-1400 gd:1 +ttp: b681/782 bl:2.8185 bb:1.0701 rl:2.7322 rb:1.0703 dl:1383-1393 gd:1 +ttp: b680/782 bl:2.8019 bb:1.0540 rl:2.7325 rb:1.0702 dl:1375-1383 gd:1 +ttp: b679/782 bl:2.8523 bb:1.0867 rl:2.7331 rb:1.0703 dl:1368-1374 gd:1 +ttp: b678/782 bl:2.7868 bb:1.0490 rl:2.7333 rb:1.0702 dl:1361-1368 gd:1 +ttp: b677/782 bl:2.8705 bb:1.1127 rl:2.7339 rb:1.0703 dl:1353-1360 gd:1 +ttp: b676/782 bl:2.7912 bb:1.0665 rl:2.7342 rb:1.0703 dl:1347-1353 gd:1 +ttp: b675/782 bl:2.8406 bb:1.0664 rl:2.7346 rb:1.0703 dl:1341-1347 gd:1 +ttp: b674/782 bl:2.7860 bb:1.0572 rl:2.7349 rb:1.0703 dl:1334-1341 gd:1 +ttp: b673/782 bl:2.8154 bb:1.0572 rl:2.7352 rb:1.0702 
dl:1327-1334 gd:1 +ttp: b672/782 bl:2.9049 bb:1.1083 rl:2.7359 rb:1.0704 dl:1321-1327 gd:1 +ttp: b671/782 bl:2.8838 bb:1.1177 rl:2.7366 rb:1.0706 dl:1316-1321 gd:1 +ttp: b670/782 bl:2.8287 bb:1.0577 rl:2.7369 rb:1.0705 dl:1308-1315 gd:1 +ttp: b669/782 bl:2.7811 bb:1.0546 rl:2.7371 rb:1.0704 dl:1301-1308 gd:1 +ttp: b668/782 bl:2.7948 bb:1.0593 rl:2.7374 rb:1.0704 dl:1295-1301 gd:1 +ttp: b667/782 bl:2.8211 bb:1.1051 rl:2.7377 rb:1.0705 dl:1288-1295 gd:1 +ttp: b666/782 bl:2.8213 bb:1.0604 rl:2.7380 rb:1.0705 dl:1282-1288 gd:1 +ttp: b665/782 bl:2.7387 bb:1.0321 rl:2.7380 rb:1.0703 dl:1275-1282 gd:1 +ttp: b664/782 bl:2.7022 bb:1.0418 rl:2.7379 rb:1.0702 dl:1270-1275 gd:1 +ttp: b663/782 bl:2.7960 bb:1.0612 rl:2.7381 rb:1.0702 dl:1264-1269 gd:1 +ttp: b662/782 bl:2.8089 bb:1.0718 rl:2.7384 rb:1.0702 dl:1258-1263 gd:1 +ttp: b661/782 bl:2.7169 bb:1.0186 rl:2.7383 rb:1.0700 dl:1251-1258 gd:1 +ttp: b660/782 bl:2.8544 bb:1.0922 rl:2.7388 rb:1.0701 dl:1245-1250 gd:1 +ttp: b659/782 bl:2.7137 bb:1.0219 rl:2.7387 rb:1.0699 dl:1239-1245 gd:1 +ttp: b658/782 bl:2.8131 bb:1.0767 rl:2.7389 rb:1.0699 dl:1234-1239 gd:1 +ttp: b657/782 bl:2.7837 bb:1.0454 rl:2.7391 rb:1.0698 dl:1227-1234 gd:1 +ttp: b656/782 bl:2.7475 bb:1.0372 rl:2.7391 rb:1.0697 dl:1220-1227 gd:1 +ttp: b655/782 bl:2.6846 bb:1.0212 rl:2.7389 rb:1.0695 dl:1215-1220 gd:1 +ttp: b654/782 bl:2.7319 bb:1.0370 rl:2.7389 rb:1.0694 dl:1209-1215 gd:1 +ttp: b653/782 bl:2.7573 bb:1.0344 rl:2.7390 rb:1.0692 dl:1203-1209 gd:1 +ttp: b652/782 bl:2.7979 bb:1.0720 rl:2.7392 rb:1.0693 dl:1198-1203 gd:1 +ttp: b651/782 bl:2.7208 bb:1.0450 rl:2.7391 rb:1.0692 dl:1193-1198 gd:1 +ttp: b650/782 bl:2.7932 bb:1.0755 rl:2.7393 rb:1.0692 dl:1188-1193 gd:1 +ttp: b649/782 bl:2.8109 bb:1.0602 rl:2.7396 rb:1.0692 dl:1183-1188 gd:1 +ttp: b648/782 bl:2.7478 bb:1.0416 rl:2.7396 rb:1.0691 dl:1177-1182 gd:1 +ttp: b647/782 bl:2.7555 bb:1.0490 rl:2.7397 rb:1.0690 dl:1171-1177 gd:1 +ttp: b646/782 bl:2.7670 bb:1.0715 rl:2.7397 rb:1.0690 dl:1166-1171 gd:1 +ttp: b645/782 bl:2.7960 bb:1.0939 rl:2.7399 rb:1.0691 dl:1160-1166 gd:1 +ttp: b644/782 bl:2.7315 bb:1.0307 rl:2.7399 rb:1.0690 dl:1155-1160 gd:1 +ttp: b643/782 bl:2.7932 bb:1.0649 rl:2.7401 rb:1.0689 dl:1150-1155 gd:1 +ttp: b642/782 bl:2.7797 bb:1.0814 rl:2.7402 rb:1.0690 dl:1144-1150 gd:1 +ttp: b641/782 bl:2.7705 bb:1.0431 rl:2.7403 rb:1.0689 dl:1140-1144 gd:1 +ttp: b640/782 bl:2.7868 bb:1.0846 rl:2.7405 rb:1.0689 dl:1134-1140 gd:1 +ttp: b639/782 bl:2.8563 bb:1.0820 rl:2.7408 rb:1.0690 dl:1129-1134 gd:1 +ttp: b638/782 bl:2.8431 bb:1.0487 rl:2.7412 rb:1.0689 dl:1123-1129 gd:1 +ttp: b637/782 bl:2.8048 bb:1.0807 rl:2.7414 rb:1.0690 dl:1120-1123 gd:1 +ttp: b636/782 bl:2.7606 bb:1.0708 rl:2.7414 rb:1.0690 dl:1116-1120 gd:1 +ttp: b635/782 bl:2.7416 bb:1.0613 rl:2.7414 rb:1.0689 dl:1111-1116 gd:1 +ttp: b634/782 bl:2.7009 bb:1.0427 rl:2.7413 rb:1.0689 dl:1105-1111 gd:1 +ttp: b633/782 bl:2.8314 bb:1.1048 rl:2.7416 rb:1.0690 dl:1101-1105 gd:1 +ttp: b632/782 bl:2.7381 bb:1.0285 rl:2.7416 rb:1.0688 dl:1096-1101 gd:1 +ttp: b631/782 bl:2.7655 bb:1.0618 rl:2.7416 rb:1.0688 dl:1092-1096 gd:1 +ttp: b630/782 bl:2.8338 bb:1.0612 rl:2.7419 rb:1.0688 dl:1087-1092 gd:1 +ttp: b629/782 bl:2.7278 bb:1.0451 rl:2.7419 rb:1.0687 dl:1082-1086 gd:1 +ttp: b628/782 bl:2.7727 bb:1.0486 rl:2.7420 rb:1.0687 dl:1078-1082 gd:1 +ttp: b627/782 bl:2.7337 bb:1.0348 rl:2.7419 rb:1.0686 dl:1073-1077 gd:1 +ttp: b626/782 bl:2.8171 bb:1.0468 rl:2.7422 rb:1.0685 dl:1068-1073 gd:1 +ttp: b625/782 bl:2.6691 bb:1.0028 rl:2.7420 rb:1.0683 dl:1064-1068 gd:1 +ttp: b624/782 bl:2.7929 
bb:1.0746 rl:2.7421 rb:1.0683 dl:1060-1064 gd:1 +ttp: b623/782 bl:2.7916 bb:1.0745 rl:2.7422 rb:1.0683 dl:1055-1060 gd:1 +ttp: b622/782 bl:2.8467 bb:1.0772 rl:2.7425 rb:1.0684 dl:1050-1055 gd:1 +ttp: b621/782 bl:2.8434 bb:1.0890 rl:2.7428 rb:1.0684 dl:1046-1050 gd:1 +ttp: b620/782 bl:2.7816 bb:1.0425 rl:2.7429 rb:1.0683 dl:1041-1046 gd:1 +ttp: b619/782 bl:2.7956 bb:1.0591 rl:2.7431 rb:1.0683 dl:1037-1041 gd:1 +ttp: b618/782 bl:2.7360 bb:1.0489 rl:2.7431 rb:1.0683 dl:1031-1037 gd:1 +ttp: b617/782 bl:2.7419 bb:1.0378 rl:2.7431 rb:1.0682 dl:1027-1031 gd:1 +ttp: b616/782 bl:2.8535 bb:1.0882 rl:2.7434 rb:1.0682 dl:1024-1027 gd:1 +ttp: b615/782 bl:2.8397 bb:1.0662 rl:2.7436 rb:1.0682 dl:1020-1023 gd:1 +ttp: b614/782 bl:2.7888 bb:1.0667 rl:2.7437 rb:1.0682 dl:1016-1020 gd:1 +ttp: b613/782 bl:2.8243 bb:1.0630 rl:2.7440 rb:1.0682 dl:1012-1016 gd:1 +ttp: b612/782 bl:2.8248 bb:1.0434 rl:2.7442 rb:1.0681 dl:1007-1012 gd:1 +ttp: b611/782 bl:2.7652 bb:1.0705 rl:2.7442 rb:1.0681 dl:1004-1007 gd:1 +ttp: b610/782 bl:2.8332 bb:1.0637 rl:2.7445 rb:1.0681 dl:999-1004 gd:1 +ttp: b609/782 bl:2.7925 bb:1.0600 rl:2.7446 rb:1.0681 dl:994-999 gd:1 +ttp: b608/782 bl:2.7368 bb:1.0329 rl:2.7446 rb:1.0680 dl:990-994 gd:1 +ttp: b607/782 bl:2.6934 bb:1.0381 rl:2.7444 rb:1.0679 dl:986-990 gd:1 +ttp: b606/782 bl:2.8167 bb:1.0838 rl:2.7446 rb:1.0680 dl:982-986 gd:1 +ttp: b605/782 bl:2.7417 bb:1.0576 rl:2.7446 rb:1.0679 dl:978-982 gd:1 +ttp: b604/782 bl:2.7271 bb:1.0368 rl:2.7446 rb:1.0679 dl:974-978 gd:1 +ttp: b603/782 bl:2.8344 bb:1.0857 rl:2.7448 rb:1.0679 dl:971-974 gd:1 +ttp: b602/782 bl:2.7802 bb:1.0399 rl:2.7449 rb:1.0678 dl:966-971 gd:1 +ttp: b601/782 bl:2.7692 bb:1.0642 rl:2.7449 rb:1.0678 dl:963-966 gd:1 +ttp: b600/782 bl:2.7949 bb:1.0609 rl:2.7451 rb:1.0678 dl:958-963 gd:1 +ttp: b599/782 bl:2.7378 bb:1.0515 rl:2.7450 rb:1.0678 dl:954-958 gd:1 +ttp: b598/782 bl:2.8097 bb:1.0702 rl:2.7452 rb:1.0678 dl:950-954 gd:1 +ttp: b597/782 bl:2.7746 bb:1.0418 rl:2.7453 rb:1.0677 dl:947-950 gd:1 +ttp: b596/782 bl:2.7758 bb:1.0631 rl:2.7453 rb:1.0677 dl:943-947 gd:1 +ttp: b595/782 bl:2.7337 bb:1.0569 rl:2.7453 rb:1.0677 dl:940-943 gd:1 +ttp: b594/782 bl:2.9074 bb:1.1042 rl:2.7457 rb:1.0678 dl:937-940 gd:1 +ttp: b593/782 bl:2.7990 bb:1.0467 rl:2.7458 rb:1.0677 dl:933-937 gd:1 +ttp: b592/782 bl:2.7888 bb:1.0499 rl:2.7459 rb:1.0677 dl:930-933 gd:1 +ttp: b591/782 bl:2.6681 bb:1.0082 rl:2.7457 rb:1.0675 dl:927-930 gd:1 +ttp: b590/782 bl:2.7338 bb:1.0289 rl:2.7457 rb:1.0674 dl:924-927 gd:1 +ttp: b589/782 bl:2.7544 bb:1.0544 rl:2.7457 rb:1.0674 dl:921-924 gd:1 +ttp: b588/782 bl:2.7454 bb:1.0474 rl:2.7457 rb:1.0674 dl:917-921 gd:1 +ttp: b587/782 bl:2.7797 bb:1.0614 rl:2.7458 rb:1.0673 dl:914-917 gd:1 +ttp: b586/782 bl:2.7258 bb:1.0142 rl:2.7458 rb:1.0672 dl:911-914 gd:1 +ttp: b585/782 bl:2.7670 bb:1.0669 rl:2.7458 rb:1.0672 dl:908-911 gd:1 +ttp: b584/782 bl:2.7676 bb:1.0398 rl:2.7459 rb:1.0672 dl:904-907 gd:1 +ttp: b583/782 bl:2.7999 bb:1.0922 rl:2.7460 rb:1.0672 dl:901-904 gd:1 +ttp: b582/782 bl:2.8636 bb:1.0923 rl:2.7462 rb:1.0673 dl:897-901 gd:1 +ttp: b581/782 bl:2.7249 bb:1.0165 rl:2.7462 rb:1.0672 dl:894-897 gd:1 +ttp: b580/782 bl:2.7276 bb:1.0363 rl:2.7462 rb:1.0671 dl:891-894 gd:1 +ttp: b579/782 bl:2.6360 bb:1.0046 rl:2.7459 rb:1.0669 dl:888-891 gd:1 +ttp: b578/782 bl:2.8080 bb:1.0699 rl:2.7460 rb:1.0670 dl:884-887 gd:1 +ttp: b577/782 bl:2.7535 bb:1.0415 rl:2.7461 rb:1.0669 dl:880-884 gd:1 +ttp: b576/782 bl:2.7780 bb:1.0462 rl:2.7461 rb:1.0669 dl:877-880 gd:1 +ttp: b575/782 bl:2.7968 bb:1.0531 rl:2.7462 rb:1.0668 dl:874-877 gd:1 
+ttp: b574/782 bl:2.7850 bb:1.0403 rl:2.7463 rb:1.0668 dl:871-874 gd:1 +ttp: b573/782 bl:2.9310 bb:1.0727 rl:2.7467 rb:1.0668 dl:868-871 gd:1 +ttp: b572/782 bl:2.9431 bb:1.1200 rl:2.7471 rb:1.0669 dl:865-868 gd:1 +ttp: b571/782 bl:2.7068 bb:1.0325 rl:2.7470 rb:1.0668 dl:862-865 gd:1 +ttp: b570/782 bl:2.7727 bb:1.0791 rl:2.7471 rb:1.0668 dl:858-862 gd:1 +ttp: b569/782 bl:2.7613 bb:1.0550 rl:2.7471 rb:1.0668 dl:855-858 gd:1 +ttp: b568/782 bl:2.7993 bb:1.0559 rl:2.7472 rb:1.0668 dl:852-855 gd:1 +ttp: b567/782 bl:2.6720 bb:1.0292 rl:2.7471 rb:1.0667 dl:849-852 gd:1 +ttp: b566/782 bl:2.7184 bb:1.0331 rl:2.7470 rb:1.0667 dl:846-849 gd:1 +ttp: b565/782 bl:2.7734 bb:1.0627 rl:2.7471 rb:1.0666 dl:843-846 gd:1 +ttp: b564/782 bl:2.8632 bb:1.1078 rl:2.7473 rb:1.0667 dl:840-843 gd:1 +ttp: b563/782 bl:2.8030 bb:1.0633 rl:2.7474 rb:1.0667 dl:837-840 gd:1 +ttp: b562/782 bl:2.7132 bb:1.0255 rl:2.7473 rb:1.0666 dl:834-837 gd:1 +ttp: b561/782 bl:2.7126 bb:1.0638 rl:2.7473 rb:1.0666 dl:831-834 gd:1 +ttp: b560/782 bl:2.8164 bb:1.0907 rl:2.7474 rb:1.0667 dl:828-831 gd:1 +ttp: b559/782 bl:2.7540 bb:1.0467 rl:2.7474 rb:1.0666 dl:824-827 gd:1 +ttp: b558/782 bl:2.7007 bb:1.0228 rl:2.7473 rb:1.0666 dl:821-824 gd:1 +ttp: b557/782 bl:2.8016 bb:1.0446 rl:2.7474 rb:1.0665 dl:818-821 gd:1 +ttp: b556/782 bl:2.8385 bb:1.0852 rl:2.7476 rb:1.0665 dl:815-818 gd:1 +ttp: b555/782 bl:2.7607 bb:1.0536 rl:2.7476 rb:1.0665 dl:812-815 gd:1 +ttp: b554/782 bl:2.7362 bb:1.0299 rl:2.7476 rb:1.0664 dl:809-812 gd:1 +ttp: b553/782 bl:2.7670 bb:1.0601 rl:2.7476 rb:1.0664 dl:806-809 gd:1 +ttp: b552/782 bl:2.8035 bb:1.0448 rl:2.7477 rb:1.0664 dl:804-806 gd:1 +ttp: b551/782 bl:2.8243 bb:1.0646 rl:2.7479 rb:1.0664 dl:801-804 gd:1 +ttp: b550/782 bl:2.8050 bb:1.0763 rl:2.7480 rb:1.0664 dl:798-801 gd:1 +ttp: b549/782 bl:2.7670 bb:1.0646 rl:2.7480 rb:1.0664 dl:795-798 gd:1 +ttp: b548/782 bl:2.7620 bb:1.0473 rl:2.7480 rb:1.0664 dl:793-795 gd:1 +ttp: b547/782 bl:2.7340 bb:1.0325 rl:2.7480 rb:1.0663 dl:790-793 gd:1 +ttp: b546/782 bl:2.8260 bb:1.0730 rl:2.7482 rb:1.0663 dl:788-790 gd:1 +ttp: b545/782 bl:2.7880 bb:1.0543 rl:2.7482 rb:1.0663 dl:785-788 gd:1 +ttp: b544/782 bl:2.7552 bb:1.0434 rl:2.7482 rb:1.0663 dl:782-785 gd:1 +ttp: b543/782 bl:2.7874 bb:1.0464 rl:2.7483 rb:1.0662 dl:779-782 gd:1 +ttp: b542/782 bl:2.8350 bb:1.0739 rl:2.7485 rb:1.0662 dl:777-779 gd:1 +ttp: b541/782 bl:2.8020 bb:1.0605 rl:2.7486 rb:1.0662 dl:774-776 gd:1 +ttp: b540/782 bl:2.7012 bb:1.0191 rl:2.7485 rb:1.0661 dl:771-774 gd:1 +ttp: b539/782 bl:2.7310 bb:1.0457 rl:2.7485 rb:1.0661 dl:769-771 gd:1 +ttp: b538/782 bl:2.6854 bb:1.0386 rl:2.7483 rb:1.0661 dl:767-769 gd:1 +ttp: b537/782 bl:2.7142 bb:1.0263 rl:2.7483 rb:1.0660 dl:764-767 gd:1 +ttp: b536/782 bl:2.7868 bb:1.0749 rl:2.7483 rb:1.0660 dl:762-764 gd:1 +ttp: b535/782 bl:2.7892 bb:1.0575 rl:2.7484 rb:1.0660 dl:759-762 gd:1 +ttp: b534/782 bl:2.8193 bb:1.0724 rl:2.7485 rb:1.0660 dl:757-759 gd:1 +ttp: b533/782 bl:2.7713 bb:1.0351 rl:2.7486 rb:1.0659 dl:754-757 gd:1 +ttp: b532/782 bl:2.8176 bb:1.0580 rl:2.7487 rb:1.0659 dl:752-754 gd:1 +ttp: b531/782 bl:2.7755 bb:1.0527 rl:2.7487 rb:1.0659 dl:750-752 gd:1 +ttp: b530/782 bl:2.8058 bb:1.0386 rl:2.7488 rb:1.0659 dl:747-750 gd:1 +ttp: b529/782 bl:2.7775 bb:1.0579 rl:2.7489 rb:1.0658 dl:745-747 gd:1 +ttp: b528/782 bl:2.7535 bb:1.0315 rl:2.7489 rb:1.0658 dl:742-745 gd:1 +ttp: b527/782 bl:2.7434 bb:1.0425 rl:2.7489 rb:1.0657 dl:739-742 gd:1 +ttp: b526/782 bl:2.7682 bb:1.0571 rl:2.7489 rb:1.0657 dl:737-739 gd:1 +ttp: b525/782 bl:2.7858 bb:1.0718 rl:2.7490 rb:1.0657 dl:735-737 gd:1 
+ttp: b524/782 bl:2.8135 bb:1.0514 rl:2.7491 rb:1.0657 dl:732-735 gd:1 +ttp: b523/782 bl:2.8125 bb:1.0562 rl:2.7492 rb:1.0657 dl:730-732 gd:1 +ttp: b522/782 bl:2.8237 bb:1.0855 rl:2.7493 rb:1.0657 dl:727-730 gd:1 +ttp: b521/782 bl:2.7704 bb:1.0512 rl:2.7493 rb:1.0657 dl:725-727 gd:1 +ttp: b520/782 bl:2.7890 bb:1.0570 rl:2.7494 rb:1.0657 dl:723-725 gd:1 +ttp: b519/782 bl:2.7287 bb:1.0348 rl:2.7494 rb:1.0656 dl:720-723 gd:1 +ttp: b518/782 bl:2.7292 bb:1.0512 rl:2.7493 rb:1.0656 dl:717-720 gd:1 +ttp: b517/782 bl:2.7779 bb:1.0515 rl:2.7494 rb:1.0656 dl:715-717 gd:1 +ttp: b516/782 bl:2.8611 bb:1.0772 rl:2.7495 rb:1.0656 dl:713-715 gd:1 +ttp: b515/782 bl:2.7881 bb:1.0746 rl:2.7496 rb:1.0656 dl:710-713 gd:1 +ttp: b514/782 bl:2.9120 bb:1.0984 rl:2.7498 rb:1.0657 dl:707-710 gd:1 +ttp: b513/782 bl:2.7379 bb:1.0134 rl:2.7498 rb:1.0656 dl:705-707 gd:1 +ttp: b512/782 bl:2.7862 bb:1.0577 rl:2.7499 rb:1.0656 dl:703-705 gd:1 +ttp: b511/782 bl:2.7693 bb:1.0460 rl:2.7499 rb:1.0656 dl:700-703 gd:1 +ttp: b510/782 bl:2.7572 bb:1.0197 rl:2.7499 rb:1.0655 dl:698-700 gd:1 +ttp: b509/782 bl:2.7486 bb:1.0696 rl:2.7499 rb:1.0655 dl:695-698 gd:1 +ttp: b508/782 bl:2.7583 bb:1.0307 rl:2.7499 rb:1.0654 dl:693-695 gd:1 +ttp: b507/782 bl:2.7569 bb:1.0407 rl:2.7499 rb:1.0654 dl:690-693 gd:1 +ttp: b506/782 bl:2.8108 bb:1.0767 rl:2.7500 rb:1.0654 dl:688-690 gd:1 +ttp: b505/782 bl:2.7822 bb:1.0630 rl:2.7501 rb:1.0654 dl:686-688 gd:1 +ttp: b504/782 bl:2.8661 bb:1.0982 rl:2.7503 rb:1.0655 dl:685-686 gd:1 +ttp: b503/782 bl:2.8287 bb:1.0772 rl:2.7504 rb:1.0655 dl:683-685 gd:1 +ttp: b502/782 bl:2.8294 bb:1.0624 rl:2.7505 rb:1.0655 dl:680-682 gd:1 +ttp: b501/782 bl:2.7902 bb:1.0394 rl:2.7505 rb:1.0654 dl:677-680 gd:1 +ttp: b500/782 bl:2.8366 bb:1.0835 rl:2.7507 rb:1.0655 dl:675-677 gd:1 +ttp: b499/782 bl:2.7879 bb:1.0521 rl:2.7507 rb:1.0654 dl:673-675 gd:1 +ttp: b498/782 bl:2.6782 bb:1.0368 rl:2.7506 rb:1.0654 dl:671-673 gd:1 +ttp: b497/782 bl:2.8342 bb:1.0806 rl:2.7507 rb:1.0654 dl:668-671 gd:1 +ttp: b496/782 bl:2.8342 bb:1.0504 rl:2.7508 rb:1.0654 dl:666-668 gd:1 +ttp: b495/782 bl:2.7660 bb:1.0561 rl:2.7509 rb:1.0654 dl:664-666 gd:1 +ttp: b494/782 bl:2.7952 bb:1.0537 rl:2.7509 rb:1.0654 dl:661-664 gd:1 +ttp: b493/782 bl:2.8469 bb:1.1164 rl:2.7511 rb:1.0654 dl:659-661 gd:1 +ttp: b492/782 bl:2.8105 bb:1.0569 rl:2.7511 rb:1.0654 dl:657-659 gd:1 +ttp: b491/782 bl:2.7388 bb:1.0319 rl:2.7511 rb:1.0654 dl:655-657 gd:1 +ttp: b490/782 bl:2.8552 bb:1.0910 rl:2.7513 rb:1.0654 dl:653-655 gd:1 +ttp: b489/782 bl:2.7969 bb:1.0815 rl:2.7513 rb:1.0654 dl:651-653 gd:1 +ttp: b488/782 bl:2.8223 bb:1.0518 rl:2.7514 rb:1.0654 dl:649-651 gd:1 +ttp: b487/782 bl:2.8065 bb:1.0722 rl:2.7515 rb:1.0654 dl:647-649 gd:1 +ttp: b486/782 bl:2.7935 bb:1.0603 rl:2.7516 rb:1.0654 dl:645-646 gd:1 +ttp: b485/782 bl:2.7877 bb:1.0491 rl:2.7516 rb:1.0654 dl:643-645 gd:1 +ttp: b484/782 bl:2.8049 bb:1.0705 rl:2.7517 rb:1.0654 dl:641-643 gd:1 +ttp: b483/782 bl:2.7493 bb:1.0514 rl:2.7517 rb:1.0654 dl:639-641 gd:1 +ttp: b482/782 bl:2.7578 bb:1.0823 rl:2.7517 rb:1.0654 dl:637-639 gd:1 +ttp: b481/782 bl:2.7980 bb:1.1001 rl:2.7517 rb:1.0655 dl:635-637 gd:1 +ttp: b480/782 bl:2.7964 bb:1.0557 rl:2.7518 rb:1.0654 dl:632-635 gd:1 +ttp: b479/782 bl:2.7099 bb:1.0343 rl:2.7517 rb:1.0654 dl:630-632 gd:1 +ttp: b478/782 bl:2.7960 bb:1.0529 rl:2.7518 rb:1.0654 dl:628-630 gd:1 +ttp: b477/782 bl:2.7745 bb:1.0541 rl:2.7518 rb:1.0654 dl:626-628 gd:1 +ttp: b476/782 bl:2.7552 bb:1.0523 rl:2.7518 rb:1.0654 dl:624-626 gd:1 +ttp: b475/782 bl:2.7260 bb:1.0220 rl:2.7518 rb:1.0653 dl:622-623 gd:1 
+ttp: b474/782 bl:2.7592 bb:1.0521 rl:2.7518 rb:1.0653 dl:620-622 gd:1 +ttp: b473/782 bl:2.8356 bb:1.0789 rl:2.7519 rb:1.0653 dl:618-620 gd:1 +ttp: b472/782 bl:2.8030 bb:1.0715 rl:2.7520 rb:1.0653 dl:616-618 gd:1 +ttp: b471/782 bl:2.8461 bb:1.0724 rl:2.7521 rb:1.0653 dl:614-616 gd:1 +ttp: b470/782 bl:2.8705 bb:1.0966 rl:2.7522 rb:1.0654 dl:611-613 gd:1 +ttp: b469/782 bl:2.8020 bb:1.1142 rl:2.7523 rb:1.0654 dl:610-611 gd:1 +ttp: b468/782 bl:2.7934 bb:1.0604 rl:2.7524 rb:1.0654 dl:608-610 gd:1 +ttp: b467/782 bl:2.7969 bb:1.0566 rl:2.7524 rb:1.0654 dl:606-608 gd:1 +ttp: b466/782 bl:2.8046 bb:1.0663 rl:2.7525 rb:1.0654 dl:604-606 gd:1 +ttp: b465/782 bl:2.8099 bb:1.0599 rl:2.7525 rb:1.0654 dl:602-604 gd:1 +ttp: b464/782 bl:2.7103 bb:1.0741 rl:2.7525 rb:1.0654 dl:600-602 gd:1 +ttp: b463/782 bl:2.8056 bb:1.0770 rl:2.7526 rb:1.0654 dl:599-600 gd:1 +ttp: b462/782 bl:2.8690 bb:1.0689 rl:2.7527 rb:1.0654 dl:597-599 gd:1 +ttp: b461/782 bl:2.7767 bb:1.0589 rl:2.7527 rb:1.0654 dl:595-597 gd:1 +ttp: b460/782 bl:2.7989 bb:1.0616 rl:2.7528 rb:1.0654 dl:593-595 gd:1 +ttp: b459/782 bl:2.7438 bb:1.0412 rl:2.7528 rb:1.0654 dl:591-593 gd:1 +ttp: b458/782 bl:2.8165 bb:1.0676 rl:2.7529 rb:1.0654 dl:589-591 gd:1 +ttp: b457/782 bl:2.7642 bb:1.0496 rl:2.7529 rb:1.0654 dl:587-589 gd:1 +ttp: b456/782 bl:2.8142 bb:1.0688 rl:2.7529 rb:1.0654 dl:586-587 gd:1 +ttp: b455/782 bl:2.8026 bb:1.0747 rl:2.7530 rb:1.0654 dl:584-586 gd:1 +ttp: b454/782 bl:2.8366 bb:1.0741 rl:2.7531 rb:1.0654 dl:582-584 gd:1 +ttp: b453/782 bl:2.7547 bb:1.0571 rl:2.7531 rb:1.0654 dl:580-582 gd:1 +ttp: b452/782 bl:2.7422 bb:1.0578 rl:2.7531 rb:1.0654 dl:579-580 gd:1 +ttp: b451/782 bl:2.7772 bb:1.0639 rl:2.7531 rb:1.0654 dl:576-579 gd:1 +ttp: b450/782 bl:2.7611 bb:1.0305 rl:2.7531 rb:1.0653 dl:575-576 gd:1 +ttp: b449/782 bl:2.7981 bb:1.0532 rl:2.7532 rb:1.0653 dl:573-575 gd:1 +ttp: b448/782 bl:2.7298 bb:1.0371 rl:2.7531 rb:1.0653 dl:571-573 gd:1 +ttp: b447/782 bl:2.8325 bb:1.0892 rl:2.7532 rb:1.0653 dl:569-571 gd:1 +ttp: b446/782 bl:2.8284 bb:1.0917 rl:2.7533 rb:1.0653 dl:568-569 gd:1 +ttp: b445/782 bl:2.7691 bb:1.0647 rl:2.7533 rb:1.0653 dl:566-568 gd:1 +ttp: b444/782 bl:2.6709 bb:1.0120 rl:2.7532 rb:1.0653 dl:564-566 gd:1 +ttp: b443/782 bl:2.7795 bb:1.0587 rl:2.7533 rb:1.0653 dl:562-564 gd:1 +ttp: b442/782 bl:2.8194 bb:1.0589 rl:2.7533 rb:1.0653 dl:560-562 gd:1 +ttp: b441/782 bl:2.7095 bb:1.0430 rl:2.7533 rb:1.0652 dl:559-560 gd:1 +ttp: b440/782 bl:2.8649 bb:1.0938 rl:2.7534 rb:1.0653 dl:556-559 gd:1 +ttp: b439/782 bl:2.7512 bb:1.0427 rl:2.7534 rb:1.0652 dl:555-556 gd:1 +ttp: b438/782 bl:2.7181 bb:1.0575 rl:2.7534 rb:1.0652 dl:553-555 gd:1 +ttp: b437/782 bl:2.8758 bb:1.0612 rl:2.7535 rb:1.0652 dl:551-553 gd:1 +ttp: b436/782 bl:2.8446 bb:1.0672 rl:2.7536 rb:1.0652 dl:549-551 gd:1 +ttp: b435/782 bl:2.7312 bb:1.0517 rl:2.7536 rb:1.0652 dl:547-549 gd:1 +ttp: b434/782 bl:2.7283 bb:1.0425 rl:2.7536 rb:1.0652 dl:545-547 gd:1 +ttp: b433/782 bl:2.7762 bb:1.0655 rl:2.7536 rb:1.0652 dl:544-545 gd:1 +ttp: b432/782 bl:2.7640 bb:1.0515 rl:2.7536 rb:1.0652 dl:542-544 gd:1 +ttp: b431/782 bl:2.7529 bb:1.0627 rl:2.7536 rb:1.0652 dl:540-542 gd:1 +ttp: b430/782 bl:2.7591 bb:1.0474 rl:2.7536 rb:1.0652 dl:539-540 gd:1 +ttp: b429/782 bl:2.7571 bb:1.0820 rl:2.7536 rb:1.0652 dl:537-539 gd:1 +ttp: b428/782 bl:2.8257 bb:1.0690 rl:2.7537 rb:1.0652 dl:535-537 gd:1 +ttp: b427/782 bl:2.7552 bb:1.0645 rl:2.7537 rb:1.0652 dl:533-535 gd:1 +ttp: b426/782 bl:2.7285 bb:1.0678 rl:2.7536 rb:1.0652 dl:532-533 gd:1 +ttp: b425/782 bl:2.7626 bb:1.0511 rl:2.7537 rb:1.0652 dl:530-532 gd:1 
+ttp: b424/782 bl:2.7929 bb:1.0794 rl:2.7537 rb:1.0652 dl:528-530 gd:1 +ttp: b423/782 bl:2.7450 bb:1.0310 rl:2.7537 rb:1.0651 dl:526-528 gd:1 +ttp: b422/782 bl:2.7296 bb:1.0408 rl:2.7537 rb:1.0651 dl:524-526 gd:1 +ttp: b421/782 bl:2.7907 bb:1.0527 rl:2.7537 rb:1.0651 dl:523-524 gd:1 +ttp: b420/782 bl:2.7794 bb:1.0585 rl:2.7537 rb:1.0651 dl:521-522 gd:1 +ttp: b419/782 bl:2.8027 bb:1.0416 rl:2.7538 rb:1.0651 dl:519-521 gd:1 +ttp: b418/782 bl:2.8136 bb:1.0732 rl:2.7538 rb:1.0651 dl:517-519 gd:1 +ttp: b417/782 bl:2.8187 bb:1.0570 rl:2.7539 rb:1.0651 dl:516-517 gd:1 +ttp: b416/782 bl:2.7611 bb:1.0364 rl:2.7539 rb:1.0650 dl:514-516 gd:1 +ttp: b415/782 bl:2.8516 bb:1.0834 rl:2.7540 rb:1.0651 dl:513-514 gd:1 +ttp: b414/782 bl:2.8173 bb:1.0861 rl:2.7541 rb:1.0651 dl:511-513 gd:1 +ttp: b413/782 bl:2.6491 bb:0.9989 rl:2.7540 rb:1.0650 dl:510-511 gd:1 +ttp: b412/782 bl:2.7058 bb:1.0508 rl:2.7539 rb:1.0650 dl:508-510 gd:1 +ttp: b411/782 bl:2.8209 bb:1.0755 rl:2.7540 rb:1.0650 dl:507-508 gd:1 +ttp: b410/782 bl:2.7778 bb:1.0548 rl:2.7540 rb:1.0650 dl:505-507 gd:1 +ttp: b409/782 bl:2.7112 bb:1.0474 rl:2.7540 rb:1.0650 dl:503-505 gd:1 +ttp: b408/782 bl:2.8382 bb:1.0856 rl:2.7540 rb:1.0650 dl:501-503 gd:1 +ttp: b407/782 bl:2.7792 bb:1.0579 rl:2.7541 rb:1.0650 dl:500-501 gd:1 +ttp: b406/782 bl:2.8384 bb:1.1052 rl:2.7541 rb:1.0650 dl:498-500 gd:1 +ttp: b405/782 bl:2.8240 bb:1.0674 rl:2.7542 rb:1.0650 dl:497-498 gd:1 +ttp: b404/782 bl:2.7820 bb:1.0675 rl:2.7542 rb:1.0650 dl:495-497 gd:1 +ttp: b403/782 bl:2.8177 bb:1.0529 rl:2.7543 rb:1.0650 dl:493-495 gd:1 +ttp: b402/782 bl:2.7530 bb:1.0372 rl:2.7543 rb:1.0650 dl:492-493 gd:1 +ttp: b401/782 bl:2.7412 bb:1.0611 rl:2.7543 rb:1.0650 dl:490-492 gd:1 +ttp: b400/782 bl:2.7936 bb:1.0656 rl:2.7543 rb:1.0650 dl:489-490 gd:1 +ttp: b399/782 bl:2.7470 bb:1.0405 rl:2.7543 rb:1.0650 dl:487-489 gd:1 +ttp: b398/782 bl:2.8818 bb:1.0945 rl:2.7544 rb:1.0650 dl:486-487 gd:1 +ttp: b397/782 bl:2.8947 bb:1.0997 rl:2.7546 rb:1.0650 dl:484-486 gd:1 +ttp: b396/782 bl:2.7542 bb:1.0539 rl:2.7546 rb:1.0650 dl:482-484 gd:1 +ttp: b395/782 bl:2.7367 bb:1.0450 rl:2.7545 rb:1.0650 dl:481-482 gd:1 +ttp: b394/782 bl:2.9004 bb:1.1186 rl:2.7547 rb:1.0651 dl:479-481 gd:1 +ttp: b393/782 bl:2.8596 bb:1.0890 rl:2.7548 rb:1.0651 dl:478-479 gd:1 +ttp: b392/782 bl:2.8010 bb:1.0816 rl:2.7548 rb:1.0651 dl:476-478 gd:1 +ttp: b391/782 bl:2.8128 bb:1.0955 rl:2.7549 rb:1.0651 dl:475-476 gd:1 +ttp: b390/782 bl:2.8129 bb:1.0911 rl:2.7549 rb:1.0651 dl:473-475 gd:1 +ttp: b389/782 bl:2.7962 bb:1.0651 rl:2.7549 rb:1.0651 dl:471-473 gd:1 +ttp: b388/782 bl:2.7881 bb:1.0698 rl:2.7550 rb:1.0651 dl:470-471 gd:1 +ttp: b387/782 bl:2.8365 bb:1.0737 rl:2.7550 rb:1.0652 dl:468-470 gd:1 +ttp: b386/782 bl:2.7253 bb:1.0647 rl:2.7550 rb:1.0652 dl:467-468 gd:1 +ttp: b385/782 bl:2.8901 bb:1.1013 rl:2.7551 rb:1.0652 dl:466-467 gd:1 +ttp: b384/782 bl:2.8462 bb:1.0920 rl:2.7552 rb:1.0652 dl:464-466 gd:1 +ttp: b383/782 bl:2.8359 bb:1.0861 rl:2.7553 rb:1.0652 dl:463-464 gd:1 +ttp: b382/782 bl:2.9132 bb:1.1340 rl:2.7554 rb:1.0653 dl:461-463 gd:1 +ttp: b381/782 bl:2.9057 bb:1.0911 rl:2.7556 rb:1.0653 dl:460-461 gd:1 +ttp: b380/782 bl:2.8484 bb:1.0785 rl:2.7556 rb:1.0653 dl:459-460 gd:1 +ttp: b379/782 bl:2.7661 bb:1.0592 rl:2.7556 rb:1.0653 dl:457-459 gd:1 +ttp: b378/782 bl:2.8231 bb:1.0984 rl:2.7557 rb:1.0653 dl:456-457 gd:1 +ttp: b377/782 bl:2.8023 bb:1.0865 rl:2.7557 rb:1.0654 dl:454-455 gd:1 +ttp: b376/782 bl:2.7199 bb:1.0446 rl:2.7557 rb:1.0653 dl:453-454 gd:1 +ttp: b375/782 bl:2.8126 bb:1.1084 rl:2.7558 rb:1.0654 dl:452-453 gd:1 
+ttp: b374/782 bl:2.7490 bb:1.0682 rl:2.7557 rb:1.0654 dl:450-452 gd:1 +ttp: b373/782 bl:2.7605 bb:1.0775 rl:2.7558 rb:1.0654 dl:449-450 gd:1 +ttp: b372/782 bl:2.8395 bb:1.0705 rl:2.7558 rb:1.0654 dl:447-449 gd:1 +ttp: b371/782 bl:2.8031 bb:1.0714 rl:2.7559 rb:1.0654 dl:446-447 gd:1 +ttp: b370/782 bl:2.6805 bb:1.0429 rl:2.7558 rb:1.0654 dl:444-446 gd:1 +ttp: b369/782 bl:2.9292 bb:1.0873 rl:2.7559 rb:1.0654 dl:443-444 gd:1 +ttp: b368/782 bl:2.8531 bb:1.0886 rl:2.7560 rb:1.0654 dl:441-443 gd:1 +ttp: b367/782 bl:2.8336 bb:1.0642 rl:2.7561 rb:1.0654 dl:440-441 gd:1 +ttp: b366/782 bl:2.8811 bb:1.1279 rl:2.7562 rb:1.0655 dl:439-440 gd:1 +ttp: b365/782 bl:2.7747 bb:1.0814 rl:2.7562 rb:1.0655 dl:437-439 gd:1 +ttp: b364/782 bl:2.7490 bb:1.0723 rl:2.7562 rb:1.0655 dl:436-437 gd:1 +ttp: b363/782 bl:2.7434 bb:1.0940 rl:2.7562 rb:1.0655 dl:434-436 gd:1 +ttp: b362/782 bl:2.8169 bb:1.0651 rl:2.7562 rb:1.0655 dl:433-434 gd:1 +ttp: b361/782 bl:2.8192 bb:1.0779 rl:2.7563 rb:1.0655 dl:432-433 gd:1 +ttp: b360/782 bl:2.8415 bb:1.0836 rl:2.7563 rb:1.0655 dl:430-432 gd:1 +ttp: b359/782 bl:2.7989 bb:1.0817 rl:2.7564 rb:1.0655 dl:429-430 gd:1 +ttp: b358/782 bl:2.8232 bb:1.0913 rl:2.7564 rb:1.0656 dl:427-429 gd:1 +ttp: b357/782 bl:2.8597 bb:1.0820 rl:2.7565 rb:1.0656 dl:426-427 gd:1 +ttp: b356/782 bl:2.6939 bb:1.0467 rl:2.7565 rb:1.0656 dl:424-426 gd:1 +ttp: b355/782 bl:2.7071 bb:1.0667 rl:2.7564 rb:1.0656 dl:423-424 gd:1 +ttp: b354/782 bl:2.7895 bb:1.0823 rl:2.7564 rb:1.0656 dl:422-423 gd:1 +ttp: b353/782 bl:2.8031 bb:1.0984 rl:2.7565 rb:1.0656 dl:420-422 gd:1 +ttp: b352/782 bl:2.7581 bb:1.0964 rl:2.7565 rb:1.0656 dl:419-420 gd:1 +ttp: b351/782 bl:2.8423 bb:1.0939 rl:2.7565 rb:1.0656 dl:418-419 gd:1 +ttp: b350/782 bl:2.7386 bb:1.0622 rl:2.7565 rb:1.0656 dl:417-418 gd:1 +ttp: b349/782 bl:2.9118 bb:1.1064 rl:2.7567 rb:1.0657 dl:415-417 gd:1 +ttp: b348/782 bl:2.8069 bb:1.0667 rl:2.7567 rb:1.0657 dl:414-415 gd:1 +ttp: b347/782 bl:2.8667 bb:1.0927 rl:2.7568 rb:1.0657 dl:413-414 gd:1 +ttp: b346/782 bl:2.8512 bb:1.0881 rl:2.7568 rb:1.0657 dl:412-413 gd:1 +ttp: b345/782 bl:2.8696 bb:1.1128 rl:2.7569 rb:1.0657 dl:410-412 gd:1 +ttp: b344/782 bl:2.8924 bb:1.1087 rl:2.7570 rb:1.0658 dl:408-410 gd:1 +ttp: b343/782 bl:2.8057 bb:1.0706 rl:2.7571 rb:1.0658 dl:407-408 gd:1 +ttp: b342/782 bl:2.8708 bb:1.1045 rl:2.7571 rb:1.0658 dl:406-407 gd:1 +ttp: b341/782 bl:2.8755 bb:1.1008 rl:2.7572 rb:1.0658 dl:404-406 gd:1 +ttp: b340/782 bl:2.8224 bb:1.0918 rl:2.7573 rb:1.0659 dl:403-404 gd:1 +ttp: b339/782 bl:2.8280 bb:1.0739 rl:2.7573 rb:1.0659 dl:402-403 gd:1 +ttp: b338/782 bl:2.8458 bb:1.1100 rl:2.7574 rb:1.0659 dl:400-402 gd:1 +ttp: b337/782 bl:2.8319 bb:1.0782 rl:2.7574 rb:1.0659 dl:399-400 gd:1 +ttp: b336/782 bl:2.9482 bb:1.1651 rl:2.7576 rb:1.0660 dl:398-399 gd:1 +ttp: b335/782 bl:2.7187 bb:1.0896 rl:2.7576 rb:1.0660 dl:396-398 gd:1 +ttp: b334/782 bl:2.8659 bb:1.1028 rl:2.7576 rb:1.0660 dl:395-396 gd:1 +ttp: b333/782 bl:2.9036 bb:1.1308 rl:2.7577 rb:1.0661 dl:394-395 gd:1 +ttp: b332/782 bl:2.8230 bb:1.0962 rl:2.7578 rb:1.0661 dl:393-394 gd:1 +ttp: b331/782 bl:2.7837 bb:1.0686 rl:2.7578 rb:1.0661 dl:392-393 gd:1 +ttp: b330/782 bl:2.8714 bb:1.0947 rl:2.7579 rb:1.0661 dl:390-392 gd:1 +ttp: b329/782 bl:2.8375 bb:1.1068 rl:2.7579 rb:1.0661 dl:389-390 gd:1 +ttp: b328/782 bl:2.7915 bb:1.0824 rl:2.7580 rb:1.0661 dl:388-389 gd:1 +ttp: b327/782 bl:2.7807 bb:1.0795 rl:2.7580 rb:1.0662 dl:387-388 gd:1 +ttp: b326/782 bl:2.8547 bb:1.1282 rl:2.7580 rb:1.0662 dl:385-387 gd:1 +ttp: b325/782 bl:2.8552 bb:1.0968 rl:2.7581 rb:1.0662 dl:384-385 gd:1 
+ttp: b324/782 bl:2.7712 bb:1.0570 rl:2.7581 rb:1.0662 dl:382-384 gd:1 +ttp: b323/782 bl:2.8158 bb:1.0474 rl:2.7582 rb:1.0662 dl:381-382 gd:1 +ttp: b322/782 bl:2.7623 bb:1.0794 rl:2.7582 rb:1.0662 dl:380-381 gd:1 +ttp: b321/782 bl:2.8087 bb:1.1037 rl:2.7582 rb:1.0662 dl:378-380 gd:1 +ttp: b320/782 bl:2.7588 bb:1.0762 rl:2.7582 rb:1.0662 dl:377-378 gd:1 +ttp: b319/782 bl:2.8355 bb:1.1124 rl:2.7582 rb:1.0663 dl:376-377 gd:1 +ttp: b318/782 bl:2.8082 bb:1.0652 rl:2.7583 rb:1.0663 dl:374-376 gd:1 +ttp: b317/782 bl:2.8779 bb:1.1125 rl:2.7584 rb:1.0663 dl:373-374 gd:1 +ttp: b316/782 bl:2.7825 bb:1.0942 rl:2.7584 rb:1.0663 dl:371-373 gd:1 +ttp: b315/782 bl:2.7145 bb:1.0673 rl:2.7583 rb:1.0663 dl:370-371 gd:1 +ttp: b314/782 bl:2.8063 bb:1.0667 rl:2.7584 rb:1.0663 dl:369-370 gd:1 +ttp: b313/782 bl:2.8284 bb:1.0893 rl:2.7584 rb:1.0663 dl:368-369 gd:1 +ttp: b312/782 bl:2.7382 bb:1.0689 rl:2.7584 rb:1.0663 dl:367-368 gd:1 +ttp: b311/782 bl:2.8563 bb:1.0942 rl:2.7585 rb:1.0663 dl:365-367 gd:1 +ttp: b310/782 bl:2.7978 bb:1.0839 rl:2.7585 rb:1.0664 dl:364-365 gd:1 +ttp: b309/782 bl:2.8314 bb:1.1047 rl:2.7585 rb:1.0664 dl:363-364 gd:1 +ttp: b308/782 bl:2.7995 bb:1.0876 rl:2.7586 rb:1.0664 dl:362-363 gd:1 +ttp: b307/782 bl:2.9003 bb:1.1087 rl:2.7587 rb:1.0664 dl:361-362 gd:1 +ttp: b306/782 bl:2.8802 bb:1.1395 rl:2.7587 rb:1.0665 dl:359-361 gd:1 +ttp: b305/782 bl:2.8607 bb:1.0853 rl:2.7588 rb:1.0665 dl:358-359 gd:1 +ttp: b304/782 bl:2.9047 bb:1.1313 rl:2.7589 rb:1.0665 dl:357-358 gd:1 +ttp: b303/782 bl:2.8107 bb:1.0889 rl:2.7589 rb:1.0665 dl:355-357 gd:1 +ttp: b302/782 bl:2.8364 bb:1.1001 rl:2.7590 rb:1.0666 dl:354-355 gd:1 +ttp: b301/782 bl:2.7947 bb:1.0867 rl:2.7590 rb:1.0666 dl:353-354 gd:1 +ttp: b300/782 bl:2.8599 bb:1.0900 rl:2.7590 rb:1.0666 dl:352-353 gd:1 +ttp: b299/782 bl:2.9040 bb:1.1261 rl:2.7591 rb:1.0666 dl:351-352 gd:1 +ttp: b298/782 bl:2.8483 bb:1.1025 rl:2.7592 rb:1.0666 dl:349-351 gd:1 +ttp: b297/782 bl:2.7994 bb:1.0606 rl:2.7592 rb:1.0666 dl:348-349 gd:1 +ttp: b296/782 bl:2.8091 bb:1.0864 rl:2.7592 rb:1.0666 dl:347-348 gd:1 +ttp: b295/782 bl:2.8422 bb:1.1206 rl:2.7593 rb:1.0667 dl:345-347 gd:1 +ttp: b293/782 bl:2.7671 bb:1.0690 rl:2.7593 rb:1.0667 dl:343-345 gd:1 +ttp: b294/782 bl:2.8475 bb:1.1012 rl:2.7594 rb:1.0667 dl:345-345 gd:1 +ttp: b292/782 bl:2.7938 bb:1.0825 rl:2.7594 rb:1.0667 dl:342-343 gd:1 +ttp: b291/782 bl:2.9563 bb:1.1161 rl:2.7595 rb:1.0667 dl:341-342 gd:1 +ttp: b290/782 bl:2.8709 bb:1.0877 rl:2.7596 rb:1.0668 dl:340-341 gd:1 +ttp: b289/782 bl:2.8391 bb:1.1239 rl:2.7596 rb:1.0668 dl:339-340 gd:1 +ttp: b288/782 bl:2.8165 bb:1.1058 rl:2.7596 rb:1.0668 dl:337-339 gd:1 +ttp: b287/782 bl:2.8594 bb:1.1154 rl:2.7597 rb:1.0668 dl:336-337 gd:1 +ttp: b286/782 bl:2.8981 bb:1.1009 rl:2.7598 rb:1.0669 dl:335-336 gd:1 +ttp: b285/782 bl:2.8838 bb:1.1286 rl:2.7598 rb:1.0669 dl:334-335 gd:1 +ttp: b284/782 bl:2.8807 bb:1.0860 rl:2.7599 rb:1.0669 dl:333-334 gd:1 +ttp: b283/782 bl:2.7992 bb:1.0738 rl:2.7599 rb:1.0669 dl:332-333 gd:1 +ttp: b282/782 bl:2.8265 bb:1.1256 rl:2.7600 rb:1.0669 dl:331-332 gd:1 +ttp: b281/782 bl:2.9300 bb:1.1553 rl:2.7601 rb:1.0670 dl:329-330 gd:1 +ttp: b279/782 bl:2.8621 bb:1.0939 rl:2.7601 rb:1.0670 dl:327-329 gd:1 +ttp: b280/782 bl:2.8110 bb:1.0910 rl:2.7602 rb:1.0670 dl:329-329 gd:1 +ttp: b278/782 bl:2.8931 bb:1.1407 rl:2.7602 rb:1.0671 dl:326-327 gd:1 +ttp: b277/782 bl:2.8109 bb:1.1071 rl:2.7603 rb:1.0671 dl:325-326 gd:1 +ttp: b276/782 bl:2.8534 bb:1.1059 rl:2.7603 rb:1.0671 dl:324-325 gd:1 +ttp: b275/782 bl:2.7607 bb:1.0677 rl:2.7603 rb:1.0671 dl:323-324 gd:1 
+ttp: b274/782 bl:2.8122 bb:1.0918 rl:2.7603 rb:1.0671 dl:322-323 gd:1 +ttp: b273/782 bl:2.7757 bb:1.0632 rl:2.7604 rb:1.0671 dl:321-322 gd:1 +ttp: b272/782 bl:2.8636 bb:1.1109 rl:2.7604 rb:1.0671 dl:320-321 gd:1 +ttp: b271/782 bl:2.7774 bb:1.0704 rl:2.7604 rb:1.0671 dl:319-320 gd:1 +ttp: b270/782 bl:2.7750 bb:1.0891 rl:2.7604 rb:1.0672 dl:318-319 gd:1 +ttp: b269/782 bl:2.9267 bb:1.1281 rl:2.7605 rb:1.0672 dl:316-318 gd:1 +ttp: b268/782 bl:2.8695 bb:1.1032 rl:2.7606 rb:1.0672 dl:315-316 gd:1 +ttp: b267/782 bl:2.8609 bb:1.0970 rl:2.7606 rb:1.0672 dl:314-315 gd:1 +ttp: b266/782 bl:2.8556 bb:1.0981 rl:2.7607 rb:1.0672 dl:313-314 gd:1 +ttp: b265/782 bl:2.8431 bb:1.0944 rl:2.7607 rb:1.0673 dl:312-313 gd:1 +ttp: b264/782 bl:2.8971 bb:1.1467 rl:2.7608 rb:1.0673 dl:311-312 gd:1 +ttp: b263/782 bl:2.8259 bb:1.1007 rl:2.7608 rb:1.0673 dl:310-311 gd:1 +ttp: b262/782 bl:2.8676 bb:1.1197 rl:2.7609 rb:1.0673 dl:309-310 gd:1 +ttp: b261/782 bl:2.8677 bb:1.1217 rl:2.7609 rb:1.0674 dl:308-309 gd:1 +ttp: b260/782 bl:2.8307 bb:1.1042 rl:2.7610 rb:1.0674 dl:306-307 gd:1 +ttp: b259/782 bl:2.8660 bb:1.1430 rl:2.7610 rb:1.0674 dl:305-306 gd:1 +ttp: b258/782 bl:2.9584 bb:1.1665 rl:2.7611 rb:1.0675 dl:304-305 gd:1 +ttp: b257/782 bl:2.9249 bb:1.1136 rl:2.7612 rb:1.0675 dl:302-304 gd:1 +ttp: b256/782 bl:2.8948 bb:1.1348 rl:2.7613 rb:1.0675 dl:301-302 gd:1 +ttp: b255/782 bl:2.8605 bb:1.1288 rl:2.7613 rb:1.0676 dl:300-301 gd:1 +ttp: b254/782 bl:2.9020 bb:1.1431 rl:2.7614 rb:1.0676 dl:299-300 gd:1 +ttp: b253/782 bl:2.7568 bb:1.0826 rl:2.7614 rb:1.0676 dl:298-299 gd:1 +ttp: b252/782 bl:2.8975 bb:1.1290 rl:2.7615 rb:1.0676 dl:297-298 gd:1 +ttp: b251/782 bl:2.8784 bb:1.1102 rl:2.7615 rb:1.0677 dl:296-297 gd:1 +ttp: b250/782 bl:2.8822 bb:1.1453 rl:2.7616 rb:1.0677 dl:295-296 gd:1 +ttp: b249/782 bl:2.8908 bb:1.1514 rl:2.7617 rb:1.0677 dl:294-295 gd:1 +ttp: b248/782 bl:2.8906 bb:1.1031 rl:2.7617 rb:1.0678 dl:293-294 gd:1 +ttp: b247/782 bl:2.7912 bb:1.0785 rl:2.7617 rb:1.0678 dl:292-293 gd:1 +ttp: b246/782 bl:2.9039 bb:1.1374 rl:2.7618 rb:1.0678 dl:291-292 gd:1 +ttp: b245/782 bl:2.8770 bb:1.1048 rl:2.7619 rb:1.0678 dl:290-291 gd:1 +ttp: b244/782 bl:2.9525 bb:1.1580 rl:2.7620 rb:1.0679 dl:289-290 gd:1 +ttp: b243/782 bl:2.8267 bb:1.1024 rl:2.7620 rb:1.0679 dl:288-289 gd:1 +ttp: b242/782 bl:2.9061 bb:1.1111 rl:2.7621 rb:1.0679 dl:287-288 gd:1 +ttp: b241/782 bl:2.9110 bb:1.1277 rl:2.7621 rb:1.0679 dl:286-287 gd:1 +ttp: b240/782 bl:2.9091 bb:1.1546 rl:2.7622 rb:1.0680 dl:285-286 gd:1 +ttp: b239/782 bl:2.8783 bb:1.1288 rl:2.7623 rb:1.0680 dl:284-285 gd:1 +ttp: b238/782 bl:2.8882 bb:1.1457 rl:2.7623 rb:1.0680 dl:283-284 gd:1 +ttp: b237/782 bl:2.9176 bb:1.1471 rl:2.7624 rb:1.0681 dl:282-283 gd:1 +ttp: b236/782 bl:2.8529 bb:1.1088 rl:2.7624 rb:1.0681 dl:281-282 gd:1 +ttp: b235/782 bl:2.9334 bb:1.1150 rl:2.7625 rb:1.0681 dl:280-281 gd:1 +ttp: b234/782 bl:2.9204 bb:1.1577 rl:2.7626 rb:1.0682 dl:279-280 gd:1 +ttp: b233/782 bl:2.8601 bb:1.1237 rl:2.7626 rb:1.0682 dl:278-279 gd:1 +ttp: b232/782 bl:2.9358 bb:1.1355 rl:2.7627 rb:1.0682 dl:277-278 gd:1 +ttp: b231/782 bl:2.8301 bb:1.1038 rl:2.7628 rb:1.0682 dl:276-277 gd:1 +ttp: b230/782 bl:2.9081 bb:1.1128 rl:2.7628 rb:1.0682 dl:275-276 gd:1 +ttp: b229/782 bl:2.8997 bb:1.1406 rl:2.7629 rb:1.0683 dl:274-275 gd:1 +ttp: b228/782 bl:2.8767 bb:1.1384 rl:2.7629 rb:1.0683 dl:273-274 gd:1 +ttp: b227/782 bl:2.8047 bb:1.0877 rl:2.7630 rb:1.0683 dl:272-273 gd:1 +ttp: b226/782 bl:2.9489 bb:1.1472 rl:2.7630 rb:1.0684 dl:271-272 gd:1 +ttp: b225/782 bl:2.8848 bb:1.1235 rl:2.7631 rb:1.0684 dl:270-271 gd:1 
+ttp: b224/782 bl:2.8168 bb:1.1065 rl:2.7631 rb:1.0684 dl:269-270 gd:1 +ttp: b223/782 bl:2.8261 bb:1.0880 rl:2.7631 rb:1.0684 dl:268-269 gd:1 +ttp: b222/782 bl:2.8748 bb:1.1169 rl:2.7632 rb:1.0684 dl:267-268 gd:1 +ttp: b221/782 bl:2.8477 bb:1.1428 rl:2.7632 rb:1.0685 dl:266-267 gd:1 +ttp: b220/782 bl:2.8660 bb:1.1091 rl:2.7633 rb:1.0685 dl:265-266 gd:1 +ttp: b219/782 bl:2.9091 bb:1.1350 rl:2.7633 rb:1.0685 dl:264-265 gd:1 +ttp: b218/782 bl:2.7369 bb:1.1005 rl:2.7633 rb:1.0685 dl:263-264 gd:1 +ttp: b217/782 bl:2.8857 bb:1.1304 rl:2.7634 rb:1.0685 dl:262-263 gd:1 +ttp: b216/782 bl:2.9335 bb:1.1163 rl:2.7635 rb:1.0686 dl:261-262 gd:1 +ttp: b215/782 bl:2.8530 bb:1.1448 rl:2.7635 rb:1.0686 dl:260-261 gd:1 +ttp: b214/782 bl:2.9388 bb:1.1306 rl:2.7636 rb:1.0686 dl:259-260 gd:1 +ttp: b213/782 bl:3.0138 bb:1.1760 rl:2.7637 rb:1.0687 dl:258-259 gd:1 +ttp: b212/782 bl:2.9335 bb:1.1481 rl:2.7638 rb:1.0687 dl:257-258 gd:1 +ttp: b211/782 bl:2.8922 bb:1.1522 rl:2.7638 rb:1.0687 dl:256-257 gd:1 +ttp: b210/782 bl:2.8565 bb:1.1242 rl:2.7639 rb:1.0688 dl:255-256 gd:1 +ttp: b209/782 bl:2.9175 bb:1.1550 rl:2.7639 rb:1.0688 dl:254-255 gd:1 +ttp: b207/782 bl:2.8447 bb:1.1191 rl:2.7640 rb:1.0688 dl:253-254 gd:1 +ttp: b208/782 bl:2.8328 bb:1.1185 rl:2.7640 rb:1.0688 dl:254-254 gd:1 +ttp: b206/782 bl:2.8871 bb:1.1175 rl:2.7640 rb:1.0689 dl:252-253 gd:1 +ttp: b205/782 bl:2.8453 bb:1.1101 rl:2.7641 rb:1.0689 dl:251-252 gd:1 +ttp: b204/782 bl:2.9153 bb:1.1340 rl:2.7641 rb:1.0689 dl:250-251 gd:1 +ttp: b203/782 bl:2.7791 bb:1.0918 rl:2.7641 rb:1.0689 dl:249-250 gd:1 +ttp: b202/782 bl:2.8633 bb:1.1318 rl:2.7642 rb:1.0689 dl:248-249 gd:1 +ttp: b201/782 bl:2.8731 bb:1.1198 rl:2.7642 rb:1.0690 dl:247-248 gd:1 +ttp: b199/782 bl:2.9490 bb:1.1303 rl:2.7643 rb:1.0690 dl:246-247 gd:1 +ttp: b200/782 bl:2.8489 bb:1.0950 rl:2.7643 rb:1.0690 dl:247-247 gd:1 +ttp: b198/782 bl:2.9836 bb:1.1539 rl:2.7644 rb:1.0690 dl:245-246 gd:1 +ttp: b197/782 bl:2.8555 bb:1.1261 rl:2.7645 rb:1.0691 dl:244-245 gd:1 +ttp: b196/782 bl:2.9176 bb:1.1691 rl:2.7645 rb:1.0691 dl:243-244 gd:1 +ttp: b195/782 bl:2.8563 bb:1.1179 rl:2.7646 rb:1.0691 dl:242-243 gd:1 +ttp: b194/782 bl:2.9042 bb:1.1022 rl:2.7646 rb:1.0691 dl:241-242 gd:1 +ttp: b193/782 bl:2.8897 bb:1.1643 rl:2.7647 rb:1.0692 dl:240-241 gd:1 +ttp: b192/782 bl:2.9127 bb:1.1481 rl:2.7647 rb:1.0692 dl:239-240 gd:1 +ttp: b191/782 bl:2.9387 bb:1.1475 rl:2.7648 rb:1.0692 dl:238-239 gd:1 +ttp: b190/782 bl:2.8803 bb:1.0950 rl:2.7648 rb:1.0692 dl:237-238 gd:1 +ttp: b188/782 bl:2.9061 bb:1.1512 rl:2.7649 rb:1.0693 dl:236-237 gd:1 +ttp: b189/782 bl:2.9639 bb:1.2029 rl:2.7650 rb:1.0693 dl:237-237 gd:1 +ttp: b187/782 bl:2.9039 bb:1.1197 rl:2.7650 rb:1.0693 dl:235-236 gd:1 +ttp: b186/782 bl:2.9407 bb:1.1749 rl:2.7651 rb:1.0694 dl:234-235 gd:1 +ttp: b185/782 bl:2.8634 bb:1.1239 rl:2.7651 rb:1.0694 dl:233-234 gd:1 +ttp: b184/782 bl:2.9035 bb:1.1529 rl:2.7652 rb:1.0694 dl:232-233 gd:1 +ttp: b183/782 bl:2.8671 bb:1.1444 rl:2.7652 rb:1.0695 dl:231-232 gd:1 +ttp: b182/782 bl:2.8504 bb:1.1339 rl:2.7653 rb:1.0695 dl:230-231 gd:1 +ttp: b180/782 bl:2.9127 bb:1.1359 rl:2.7653 rb:1.0695 dl:229-230 gd:1 +ttp: b181/782 bl:2.8875 bb:1.1602 rl:2.7654 rb:1.0695 dl:230-230 gd:1 +ttp: b179/782 bl:2.9514 bb:1.1529 rl:2.7654 rb:1.0696 dl:228-229 gd:1 +ttp: b178/782 bl:2.8541 bb:1.1383 rl:2.7655 rb:1.0696 dl:227-228 gd:1 +ttp: b177/782 bl:2.9340 bb:1.1513 rl:2.7655 rb:1.0696 dl:226-227 gd:1 +ttp: b176/782 bl:2.8234 bb:1.1077 rl:2.7655 rb:1.0696 dl:225-226 gd:1 +ttp: b175/782 bl:2.8432 bb:1.1145 rl:2.7656 rb:1.0697 dl:225-225 gd:1 
+ttp: b173/782 bl:2.9626 bb:1.1517 rl:2.7656 rb:1.0697 dl:223-224 gd:1 +ttp: b174/782 bl:2.9725 bb:1.1540 rl:2.7657 rb:1.0697 dl:224-224 gd:1 +ttp: b172/782 bl:3.0153 bb:1.1859 rl:2.7658 rb:1.0698 dl:222-223 gd:1 +ttp: b171/782 bl:2.8920 bb:1.1123 rl:2.7659 rb:1.0698 dl:221-222 gd:1 +ttp: b170/782 bl:2.9966 bb:1.1718 rl:2.7659 rb:1.0698 dl:220-221 gd:1 +ttp: b169/782 bl:2.9075 bb:1.1617 rl:2.7660 rb:1.0698 dl:219-220 gd:1 +ttp: b168/782 bl:2.9269 bb:1.1470 rl:2.7660 rb:1.0699 dl:218-219 gd:1 +ttp: b166/782 bl:2.9666 bb:1.1437 rl:2.7661 rb:1.0699 dl:217-218 gd:1 +ttp: b167/782 bl:2.9622 bb:1.1841 rl:2.7662 rb:1.0699 dl:218-218 gd:1 +ttp: b165/782 bl:2.9368 bb:1.1621 rl:2.7662 rb:1.0700 dl:216-217 gd:1 +ttp: b164/782 bl:2.9638 bb:1.1461 rl:2.7663 rb:1.0700 dl:215-216 gd:1 +ttp: b163/782 bl:2.8796 bb:1.1303 rl:2.7664 rb:1.0700 dl:214-215 gd:1 +ttp: b162/782 bl:2.9691 bb:1.1520 rl:2.7664 rb:1.0700 dl:213-214 gd:1 +ttp: b161/782 bl:2.9610 bb:1.1783 rl:2.7665 rb:1.0701 dl:212-213 gd:1 +ttp: b159/782 bl:2.9998 bb:1.1818 rl:2.7666 rb:1.0701 dl:211-212 gd:1 +ttp: b160/782 bl:2.8728 bb:1.1291 rl:2.7666 rb:1.0701 dl:212-212 gd:1 +ttp: b158/782 bl:2.8865 bb:1.1427 rl:2.7667 rb:1.0702 dl:210-211 gd:1 +ttp: b157/782 bl:2.8245 bb:1.1133 rl:2.7667 rb:1.0702 dl:209-210 gd:1 +ttp: b156/782 bl:2.8974 bb:1.1110 rl:2.7667 rb:1.0702 dl:208-209 gd:1 +ttp: b155/782 bl:2.8798 bb:1.1318 rl:2.7668 rb:1.0702 dl:207-208 gd:1 +ttp: b153/782 bl:3.0182 bb:1.1643 rl:2.7668 rb:1.0702 dl:206-207 gd:1 +ttp: b154/782 bl:2.9928 bb:1.1584 rl:2.7669 rb:1.0703 dl:207-207 gd:1 +ttp: b152/782 bl:2.8905 bb:1.1278 rl:2.7670 rb:1.0703 dl:205-206 gd:1 +ttp: b151/782 bl:2.7967 bb:1.1022 rl:2.7670 rb:1.0703 dl:204-205 gd:1 +ttp: b149/782 bl:2.9724 bb:1.1718 rl:2.7670 rb:1.0703 dl:203-204 gd:1 +ttp: b150/782 bl:2.9521 bb:1.1604 rl:2.7671 rb:1.0704 dl:204-204 gd:1 +ttp: b148/782 bl:2.9861 bb:1.1606 rl:2.7672 rb:1.0704 dl:202-203 gd:1 +ttp: b147/782 bl:2.9257 bb:1.1575 rl:2.7672 rb:1.0704 dl:201-202 gd:1 +ttp: b146/782 bl:2.9027 bb:1.1523 rl:2.7673 rb:1.0704 dl:200-201 gd:1 +ttp: b144/782 bl:2.8339 bb:1.1272 rl:2.7673 rb:1.0705 dl:199-200 gd:1 +ttp: b145/782 bl:2.8899 bb:1.1339 rl:2.7673 rb:1.0705 dl:200-200 gd:1 +ttp: b143/782 bl:3.0280 bb:1.1994 rl:2.7674 rb:1.0705 dl:198-199 gd:1 +ttp: b142/782 bl:2.9662 bb:1.1629 rl:2.7675 rb:1.0706 dl:197-198 gd:1 +ttp: b141/782 bl:2.9044 bb:1.1450 rl:2.7675 rb:1.0706 dl:196-197 gd:1 +ttp: b140/782 bl:2.9622 bb:1.1696 rl:2.7676 rb:1.0706 dl:195-196 gd:1 +ttp: b138/782 bl:2.9144 bb:1.1600 rl:2.7676 rb:1.0706 dl:194-195 gd:1 +ttp: b139/782 bl:2.9811 bb:1.1538 rl:2.7677 rb:1.0707 dl:195-195 gd:1 +ttp: b137/782 bl:2.9616 bb:1.1934 rl:2.7678 rb:1.0707 dl:193-194 gd:1 +ttp: b136/782 bl:2.9704 bb:1.1850 rl:2.7678 rb:1.0707 dl:192-193 gd:1 +ttp: b135/782 bl:2.9314 bb:1.1420 rl:2.7679 rb:1.0708 dl:191-192 gd:1 +ttp: b134/782 bl:3.0323 bb:1.2128 rl:2.7679 rb:1.0708 dl:190-191 gd:1 +ttp: b133/782 bl:3.0208 bb:1.1933 rl:2.7680 rb:1.0708 dl:189-190 gd:1 +ttp: b131/782 bl:3.0386 bb:1.2077 rl:2.7681 rb:1.0709 dl:188-189 gd:1 +ttp: b132/782 bl:2.9522 bb:1.1362 rl:2.7682 rb:1.0709 dl:189-189 gd:1 +ttp: b130/782 bl:3.1484 bb:1.2376 rl:2.7683 rb:1.0709 dl:187-188 gd:1 +ttp: b128/782 bl:2.8397 bb:1.0902 rl:2.7683 rb:1.0709 dl:186-187 gd:1 +ttp: b129/782 bl:2.9419 bb:1.1810 rl:2.7684 rb:1.0710 dl:187-187 gd:1 +ttp: b127/782 bl:2.9037 bb:1.1478 rl:2.7684 rb:1.0710 dl:185-186 gd:1 +ttp: b125/782 bl:2.9941 bb:1.1865 rl:2.7685 rb:1.0710 dl:184-185 gd:1 +ttp: b126/782 bl:2.9395 bb:1.1943 rl:2.7685 rb:1.0711 dl:185-185 gd:1 
+ttp: b124/782 bl:2.8711 bb:1.1485 rl:2.7685 rb:1.0711 dl:183-184 gd:1 +ttp: b123/782 bl:2.9627 bb:1.1837 rl:2.7686 rb:1.0711 dl:182-183 gd:1 +ttp: b122/782 bl:2.8976 bb:1.1593 rl:2.7686 rb:1.0712 dl:181-182 gd:1 +ttp: b120/782 bl:2.9736 bb:1.1684 rl:2.7687 rb:1.0712 dl:180-181 gd:1 +ttp: b121/782 bl:2.8489 bb:1.1287 rl:2.7687 rb:1.0712 dl:181-181 gd:1 +ttp: b119/782 bl:2.8134 bb:1.0894 rl:2.7687 rb:1.0712 dl:179-180 gd:1 +ttp: b118/782 bl:2.9627 bb:1.1575 rl:2.7688 rb:1.0712 dl:178-179 gd:1 +ttp: b116/782 bl:3.0122 bb:1.1912 rl:2.7689 rb:1.0713 dl:177-178 gd:1 +ttp: b117/782 bl:2.8603 bb:1.1465 rl:2.7689 rb:1.0713 dl:178-178 gd:1 +ttp: b115/782 bl:2.8652 bb:1.1561 rl:2.7689 rb:1.0713 dl:176-177 gd:1 +ttp: b113/782 bl:3.0313 bb:1.1919 rl:2.7690 rb:1.0713 dl:175-176 gd:1 +ttp: b114/782 bl:2.9998 bb:1.1876 rl:2.7691 rb:1.0714 dl:176-176 gd:1 +ttp: b112/782 bl:2.9799 bb:1.1526 rl:2.7691 rb:1.0714 dl:174-175 gd:1 +ttp: b111/782 bl:2.9732 bb:1.1863 rl:2.7692 rb:1.0714 dl:173-174 gd:1 +ttp: b109/782 bl:3.0721 bb:1.2107 rl:2.7693 rb:1.0715 dl:172-173 gd:1 +ttp: b110/782 bl:3.0271 bb:1.1753 rl:2.7693 rb:1.0715 dl:173-173 gd:1 +ttp: b108/782 bl:2.8618 bb:1.0991 rl:2.7693 rb:1.0715 dl:171-172 gd:1 +ttp: b106/782 bl:2.9250 bb:1.1814 rl:2.7694 rb:1.0715 dl:170-171 gd:1 +ttp: b107/782 bl:2.9170 bb:1.1451 rl:2.7694 rb:1.0715 dl:171-171 gd:1 +ttp: b105/782 bl:3.0601 bb:1.2258 rl:2.7695 rb:1.0716 dl:169-170 gd:1 +ttp: b104/782 bl:2.9949 bb:1.1654 rl:2.7696 rb:1.0716 dl:168-169 gd:1 +ttp: b102/782 bl:2.7622 bb:1.1123 rl:2.7696 rb:1.0716 dl:167-168 gd:1 +ttp: b103/782 bl:2.8932 bb:1.1198 rl:2.7696 rb:1.0716 dl:168-168 gd:1 +ttp: b101/782 bl:2.9505 bb:1.1581 rl:2.7697 rb:1.0717 dl:166-167 gd:1 +ttp: b100/782 bl:2.9470 bb:1.1568 rl:2.7697 rb:1.0717 dl:165-166 gd:1 +ttp: b99/782 bl:2.9870 bb:1.1876 rl:2.7698 rb:1.0717 dl:164-165 gd:1 +ttp: b97/782 bl:2.9961 bb:1.1705 rl:2.7698 rb:1.0717 dl:163-164 gd:1 +ttp: b98/782 bl:2.9859 bb:1.1851 rl:2.7699 rb:1.0718 dl:164-164 gd:1 +ttp: b96/782 bl:2.9500 bb:1.1530 rl:2.7699 rb:1.0718 dl:162-163 gd:1 +ttp: b95/782 bl:3.0187 bb:1.2293 rl:2.7700 rb:1.0718 dl:161-162 gd:1 +ttp: b94/782 bl:2.9868 bb:1.1779 rl:2.7700 rb:1.0718 dl:160-161 gd:1 +ttp: b92/782 bl:2.9064 bb:1.1755 rl:2.7701 rb:1.0719 dl:159-160 gd:1 +ttp: b93/782 bl:2.9578 bb:1.1865 rl:2.7701 rb:1.0719 dl:160-160 gd:1 +ttp: b91/782 bl:3.0401 bb:1.2167 rl:2.7702 rb:1.0719 dl:158-159 gd:1 +ttp: b89/782 bl:3.0120 bb:1.2012 rl:2.7702 rb:1.0720 dl:157-158 gd:1 +ttp: b90/782 bl:2.9975 bb:1.1823 rl:2.7703 rb:1.0720 dl:158-158 gd:1 +ttp: b88/782 bl:3.1020 bb:1.2078 rl:2.7704 rb:1.0720 dl:156-157 gd:1 +ttp: b87/782 bl:3.0172 bb:1.2059 rl:2.7705 rb:1.0721 dl:155-156 gd:1 +ttp: b86/782 bl:3.0392 bb:1.2649 rl:2.7705 rb:1.0721 dl:154-155 gd:1 +ttp: b84/782 bl:3.0145 bb:1.2146 rl:2.7706 rb:1.0721 dl:153-154 gd:1 +ttp: b85/782 bl:2.9734 bb:1.1938 rl:2.7706 rb:1.0722 dl:154-154 gd:1 +ttp: b83/782 bl:3.0344 bb:1.2125 rl:2.7707 rb:1.0722 dl:152-153 gd:1 +ttp: b82/782 bl:2.9745 bb:1.1973 rl:2.7707 rb:1.0722 dl:151-152 gd:1 +ttp: b80/782 bl:2.9125 bb:1.1930 rl:2.7708 rb:1.0723 dl:150-151 gd:1 +ttp: b81/782 bl:2.9344 bb:1.1670 rl:2.7708 rb:1.0723 dl:151-151 gd:1 +ttp: b79/782 bl:3.0334 bb:1.2043 rl:2.7709 rb:1.0723 dl:149-150 gd:1 +ttp: b78/782 bl:2.9113 bb:1.1295 rl:2.7709 rb:1.0723 dl:148-149 gd:1 +ttp: b76/782 bl:3.0518 bb:1.2244 rl:2.7710 rb:1.0724 dl:147-148 gd:1 +ttp: b77/782 bl:3.0284 bb:1.1702 rl:2.7710 rb:1.0724 dl:148-148 gd:1 +ttp: b75/782 bl:3.0916 bb:1.2138 rl:2.7711 rb:1.0724 dl:146-147 gd:1 +ttp: b74/782 bl:3.1175 
bb:1.2743 rl:2.7712 rb:1.0725 dl:145-146 gd:1 +ttp: b73/782 bl:3.0570 bb:1.2093 rl:2.7713 rb:1.0725 dl:144-145 gd:1 +ttp: b71/782 bl:2.9575 bb:1.1539 rl:2.7713 rb:1.0725 dl:143-144 gd:1 +ttp: b72/782 bl:2.9337 bb:1.1922 rl:2.7713 rb:1.0725 dl:144-144 gd:1 +ttp: b70/782 bl:3.0641 bb:1.1644 rl:2.7714 rb:1.0726 dl:142-143 gd:1 +ttp: b68/782 bl:3.1053 bb:1.2064 rl:2.7715 rb:1.0726 dl:141-142 gd:1 +ttp: b69/782 bl:3.1096 bb:1.2337 rl:2.7716 rb:1.0726 dl:142-142 gd:1 +ttp: b67/782 bl:3.0708 bb:1.2412 rl:2.7716 rb:1.0727 dl:140-141 gd:1 +ttp: b66/782 bl:3.1107 bb:1.2742 rl:2.7717 rb:1.0727 dl:139-140 gd:1 +ttp: b64/782 bl:2.9960 bb:1.2418 rl:2.7717 rb:1.0727 dl:138-139 gd:1 +ttp: b65/782 bl:3.0364 bb:1.2192 rl:2.7718 rb:1.0728 dl:139-139 gd:1 +ttp: b63/782 bl:3.0267 bb:1.2207 rl:2.7719 rb:1.0728 dl:137-138 gd:1 +ttp: b62/782 bl:2.9893 bb:1.2092 rl:2.7719 rb:1.0728 dl:136-137 gd:1 +ttp: b61/782 bl:2.9345 bb:1.1472 rl:2.7719 rb:1.0728 dl:135-136 gd:1 +ttp: b60/782 bl:3.0730 bb:1.2332 rl:2.7720 rb:1.0729 dl:134-135 gd:1 +ttp: b58/782 bl:2.9819 bb:1.2299 rl:2.7720 rb:1.0729 dl:133-134 gd:1 +ttp: b59/782 bl:3.0573 bb:1.1942 rl:2.7721 rb:1.0729 dl:134-134 gd:1 +ttp: b57/782 bl:3.0414 bb:1.2260 rl:2.7722 rb:1.0730 dl:132-133 gd:1 +ttp: b56/782 bl:3.0581 bb:1.2052 rl:2.7722 rb:1.0730 dl:131-132 gd:1 +ttp: b55/782 bl:3.0801 bb:1.2370 rl:2.7723 rb:1.0730 dl:130-131 gd:1 +ttp: b53/782 bl:3.1226 bb:1.2311 rl:2.7724 rb:1.0731 dl:129-130 gd:1 +ttp: b54/782 bl:3.1120 bb:1.2743 rl:2.7724 rb:1.0731 dl:130-130 gd:1 +ttp: b52/782 bl:3.0575 bb:1.1963 rl:2.7725 rb:1.0731 dl:128-129 gd:1 +ttp: b51/782 bl:3.0561 bb:1.2216 rl:2.7725 rb:1.0731 dl:127-128 gd:1 +ttp: b50/782 bl:2.9900 bb:1.2273 rl:2.7726 rb:1.0732 dl:126-127 gd:1 +ttp: b48/782 bl:2.9963 bb:1.1714 rl:2.7726 rb:1.0732 dl:125-126 gd:1 +ttp: b49/782 bl:2.9743 bb:1.1734 rl:2.7727 rb:1.0732 dl:126-126 gd:1 +ttp: b47/782 bl:2.9472 bb:1.1778 rl:2.7727 rb:1.0732 dl:124-125 gd:1 +ttp: b46/782 bl:3.1333 bb:1.2252 rl:2.7728 rb:1.0733 dl:123-124 gd:1 +ttp: b45/782 bl:3.0968 bb:1.2389 rl:2.7728 rb:1.0733 dl:122-123 gd:1 +ttp: b43/782 bl:2.9828 bb:1.1862 rl:2.7729 rb:1.0733 dl:121-122 gd:1 +ttp: b44/782 bl:3.1551 bb:1.2294 rl:2.7730 rb:1.0733 dl:122-122 gd:1 +ttp: b42/782 bl:3.1154 bb:1.2472 rl:2.7730 rb:1.0734 dl:120-121 gd:1 +ttp: b41/782 bl:3.1458 bb:1.2856 rl:2.7731 rb:1.0734 dl:119-120 gd:1 +ttp: b39/782 bl:3.1422 bb:1.2418 rl:2.7732 rb:1.0734 dl:118-119 gd:1 +ttp: b40/782 bl:3.0130 bb:1.2119 rl:2.7732 rb:1.0735 dl:119-119 gd:1 +ttp: b38/782 bl:3.0467 bb:1.2160 rl:2.7733 rb:1.0735 dl:117-118 gd:1 +ttp: b37/782 bl:3.0923 bb:1.2140 rl:2.7733 rb:1.0735 dl:116-117 gd:1 +ttp: b36/782 bl:2.9951 bb:1.2248 rl:2.7734 rb:1.0735 dl:115-116 gd:1 +ttp: b34/782 bl:3.0884 bb:1.2503 rl:2.7734 rb:1.0736 dl:114-115 gd:1 +ttp: b35/782 bl:3.0181 bb:1.1985 rl:2.7735 rb:1.0736 dl:115-115 gd:1 +ttp: b33/782 bl:3.0925 bb:1.2107 rl:2.7735 rb:1.0736 dl:113-114 gd:1 +ttp: b32/782 bl:3.0292 bb:1.2109 rl:2.7736 rb:1.0736 dl:112-113 gd:1 +ttp: b31/782 bl:3.1921 bb:1.2650 rl:2.7736 rb:1.0737 dl:111-112 gd:1 +ttp: b30/782 bl:3.1216 bb:1.2504 rl:2.7737 rb:1.0737 dl:110-111 gd:1 +ttp: b29/782 bl:3.0657 bb:1.2494 rl:2.7737 rb:1.0737 dl:109-110 gd:1 +ttp: b28/782 bl:3.0206 bb:1.2163 rl:2.7738 rb:1.0738 dl:108-109 gd:1 +ttp: b27/782 bl:3.1018 bb:1.2383 rl:2.7738 rb:1.0738 dl:107-108 gd:1 +ttp: b25/782 bl:3.3073 bb:1.3108 rl:2.7739 rb:1.0738 dl:106-107 gd:1 +ttp: b26/782 bl:3.0814 bb:1.2563 rl:2.7740 rb:1.0739 dl:107-107 gd:1 +ttp: b24/782 bl:3.0709 bb:1.2149 rl:2.7740 rb:1.0739 dl:105-106 gd:1 +ttp: 
b23/782 bl:3.1460 bb:1.2539 rl:2.7741 rb:1.0739 dl:104-105 gd:1 +ttp: b22/782 bl:3.1660 bb:1.2341 rl:2.7741 rb:1.0739 dl:103-104 gd:1 +ttp: b21/782 bl:3.2057 bb:1.2465 rl:2.7742 rb:1.0740 dl:102-103 gd:1 +ttp: b20/782 bl:3.1291 bb:1.2656 rl:2.7743 rb:1.0740 dl:101-102 gd:1 +ttp: b19/782 bl:3.1400 bb:1.2264 rl:2.7743 rb:1.0740 dl:100-101 gd:1 +ttp: b18/782 bl:3.1292 bb:1.2676 rl:2.7744 rb:1.0740 dl:99-100 gd:1 +ttp: b17/782 bl:3.1247 bb:1.2385 rl:2.7744 rb:1.0741 dl:98-99 gd:1 +ttp: b16/782 bl:3.0427 bb:1.2132 rl:2.7745 rb:1.0741 dl:97-98 gd:1 +ttp: b15/782 bl:3.2472 bb:1.2423 rl:2.7746 rb:1.0741 dl:95-97 gd:1 +ttp: b14/782 bl:3.1309 bb:1.2314 rl:2.7746 rb:1.0741 dl:94-95 gd:1 +ttp: b13/782 bl:3.1599 bb:1.2718 rl:2.7747 rb:1.0742 dl:93-94 gd:1 +ttp: b12/782 bl:3.1863 bb:1.2420 rl:2.7747 rb:1.0742 dl:92-93 gd:1 +ttp: b11/782 bl:3.2419 bb:1.2669 rl:2.7748 rb:1.0742 dl:90-92 gd:1 +ttp: b10/782 bl:3.1183 bb:1.2322 rl:2.7748 rb:1.0742 dl:89-90 gd:1 +ttp: b9/782 bl:3.2133 bb:1.2734 rl:2.7749 rb:1.0743 dl:87-89 gd:1 +ttp: b8/782 bl:3.2669 bb:1.2629 rl:2.7750 rb:1.0743 dl:86-87 gd:1 +ttp: b7/782 bl:3.2280 bb:1.2386 rl:2.7750 rb:1.0743 dl:84-86 gd:1 +ttp: b6/782 bl:3.2762 bb:1.2782 rl:2.7751 rb:1.0743 dl:82-84 gd:1 +ttp: b5/782 bl:3.3106 bb:1.2913 rl:2.7752 rb:1.0744 dl:80-82 gd:1 +ttp: b4/782 bl:3.2215 bb:1.2352 rl:2.7752 rb:1.0744 dl:78-80 gd:1 +ttp: b3/782 bl:3.3317 bb:1.2636 rl:2.7753 rb:1.0744 dl:75-78 gd:1 +ttp: b2/782 bl:3.1534 bb:1.1696 rl:2.7753 rb:1.0744 dl:70-75 gd:1 +ttp: b1/782 bl:3.3719 bb:1.2514 rl:2.7754 rb:1.0744 dl:45-70 gd:1 +quantized_ttt_phased val_loss:2.77538047 val_bpb:1.07443627 eval_time:3160166ms +total_eval_time:3160.2s