From 97ed374043e48994389babe0dca4a551cb61418e Mon Sep 17 00:00:00 2001 From: Denis Kurlov Date: Fri, 1 May 2026 05:09:28 +0200 Subject: [PATCH] Non-record: 52.8M Quinary quantization + SP16384 + Score-First TTT (1.1384 BPB, 3-seed) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 3-seed mean TTT BPB 1.1384 ± 0.0009 std on 8×H100 SXM (10 min training) in records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/. Direct quinary {-2,-1,0,+1,+2} fork of @CiprianFlorin-Ifrim's 2026-03-24 ternary record (PR #640): inherits the U-Net topology + Muon + factored tied embedding + FP8 QAT + YaRN + FlashAttention-3, swaps ternary -> quinary (-0.021 BPB roundtrip BPB), SP8192 -> SP16384 tokenizer, single-blob LZMA -> layout-aware per-stream archive (structurally based on @codemath3000's PR #1855), and stride-16 sliding eval -> score-first TTT on the 42,364 fp16 calibration parameters (-0.024 BPB). 15.72 MB max-seed total, 275 KB margin under the 16 MB cap. BPB denominator audit closed end-to-end: bundled verify_bpb.py shows the LUT byte count matches SentencePiece decoder bytes exactly on the scored slice (delta=+0), and that count (151,078,879) is bit-identical to the runtime eval_bytes printed by train_gpt.py for every seed. See the records folder README for full diff vs the ternary record + the verifier output. 
--- .../README.md | 261 + .../fineweb_16384_bpe.model | Bin 0 -> 511362 bytes .../fineweb_16384_bpe.vocab | 16384 ++++++++++++++++ .../quinary_seed1337.txt | 1689 ++ .../quinary_seed42.txt | 1689 ++ .../quinary_seed7.txt | 1689 ++ .../requirements.txt | 10 + .../run.sh | 74 + .../setup.sh | 130 + .../submission.json | 34 + .../train_gpt.py | 1652 ++ .../verify_bpb.py | 295 + 12 files changed, 23907 insertions(+) create mode 100644 records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/README.md create mode 100644 records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/fineweb_16384_bpe.model create mode 100644 records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/fineweb_16384_bpe.vocab create mode 100644 records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/quinary_seed1337.txt create mode 100644 records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/quinary_seed42.txt create mode 100644 records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/quinary_seed7.txt create mode 100644 records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/requirements.txt create mode 100755 records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/run.sh create mode 100755 records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/setup.sh create mode 100644 records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/submission.json create mode 100644 records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/train_gpt.py create mode 100644 records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/verify_bpb.py diff --git a/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/README.md b/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/README.md new file mode 100644 index 0000000000..e62dca5ff8 --- /dev/null +++ 
b/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/README.md @@ -0,0 +1,261 @@ +# Non-record: Quinary quantization + SP16384 + per-group lrzip + TTT - bpb 1.1384 + +**Quinary {-2,-1,0,+1,+2} weights (5-state, base-5 packed) + 10L (5 Encoder + 5 Decoder) 576d U-Net + Muon + 4× relu² MLP + Tied Embed (380→576) + Poly5 Softcap + YaRN 2048 + SP16384 BPE + FP8 QAT + 5-bit Scale Quant + Layout-Aware Per-Stream Archive + Score-First TTT (3 epochs, fp16-calibration-only)** + +**bpb 1.1384 ± 0.0009 std** (3-seed TTT mean) | **15.72 MB** total artifact max (all 3 seeds FIT) | 8×H100 SXM, 7,800 steps in 599s + ~3.6 min TTT-eval + +## Results (3 seeds, 8×H100 SXM) + +| Seed | TTT BPB | RT BPB | Total bytes | +|------|------------|--------|-------------| +| 42 | **1.1381** | 1.1626 | 15,714,938 | +| 1337 | **1.1394** | 1.1633 | 15,721,124 | +| 7 | **1.1378** | 1.1622 | 15,724,839 | +| **Mean ± std** | **1.1384 ± 0.0009** | 1.1627 ± 0.0006 | | + +## Motivation + +Quinary {-2,-1,0,+1,+2} is one step above ternary {-1,0,+1}: 5 levels per parameter instead of 3. Per-symbol entropy floor: log₂5 ≈ 2.32 bits/param vs log₂3 ≈ 1.59. Fewer parameters fit in 16 MB, but each is finer-grained. We ran the comparison to see which side wins at this budget. + +Beyond the formal comparison, there was the sport of it: how far can the extreme-quantization regime actually be pushed under the restrictions of the competition (16 MB total artifact, 10-min training, 10-min eval on 8×H100 SXM)? The current leaderboard frontier has drifted far away from it (PR #1855, PR #1851, et al. use int6 + LQER rank-4 corrections rather than ternary/binary), and quinary is the smallest step away from ternary that still stays squarely inside the discrete regime. We wanted to test whether that smallest step buys enough to be worth taking. 
+ +Empirically: chunked roundtrip BPB drops from **1.1842** (ternary record, PR #640) to **1.1627** (this submission, 3-seed mean) — a **−0.022 bpb** architectural win — and score-first TTT on the calibration parameters adds another **−0.024 bpb** on top, landing at **1.1384**. + +## Diff from the ternary record (PR #640) + +This submission is a direct quinary fork of [@CiprianFlorin-Ifrim's ternary record](https://github.com/openai/parameter-golf/pull/640). Everything not listed in the table below was inherited unchanged: U-Net topology with per-block residual mix and learned skip weights, 4× relu² MLP, Muon (3 NS steps), factored tied embedding, polynomial-5 softcap with z-loss, YaRN (max_len=2048, base=5000), fused QKV, FP8 QAT for non-quantized linears, FlashAttention-3, 599s wallclock cap. + +| | Ternary record | This submission | +|---|---|---| +| Quantization | ternary {-1,0,+1}, log₂3 ≈ 1.585 bpw entropy floor | quinary {-2,-1,0,+1,+2}, log₂5 ≈ 2.322 bpw entropy floor; raw storage ≈ 2.667 bits/param via 3 quins per byte (post-archive cost depends on the entropy coder) | +| Group size | 128 | 192 (3 groups per c_qkv row) | +| Scale storage | fp16 | 5-bit log-delta scale quant (−141 KB, +2.1 mBPB TTT cost) | +| Tokenizer | SP8192 | SP16384 | +| Model dim | 768 | 576 | +| GQA (Q:KV) | 8:4 | 6:3 | +| Embed bottleneck | 254 | 380 | +| Compression | single-blob LZMA | layout-aware per-stream v2 archive (header `0x03`) | +| Eval | stride-16 sliding + temperature scaling | score-first TTT (3 epochs, fp16 calibration params, 42,364 ≈ 0.08% of model) | +| Param count | 73.7M | 52.8M | + +## Architecture (config) + +| | | +|---|---| +| Layers | 10 (5 encoder + 5 decoder, symmetric U-Net) | +| Model dim | 576 | +| Heads | 6 query / 3 KV (GQA), head_dim=96 | +| MLP | 4× expansion, hidden=2304, relu² activation | +| Embed | tied, 16384 vocab, 380→576 bottleneck | +| RoPE | YaRN, base=5000, max_len=2048 | +| Softcap | poly5, cap=10 | +| Quinary group 
size | 192, per-group absmean (`scale_correction` exists per group but is inert by design — see Score-first TTT) | +| Optimizer | Muon (matrix params), Adam (scalars + tied embed) | +| Batch / seq | 524 288 tok / 1024 | +| Wallclock cap | 599 s | + +## Tokenizer (SP16384) + +Custom-trained SentencePiece BPE with `vocab_size=16384`, trained from scratch on FineWeb-10B using the upstream [`data/download_hf_docs_and_tokenize.py`](https://github.com/openai/parameter-golf/blob/main/data/download_hf_docs_and_tokenize.py) pipeline (a thin wrapper around `sentencepiece` BPE training). The tokenizer model + pre-tokenized shards are published on the HF dataset repo [`deniskurlov/parameter-golf-fineweb-sp16384`](https://huggingface.co/datasets/deniskurlov/parameter-golf-fineweb-sp16384); `setup.sh` pulls the `canonical/` subset (~23 GB total — 117 train shards + 1 val shard + the tokenizer `.model`/`.vocab`) and that is sufficient to reproduce the run. The competition's 16 MB artifact cap applies only to the model bundle; the tokenizer + tokenized shards are pre-published infrastructure that any reproducer downloads once. + +Doubling the vocab vs the ternary record's SP8192 reduces tokens-per-byte from ~0.30 to ~0.246 (fewer cross-entropy terms in the BPB sum) at the cost of a 2× larger embedding matrix — bounded here by the 380→576 factored-tied-embedding bottleneck. + +## Per-stream compression (header byte `0x03`) + +For each quinary tensor, two-stage choose-min: + +1. **Layout selection** (LZMA-screened): generate 4 candidate byte-layouts and pick the layout with the smallest LZMA9-compressed size. + - **base5** (canonical, 3 symbols/byte) and **base5_T** (transpose, then base-5 pack — wins when columns are more locally similar than rows, common on MLP projections). + - **bitmask** = three bit-planes: `zero_mask | sign_bits[over nonzeros] | mag2_bits[over nonzeros]`, giving the entropy coder homogeneous planes to model independently. Plus **bitmask_T**. 
+2. **Compressor selection**: compress the winning layout with both `lzma9` and `lrzip-zpaq -L9`; keep whichever is smaller. + +(LZMA-screen rather than full 4×2 keeps serialize wallclock bounded — `lrzip` can be slow on bad streams; we only invoke it on the chosen layout.) + +For `c_qkv.weight`, the row block is split into independent Q / K / V sub-payloads first (Q, K, V have different trained weight distributions, so optimal layouts differ per part). The legacy single-blob lrzip path was the source of the seed-7 OVER cliff (~33% of seeds went OVER 16 MB); per-stream v2 FITS at ~15.65 MB (model only) / ~15.72 MB (model+code) across all 3 seeds we tried. + +## Score-first TTT + +After loading the artifact, freeze all quantized and FP8 weights. Adapt only the **fp16 calibration parameters** — `attn_scale`, `mlp_scale`, `resid_mix`, `q_gain`, `skip_weights`, `vocab_bias` — **42,364 values, ≈0.08% of the model**. Process the val stream in 1,134 chunks of 32k tokens with sliding-window stride=16; for each chunk, **grade tokens first, then train** (legal under the rules). 3 SGD epochs, lr=0.005, momentum 0.9. Eval time ~215 s of the 600 s eval budget. (These params are tagged `CTP` in the code — a holdover constant name from the ternary base.) + +> **Note on `scale_correction`:** the inherited `QuinaryLinear` carries a per-group fp32 `scale_correction` parameter (~190k values across all quinary layers, init=1.0). Its gradient is blocked by the STE detach in the forward pass, so it never updates from 1.0 in either training or TTT. We tested fixing the STE on 2026-05-01 (one seed): no TTT benefit, ~2 mBPB training-side regression. Reverted. The parameter is therefore inert by design; it remains in the state-dict at value 1.0 (compresses to a few KB) and is excluded from the TTT optimizer's adapted-parameter set. + +### Validation accounting + +The validation manifest has **37,147,047** token IDs, hence **37,147,046** possible next-token target positions. 
The eval loop truncates the stream to a multiple of `train_seq_len=1024`, scoring **37,146,624** target tokens — the final partial 1024-window (the trailing **422** target tokens) is omitted. From the `verify_bpb.py` output: + +| | targets | bytes | +|---|---:|---:| +| Full untruncated stream | 37,147,046 | 151,080,891 | +| Eval-scored slice (`(N-1)//1024 × 1024`) | 37,146,624 | 151,078,879 | +| Dropped tail (last partial window) | 422 | 2,012 | + +The dropped fraction of the byte denominator is **2,012 / 151,080,891 ≈ 1.33 × 10^{-5}**. + +Bound on the resulting BPB bias: BPB = NLL / byte_count, with both numerator and denominator scaling proportionally with token count, so the bias only comes from any *deviation* of the dropped-tail's average loss from the global average. For a 422-token sample of 37M, that worst-plausible deviation gives **|ΔBPB| ≲ 2 × 10^{-5} BPB** — well below the current 3-seed std of **0.0009 BPB** and far below the 0.005 BPB record-submission improvement threshold. The bias is also identical across all 3 seeds (same val stream, same truncation), so seed-to-seed variance is unaffected; only absolute BPB carries the constant offset. + +Fresh logs report `eval_tokens` and `eval_bytes` after both the roundtrip eval and TTT eval so the actual scored slice is auditable line-by-line, and `verify_bpb.py`'s `exact eval slice` `lut_bytes` cross-checks the `eval_bytes:N` value `train_gpt.py` writes at runtime. + +### Verifying the BPB denominator (custom tokenizer) + +Because we use a custom-trained SentencePiece BPE (SP16384), the per-token byte LUTs in `build_luts` are reviewer-auditable. 
Run: + +```bash +python3 verify_bpb.py +``` + +The script independently rebuilds the `(base_bytes, has_leading_space, is_boundary_token)` LUTs from the `.model` file using the **same shard loader as `train_gpt.py`** (256-int32 / 1024-byte header, magic `20240520`, version `1`), then for each of several stream slices it compares **two byte counts**: + +- the LUT-based sum used by `eval_val` and TTT eval (`base_bytes_lut[tgt] + (has_leading_space_lut[tgt] & ~is_boundary_token_lut[prev])` — parenthesized, since the leading-space charge alone is masked out after a boundary token); +- the SentencePiece decoder's UTF-8 byte count for the same tokens, with documents decoded BOS-by-BOS so the LUT's "no leading-space charge after a boundary" rule and the decoder's "no synthetic leading space at sequence start" behavior are aligned. + +The slices checked are: **the exact slice scored by `eval_val`** (truncated to a `train_seq_len=1024` target multiple — this is the slice whose byte count drives the headline BPB), the full untruncated stream, and BOS-delimited document prefixes (single doc, first 10/100/1000 docs). The script also reports the tokenizer's SHA-256 for provenance and counts UNK tokens (must be 0 — the LUT assigns 0 bytes to UNK, which would silently inflate BPB). + +If every slice reports `delta = +0`, the LUT denominator matches SentencePiece decoding on the scored tokenized validation slice — which means BPB reduces to `cross_entropy_in_nats / (decoded_bytes × ln 2)`. **Note**: this proves *internal consistency* between the LUT and the SP decoder for the supplied tokenizer + supplied tokenized shards. It does not by itself prove that the shards correspond to the original canonical FineWeb validation bytes under the intended tokenizer — that is a separate provenance question, addressed by the tokenizer SHA-256 line below and by hosting the tokenized shards at the public HF repo `deniskurlov/parameter-golf-fineweb-sp16384` so reviewers can compare against an independent retokenization. 
+ +**Verified output, on the canonical sp16384 stack (2026-05-01)**: + +``` +tokenizer : data/canonical/tokenizers/fineweb_16384_bpe.model (vocab=16384) +tokenizer sha: abaec140336563026d65c1b7192d47a2b8c81a3bbad0f4d1cd1d852364ac432a +BOS id=1 ('<s>') EOS id=2 ('</s>') UNK id=3 +val shards : 1 (data/canonical/datasets/fineweb10B_sp16384/fineweb_val_000000.bin ... ) +LUT stats : byte-fallback=256 control/unknown/unused=4 with-leading-space=11664 boundary=4 +shard tokens : 37,147,047 +BOS positions: 50,000; first=0 +UNK count : 0 + +eval slice : train_seq_len=1024 target_count=37,146,624 omitted_tail_targets=422 + PASS exact eval slice targets=37,146,624 lut_bytes= 151,078,879 decoded_bytes= 151,078,879 delta=+0 start_boundary=True + PASS full untruncated stream targets=37,147,046 lut_bytes= 151,080,891 decoded_bytes= 151,080,891 delta=+0 start_boundary=True + PASS doc at first BOS targets= 295 lut_bytes= 1,339 decoded_bytes= 1,339 delta=+0 start_boundary=True + PASS first 10 BOS docs targets= 5,592 lut_bytes= 22,919 decoded_bytes= 22,919 delta=+0 start_boundary=True + PASS first 100 BOS docs targets= 72,484 lut_bytes= 291,960 decoded_bytes= 291,960 delta=+0 start_boundary=True + PASS first 1000 BOS docs targets= 706,682 lut_bytes= 2,851,493 decoded_bytes= 2,851,493 delta=+0 start_boundary=True + +ALL CHECKS PASS — LUT bytes match SentencePiece decoder bytes on the eval slice. +``` + +End-to-end audit cross-check: the verifier's `exact eval slice lut_bytes = 151,078,879` is **bit-identical** to the runtime `eval_bytes:151,078,879` printed by `train_gpt.py:eval_val` and TTT eval for every seed. Cross-tokenizer sanity: rerunning the same verifier against the upstream openai/parameter-golf SP1024 stack on the same FineWeb val source gives identical byte counts of `1,339 / 22,919 / 291,960 / 2,851,493` for the first 1/10/100/1000 documents (different tokenizer, same source bytes — the decoder agrees). 
+ +## Setup and Run + +```bash +# Environment setup (lrzip + Python deps + FlashAttention-3 + dataset) +bash setup.sh + +# Single seed +SEED=42 bash run.sh + +# 3-seed sweep +for SEED in 42 1337 7; do + RUN_ID=quinary_seed${SEED} SEED=$SEED bash run.sh +done +``` + +
+Representative run command (subset of run.sh env vars — see run.sh for the authoritative full set) + +The block below mirrors the env vars `run.sh` actually passes (model shape, optimizer, TTT, etc.). A few minor knobs that `run.sh` also passes through (`ADAM_LR`, `ADAM_WD`, `BATCH_TOKENS_START`, `BATCH_SCHEDULE_FRACTION`, `SEQ_LEN_START`, `SEQ_SCHEDULE_FRACTION`, `VAL_LOSS_EVERY`, `TRAIN_LOG_EVERY`, `CHURN_LOG_EVERY`, `VAL_MAX_TOKENS`) are not duplicated here — `run.sh` is the authoritative source. As of Tier-4, all defaults in `train_gpt.py:Hyperparameters` also match the canonical SP16384 config, so a bare `torchrun --standalone --nproc_per_node=8 train_gpt.py` (no env vars) reproduces the submission. + +```bash +RUN_ID=quinary_seed42 \ +DATA_PATH=./data/canonical/datasets/fineweb10B_sp16384 \ +TOKENIZER_PATH=./data/canonical/tokenizers/fineweb_16384_bpe.model \ +VOCAB_SIZE=16384 \ +BITNET_GROUP_SIZE=192 \ +EMBED_DIM=380 \ +NUM_LAYERS=10 \ +MODEL_DIM=576 \ +NUM_KV_HEADS=3 \ +NUM_HEADS=6 \ +MLP_MULT=4 \ +MATRIX_OPTIMIZER=muon \ +MUON_BACKEND_STEPS=3 \ +MUON_MOMENTUM=0.95 \ +MUON_MOMENTUM_WARMUP_START=0.85 \ +MUON_MOMENTUM_WARMUP_STEPS=500 \ +MUON_WD=0.0 \ +MATRIX_LR=0.035 \ +SCALAR_LR=0.02 \ +TIED_EMBED_LR=0.02 \ +WARMDOWN_FRACTION=0.2 \ +LOGIT_SOFTCAP=10 \ +QK_GAIN_INIT=5.0 \ +ROPE_TYPE=yarn \ +YARN_MAX_LEN=2048 \ +ROPE_BASE=5000 \ +TRAIN_BATCH_TOKENS=524288 \ +TRAIN_SEQ_LEN=1024 \ +ITERATIONS=10000 \ +WARMUP_STEPS=5 \ +MAX_WALLCLOCK_SECONDS=599 \ +TIE_EMBEDDINGS=1 \ +HEAD_LR=0.02 \ +ACTIVATION=relu2 \ +SOFTCAP_TYPE=poly \ +TTT_STEPS=3 \ +TTT_LR=0.005 \ +TTT_TOKENS=32768 \ +SCALE_QUANT_BITS=5 \ +SEED=42 \ +COMPILE_MODE=default \ +OMP_NUM_THREADS=1 torchrun --standalone --nproc_per_node=8 train_gpt.py +``` + +
+ +## File Layout + +``` +. +├── README.md # this file +├── submission.json # OpenAI-format submission metadata +├── setup.sh # apt + pip + FlashAttention-3 + HF dataset download +├── run.sh # canonical training+eval entry point +├── train_gpt.py # complete training, compression, and TTT pipeline +├── verify_bpb.py # standalone reviewer-runnable BPB-LUT check +├── requirements.txt # Python dependency pin +├── fineweb_16384_bpe.model # bundled tokenizer (sha256 abaec140…), provenance copy +├── fineweb_16384_bpe.vocab # bundled vocab (provenance copy) +├── quinary_seed42.txt # 3-seed training/TTT logs (one per seed) +├── quinary_seed1337.txt +└── quinary_seed7.txt +``` + +The bundled `fineweb_16384_bpe.{model,vocab}` files are inspection / provenance copies — they should be byte-identical to what `setup.sh` downloads from `deniskurlov/parameter-golf-fineweb-sp16384` into `./data/canonical/tokenizers/`. Tokenizer files are *data*, not code, so they don't count toward the 16 MB cap (`bytes_code + compressed_model_bytes`); their SHA-256 (`abaec140336563026d65c1b7192d47a2b8c81a3bbad0f4d1cd1d852364ac432a`) is also reported by `verify_bpb.py` for cross-checking. + +## Compliance + +- [x] Artifact ≤ 16,000,000 bytes (15,724,839 — max across the 3 verified seeds, seed=7; per-seed values in the Results table; margin = 275,161 bytes) +- [x] Training ≤ 10 minutes (599,436–599,772 ms wallclock across 3 seeds) +- [x] Evaluation ≤ 10 minutes (TTT eval 212,250–215,427 ms; non-TTT roundtrip ~80 s) +- [x] Score-first TTT (CTP params only adapt on tokens *already* graded) +- [x] No network calls during evaluation +- [x] No external compute +- [x] No access to validation data during training. Validation shards are loaded into memory at startup but are consumed only by the post-training eval / TTT functions; under the canonical run `VAL_LOSS_EVERY=0`, so no validation tokens enter gradient updates before they have been scored under the score-first TTT pattern. 
+- [x] Reproducibly runs end-to-end from `bash setup.sh && bash run.sh` on a fresh 8×H100 SXM pod +- [x] BPB byte-count LUTs match the SentencePiece decoder's UTF-8 output exactly on the slice scored by `eval_val` (`exact eval slice` check) and on BOS-aligned document slices (run `python3 verify_bpb.py` to reproduce; see "Verifying the BPB denominator" above) + +## Our contribution + +All adaptations to the quinary case are ours. Concretely: + +- **Quinary `QuinaryLinear`** — 5-level absmean-scaled STE quantization (`clamp(-2, 2)`) replacing the ternary `clamp(-1, 1)`. Per-group (192) absmean scaling. The inherited `scale_correction` per-group multiplier is kept in the state-dict for backwards-compatibility but is inert by design (see Score-first TTT note). +- **Base-5 packing** — three quins per byte (max symbol value 4·1 + 4·5 + 4·25 = 124, fits in `uint8`), with a paired unpacker. **Raw storage 8/3 ≈ 2.667 bits/param**; the entropy floor for 5 equiprobable symbols is log₂5 ≈ 2.322 bits/param, so the raw packing leaves ~0.34 bits/param of headroom for the downstream entropy coder. Ternary base-3 packing is the analogous 5/3 ≈ 1.667 raw bits / log₂3 ≈ 1.585 floor. +- **{-2,-1,0,+1,+2} bitmask plane decomposition** — alternative encoding as three concatenated bit-planes: `zero_mask | sign_bits[over nonzeros] | mag2_bits[over nonzeros]`. Each plane has homogeneous bit statistics so the entropy coder models them independently rather than fighting a multimodal mixture. +- **LZMA-screened layout selection** — for every quinary tensor, materialize all 4 layouts (`base5`, `base5_T`, `bitmask`, `bitmask_T`), screen them by LZMA9-compressed size, then run LZMA9 vs lrzip-zpaq only on the selected layout. Bounded-cost heuristic with an LZMA floor — *not* an exhaustive 4×2 search; in principle could miss a (layout, lrzip) pair that beats (best-LZMA-layout, lrzip), but in practice the LZMA floor caps the worst case at the canonical base5+LZMA encoding. 
+- **5-bit log-delta scale quantization** — per-group fp16 scales replaced with anchor + 5-bit log-delta; saves ~141 KB at +2.1 mBPB TTT cost. Net Pareto-positive. +- **Quinary fork of the architecture itself** — the rebalancing of `model_dim` (768→576), GQA ratio (8:4 → 6:3), and embedding bottleneck (254 → 380) for the higher per-param bit cost of quinary, plus the SP8192→SP16384 tokenizer choice and `group_size` 128→192 to fit the new model dim cleanly. + +Empirical effect of the bundle is in the Diff and Results tables above. The seed-7 OVER cliff (single-blob lrzip OVERed on ~33% of seeds, including seed=7 at 17.23 MB) goes away under the per-stream v2 archive — all 3 seeds we tried FIT at 15.71–15.73 MB total. + +Note: with the v2 archive, seed=7 — though still the largest artifact of the three (15,724,839 bytes) — now FITS comfortably and is actually the **best-scoring** seed (TTT BPB = 1.1378), not the worst — the prior cliff was an artifact of single-blob compression interacting with seed=7's specific weight distribution, fully absorbed by the per-stream layout choose-min. + +## Acknowledgements + +This submission stands on others' work. The architectural foundation, the compression pipeline's core ideas, and the score-first TTT pattern are all upstream of us; see "Our contribution" above for the quinary-specific extensions. + +- **@CiprianFlorin-Ifrim** — [PR #640: Ternary U-Net record (1.1570 BPB)](https://github.com/openai/parameter-golf/pull/640) — architectural base. +Inherited: the U-Net topology (5+5 enc/dec with learned skip weights and per-block residual mix), Muon optimizer settings, FP8 QAT for non-quantized linears, factored tied embedding (with a narrower bottleneck here), polynomial-5 softcap with z-loss, YaRN positional encoding, fused QKV projection, and FlashAttention-3 wiring. +- **@codemath3000** — [PR #1855: per-group lrzip+brotli compression pipeline (1.06108 BPB)](https://github.com/openai/parameter-golf/pull/1855) — compression pipeline core ideas. 
Inherited: splitting the artifact into multiple compressed payloads instead of one monolithic blob, treating per-tensor byte layout as an optimization axis, and using lrzip's ZPAQ back-end for long-range cross-payload deduplication. +- **Score-first TTT lineage** — [@abaybektursun's PR #549](https://github.com/openai/parameter-golf/pull/549) and [@clarkkev's PR #1394 SP8192 stack](https://github.com/openai/parameter-golf/pull/1394) — origin of the legal "train only on tokens already graded" pattern. \ No newline at end of file diff --git a/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/fineweb_16384_bpe.model b/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/fineweb_16384_bpe.model new file mode 100644 index 0000000000000000000000000000000000000000..bd92a6a8466b8eea987054978d2d5154c5d81c7f GIT binary patch literal 511362 zcmZ6Udz@w2Ro4$B1Za{_Ji_~N!#j{nYTh#;z^Sh4p6Rky0{OjX^R zdUVgks0TzuL@WUj5%7wLh=?U3A|jkTKt#k65fKrO5fKrwL_|bHzrXd{dv`wP{g9*^|p@kn1DkM!m7 zNM9b0^yTqLUmlP2g9*^|p@kn1DkM!m7NM9b0_T}+tUmlP4N?#sV`|`Nj zm&et-dYLke z_+6W_JmOpRjs)%kmfoREXG$}O;8-jyNAO^aVC@Je1cy1{CSVlwsbbF~I8okBj6Mq>AEmxKjqJWR5~(_IF9e+JJ2ejp3{b_@hRnE8l~`$HMU zw;zuD4mIlhh-R`W?Ilcw>^~ABn{~Hl!u5s%(Be?lI!HbA7K2|Qj z72(N|s_@&nsA$FC$yL}4W%%8DP}=udFug_N z9&rg#_M;9TN+}-893RA>{HG1=X_&Meq)x=lSKO*XBO@AxjEWgBed#nT#d;wanSw=!#e3(=;0Cs>3lRJ`v%=m;|roB;%;( z^~gAS*^?2Rsa|h#P%Db3A~@FAaU+5;bUf|cqYK`Qs?JWL0}XS%pCiHNyWEMgyybi} zy{%^!c0(_3$Cvx=i;(P9kzk|=OnP5z}pp3M~H%gkY{BXdS$u|AGS^BBlB@|WwHNH!zMRLZ-Rm3MQt zFFbL9G45~Y3VSZu3C5UbrLnsKXbfrcH0OSzES_@~F>E}av)G8~XfH<$#of;z@5Y>Y z5M0gG1!fW4lQ`xPJXA4nb(tHXsfRhS2g9Qb-_v|6QI9R4Xy}s&PooFk$qY;+hr7A* z`yS$7Fy~XL_=`>zYV)=znB~8e_Yb{`E>}1jP|41(ENEP{` z5sj6Ns?s=`#Ly6C_SjSdetD2*Qs#y*^9@s;+VU&>K@0tcu=g}?;=>=yW$cq}Q_zuS z-mi>Au{vxBQ^@pfkpFm)$5FP!8&R>Z%F>{s$_(s{z`B~`Ph=#CG$68zdVRH7)3DSK z_Eu>YlK+z~Nea`XaIAK}CW~;q5vtWdAiL-#e=5>UrGO1#$Xgm$zBV(*)DAlhm|1!p 
zv*e$SXfw%%pk?!B(tKTJdCIiXfV5YNuJmW}mra_w0ZBFz*{Owopsw5i(xP#8sGjy` zbM2-FG*$ythC9-vug@8tfEtj#L=X6e2p>+AA>kyRZLO2OF(*==Zb1HccUS82=Q7;a zeA58o@q~!gH${%)U@?a^*8KV4{cxhCPyiypu+@0*HEW(hdV&dB(I96T+T~SZc-|p(X zC?#=ta;OR8FJ;zNsa#Xgvl77~XAyH)14P~@(Zm08)__kU6~HQdB-XyeC28T*O7XZtHBnCh*cVYwg zw=yr|0}UcT5=}Q_LO)A6gz5s!%X}Ow!@q3}q3Iif{Y7G^zLxM4>2(k^*SkSi}5XS514dhOkxgaH3)O2Xlr_C_y-wj@791) zw~5jEM>3C0o@!dK6++X+Kg=ng*C!CA&0T@IXN6kSp8Ve?qH$`rvZz@SRJ=H zx4EPXQi~qO;_n?1Z4x6q;ou-vW$(=S9gSs@Xut^mP(%5AsoDN3=by}0!X$|(Ta*_>o%cj8bmW1*Pk!l=sF=5MpJ1OSBj*l~C z-w<|d?~iTI$e1;+y7wW-#@|By@Blz1B;six5bekCd)>xR@B#Fy<8HNt<) z`8|IS4F|TUseUZYel@GQQUa8xRt436%7S@Xtx!W(U+w}2Gdz=ZU1w8p|5D@ z0$46bu|@dx$PxXG2?<2aCK^QlOJ-`~1$G1BLoQv4ReDzU-^h9GtN%7Yw3Assboakz zUJuk0KyKpML2Ph-GrrE#=neAMjd83d|69iUu_JE4$Y2sn-HV*>hE@smJckCI93D;qEsVY-WyznyCt8=eM;jPB9^)&I<2He!_k zbnl#N@%TIDB>D?s3*Ei*e`R=lSF!;*-6p8|-N^q1Re&JXj^%mvzavclf=<}kjYA^8 zm!TUG2!p}H|8YeQJXM2`$DYOb`-weCgRm8FoJQ3D8(GZMs07X2x@!CfP8G{*!elpg zmHuBA;e0Q20nEp3msP!60M&W>tEw8HO35_Tln`ig_-|tN~UGwl}IHm||mQZyj-S8h!sI@!>?Bqjq=_D+bF07Oo+X{aEXdhA`E2H)sv0`=^lX{ob7#ZdL7A18J$T)+y!7hPrGE zwA5ELnZ7KeXT&7PH9I=iRMvo_Xj-c!OpY63@%@7f<`J<6u*O`KeW>v_?2szUfJLx| z(GSR(@5G+DHjb6Nq}707Z2Wxm@fMT@6Ya*T56paOQm8@FkE0mk4W`?gky?P04a0_h zDZ>~JYoJso3qpyf1Iv;^UD?!NQExxj51P-~#m?WApK`M#nNyamGei?NhkiV{x_T|*-j zagFjDP!grG!@IJRwS=8+D|QGU7ST?$ovR7EjhTC>bOTO}bHMVT1tO}~+(&62ZmFr^2~y5< z6A3mBALl(NsR?Ideuh;Z<9T#jKd5Y@JW~g$9ozKTY$6J^G7usc2k2xp;CKOSv?v6MUJC_gq^X4@^Lw*jfvE?E;xy_;H_w^7EE3C zCN@!hd``^>tfmCJrt6wmKvD(`J*~sGaehMZcCx3fWeq@&-p-`d5OjMU`pBS#KnAyC zjr56brNhuofGcqnb3Tw_H+N%s-8$yf7REu2xf!JaQ&VU|4zd{O#$vKWemWLXZg#?Mb?=h~gcj(Gj=dEBq<3HX&`TdgdoYpE1ZuMy8p$Uzc{GI0|MplTVu90G ztbcOEJkUs66Gl8!&afJSE`(U>Q()I+8-=MpB_hh)R1@?vn+;Fc@fs*DZ>-%-3nV4u zQ>iF`TK=}F=Ba^ZW&>s#khoSNg64W} z7Aw+E&v@U%448xrcwraanC+((T8bn=BViX`uw4K9f5JwvyJ|7MZpZV^z=#H&{R(mxhV$=BmP5I7# z&MUDN5;@Oe9sOA$j4@4E6SS1BQhehNuX!y}B1rthuqnPaSMTmbn}E7ttW6Rw*sZc9 z6GRJiYi~?EU01Km{fP5It%Yg`E_PMvK}lIBQP~omW2-ndy3Z0jj;;ovDZ{Ex&;@U5 
zv~7W*DQc3}M|d8CfWvHND2uSW^mu0+Jq3dEIEusGWy_HrK5PQa<_h&{K#qAw*0I(x znYod8b$|YLS?4b`5KcCCRU%>dl}V>{aIC|D2O?`sFR|nU(RzE~Cz}p)J_u z+J5(7Zu~9g!I*|>NUqezBxd!d5Ll0BRo+5yp4fskwm%dF5*D`rxQ*3GL)eYS;yE<5 zg#!Q4RUVF16AuIcXL%I;rU6@6PpQE2l?9U8(*U3W5RJ12b+;DeHrdx?QoSM9eMM>mN|l>MPX^~@L0GEWLO9+H%SrXw zQECjfHR03(m_5R#=dWYg2-_46JM|k={WLe!fTYK93Ii0oHk-8|Dt{Pf;GdJZW6p&4 z8tisE(g+Kp&P*Coaeo?neOG;ME@3XKMqS_}j06Sl>r|};S_TMN68gNHcvIc121>Md zLOuZ3c$_Q_Efnz^K5KP^_moTR@Kl|%A?y;i-JsE2h zX$ocPoKLUL0oBU`J=663b8s zYSf)*)DnAczk;i73Kj*F3UJcRT^rXSST09tc5b1-SZsc)Cvr76m|1HOUl<->nXsxk z(@Av;uy9zV!SA)q??xEqYOp^a$ScgD|g1C>p7GWn2bKaXuna3&$DR;FfI8Es*hRECc?HoNh(aYz-vKY0U71tHruw@ zRZm9^82n>(V8n7|opx$Kj*L}CV_OTsx$R9gAyhX*s%e=Bpx8TVjK<+lu4>T7%PAs)OAI2%|e>+!Ay{ z@8`AH?O-krk88q|GCmGht_GxDXLSTP%WXA!^-PAFVyFhXMUIZcx2b_Z+Rcf)G+KbA zF+?`io8uF+wfZX{B}_{{2$oOG{I>hH5MB5RoU2z~l<~|Y8lh0NIbhA=eoE)}L z+{rZ`gbk_s;t<357@JN_m`br8MkWowg*`tJuPvC0aP}AT*Oz2zth3OmK_sLMO+tXw zLs~P$BW%e3v}aPgp0;Pmu)#{C)Gh@l- zp$-Z)I!R~ryGeyru*0)BT_pVBmtH{&hS5A(BU`A>Z7W#|t7#NAge?^vw{dzy zQb{_WWJHllmKgH1(pI#ZMGldG96-)tC5i2n?~IA#cxoTt*&s zL1Byop$l`dOVE`$4mYtsoJ-nDJMCT^;Mk&385Fb_K(i z#)m(aMR#4522tibdT3of-GrSHC}}|Kl$wIs(!k*YYnV`kIp%HUY;FiX^7HBm%Ldm-FaWk#} z{PE2Het3Niz-qn&ZG&n+%2yuYBcSUt=3+xw8X8Dy`=c0Q79a})GZmWA`fKVw9Y|wS z7!JQGXNC30dy)Yp>Lb74nxL3C(VBP!aHcypz=O@yU0F&O1%{(l3nZ5@w+T9bnaU=u z$>L9BapH*bn|*q|My%7CpjgkQXbIEZH8550Vc;V`IS!$DQw5Ol5#AI zUG*m;Q}#6!=kTG1mzuD1+@Hu~Fe-RE4m}&N^=*2<8k`slDAt@6P+Q{D`PlMwV8nBH z63!)GlQUn9X7=Y$K4$JqSM^fLmfKHjfLiMH61dEs$Jd^y>(d3tc4U z!ap6k%;(~M06-(+NQ|H>zNaGqii1!pZbxY-?!0n*3vL%B)qz)kEd=v%B!%h9_x*H1 z>g%HTX_z|)0N3wn#)cXso?{AdwC!->XaHsnH&}Iz1q2E~OJdGyfn+oB2-ktk_as@W zsy`DM?`b?55O!Yht5cR5f*D4IsWmXoCb+zKK5C#@)+{+_(M7@A;knX+ljvsQE8RhG zo;Sij?=&^srWgP?GpuUVr7jpp9raj0PpiD*=~ziZc|DQg5o9quw9G$rcfAf~$o1ZPdRkXnP?c0)4i?tNt? 
zL6`da+z3aQXEiYmptx*~QnhKQAz0FQgeHIQ5i~XI;c=X!H6<6#d|LA^N}{kOqtkE# z4SDS7dq>z@GIqgN^$l5?7^YzY=+DPKrSfOgDzte7wCdn&8k7c#Yk7mVTa@H?!b!3< zmYZA1w=3TdXPQ{5Z_K5~P%;3zbao|tuE7?`d=_VeBM6i>!@QvB0z|!H8eNp6Y^Lc~ zEts*urufduXvIxkgZXpuX>X<#-Trdi@_65Kz zXBxgK+=5eI6rUXsK7Bg(%@LkzULO!PXOn&G$CL+w!lw)|lm~Do&uJ8I(zio(ibVo( zoYfkPkF^x&02b3*Ic@SW;$LvCaeg-dIM*9m=+{74V_2m?!q(#0I#NvYUX-m^qb;D& z)8#qd5)`w0vWRp53ulLVRNs=B&}t^S{Qy$Z*~6K}fw~Z|q-p6mf?&?}!rc%EXEQD4 z7KADFc?xP1S^Jx=q?p9@+Eeo#$Gc~zAHxusZuAICDN zg=8VI0??kjD}>`&c=}b}7VPOh)PSHnWqgY@aShG3vu6(OZ7xSnq+_v$px8Uo;n5-i zHl@_o0E@J$3qV$5o4>1X&vn$PHzkA3Ca1O`S_rNR$M0U?j!K-l{n{D`){s^BSQNAc zBQ1!=IMMF8BkXF)*iGMtx9TrtIj(A692B@G2g(|#I>pX%1h5ig!|~;ULXkaD3&sT$ z^RIg^QYr}M-xgVl>>$AsHeQNfcq2JP zF2K|$j&s`*^`?$TJAayOvQ4hOBNsZhN74WS>86Joo(P*q9rS5MGlF84u%c7@=3Wd= z4aD@K#Ng7PL=6tjibYhH5WZHD^Ir+6dJY@_+$Qo8@sbM?yzD6Mh_KsmmKJ+X%oYd@ zSpdw}K^Vu|5}V^bZ2~(8WVIX3`<zW;P~aR%;=+X;w5&6|{!xSAR7}rq53??GGTpODr^M!fwS)U7H*M(KBC`YYaFR zW!tiFq7%6Sw}dH;eMeLcWWnLx;scuMyDZpP9t;5H?FN;qL1vwkQY~~x5X`~4x?ba? 
z7|2-XEdX%3uN6~EScTe$yaQP^CUjbvIjX-FaoJ>1L&7dZn{#APLvSG*I)opU@45{s zJ#u0;`V>moO~R?2jK(GHm8hDnod{AH#!X+t=T8Q4HQ*8B5V zH+x%$;}EVR4Cgv7@g0l58MVyj%mL69+Y8H24W>+()EUkQgbc8*Yb-XADPh-e zM<){MCQ!^0rXM?ZbdYjeN`t;H%K-OeWjKJqcZR&!9Mt7|GK`KOz}xg~$q&8(yUR2D)t&Aj8eV&)zq4xvv+T){&w(3SJ#qz05X3(I}<{yp{N7NEq+AU zX}PbbDHe93xLJ2;tNVGgv4T4d4Xg!lNV<7{SIqB91w;#`5o2-o&P+Z2pNIN zfpb@1EHb2H--rT%?rK-*1ue+MGoZ$TwGg(wFsc4dj-d;YDj5PL%NwR9=#nW)-PoU2 zZc|AIZ(X5^1idoTggg#k^R}eq+(KGa@ZGrDQ2jub=y=n|@B=6=YZGG@WrNe+GMqf4 zPeVhpI{Jl!l7OQl#{b28V~T9S2!bo2s#RCMvJs~$)!!}Ib4um=AOmPgy>;=j2GTT# z+Jue>n>(AOni3Ts8_F(lu10Nv6n$bJo6c!DXWsIa%^%EK$(UqJ`wFaO#c|!^yG%a!DWEpxEnycnOjC9T1*RG7!&3Dl5jTv21Hx$x zO82=Sxcb*^uW@AdSk$&e<|rka*&k10q9*K%4#}wlQ>dfa**fXx6) zFGZ*!SPpAyn^B3sG1D~O6uch?!V9pv!-1S~TL4lBkFCxhX7-Q%(fBac$?t%$IpR=7 zk_JnY;x7Ftfw)oKl$cM(9IyZ**q9iT(a03^h)r5pEIKHz4?bz)srs?Vp7`j;z<~H~ zpkRQwmAmzbuxq`k^YW%Z%h}0rn}`1vVE#A5RMdfKT=I-(m|AD5z2AT^HN*XcEbH_^ z$2=p#){yIRZ)AFFpg7a5@P1i1DVcr&B|7O$R;-S&=XQqbd&A~CVqRL&I)tEWGI>E0 zWPx{;eB_Ahw<)rGiB~BWKnspn^R!85At0W(^wbfiCanDUP`xwrdM&(@2Vl=e+%il{ z!@9)TjdQ*c*fo}gB0ONBz^5E=b)>W~q{2s9owNnUivS(a;#nm*EW9gc7YCctDkyG+ z=d}6)F;Bm!8WW_PaG)YyQ{vrb<+K2~C$mU8jHxam(=*7bC-8oh` z%ZdRgmivC#n}J!&Wp^8u&j)IUrhLW#jZ7C{ggw=J4J~1+BC9a@b|og4b4XeJxRq)u zC#C_wrTFSe4W{0@wL5ZfEKd<9@lD$Wz=C>0cT+&l;BIUbJFv9?V_cl4{6wzkcAByW zP@LqDDHK_QEgt(hDb)y?<%-jHS`|RDeCBfKTL9tozSqJ7L1(z?4pm}qYyD9Dqs#%S zgB_e&Lm?||>eW6q5ar~k126)(dUN!k0l7L`b}U{L!hTv0I6gM%Xlju8W~@FAJAL(& zIg2AEBsd>Hpae!sp&CTtK9U_7Y=rAn<9K-9$>6mBX|Oq@My;bUGQx1@Xej<&c8b+M z&QkKq9!RS2`>zS7rm^SN2!gA+B3pO)ti6dO?9bzauwb+x$t+phJ0LP=4oWALKjmt~ z5@!H#Q_b`UObv9wawkP4M}@GDj;K>XpfOi&Q7S>?fsqjwT7WZpI@a?*H#F7|s$Tt* z+_KoCVr(5ik!;gk2y*h<+C`5Fd@%-)0yp$^;iSlMO(F)lC|1mH(iOZNyO!#wb3?Oq zvnSL5Qj+g(G++%OkM+DWAE^s5N#;&v|_K*%rbU*N8~bL7+8pc%$zB zG-tM_`DXw`Y`!j616;7L-;IoAmGohiBU9DSq*y>YCW@;Io}(bTPi|uq_k#c(G)hM~ zDg9@1UDxgJIVc1zSu|Zi@G!`L7xyejP|Ov}JjURrB$?wUEro;Neuzr6*UxkTuXvfm@;!X#~Ob;q9YMKR3`oGe0-1?*(%(gtS{Q 
z?D(mX4&aWhJ9&O^?mQR5hCQrM5M1T;SL7UueAAP#PhjH7@8pO*EY2D&8l7~L2Z%$%rNf-O2-^$Wk=1tc#*Y_ds< zF{r9Qnl(X)pCJ=%sQyJrX3V~MKrks5YXyC*p_tpaJv=gu@l|6>`O5uyY7R8FXdRZI zwtT*gC*hf!rP_b*G$UetyT=&Ezl6pM`-ifhAmlIeH^c4rQw_|(f_h>~QuFfITt z{j!=JWYLZFE@20Bc6Y=0U;TVmla0xa`osW&vU*;-hZ=yozW();OVFI*#lrFru|q*d zc=;l8!j_9~h@h-3B(pI@%pE8-il2@AR~aA2>Tgi+Zn!zs;8YyGF`DE@P$b3!Zx|b( z`i1i<2szSUDL)hJ5HB4az z+872*vxy8Z6b~T{K^QE9i4sL`cfopB2sq4WnO6PVoC!x5G9(QO0Uw1aqihYqb${LK zs5=OGZTjUYfTypxv=CO(1vF{=p)UuuKr_irG(BqAL2v^Mxv2&NzZ~@)#}=A!sz9f5p;$0o{{E}5|>3J*)G6H$d;!T;1-OfQ&-@zUO8YBl=9NKrgRR1 zxnvN>QIJNhA(5TmsTu*Ok*}tg#CzJFQgBI3k4c9|DZbt`>q>mtkSVPC_hvM%VhjLo z<#iPba{YJWw08tnxpF>h2%E=n){v!l0SO*i&TBAp+_B}HtM}x_<>`-5l>ro_ z8n?s=TB_^G-L?=gpy-6WDFiKp)Y%H*h>;ytZXsCKbKQ>YfYgY@u`>J*!J=QU9T@CQ z!fDaK^V|rK#K8E}fXo%WQ~qWP$Au-82e8WVHQj~lN(ye@gqh}7vb1PnEK6!+NJz^@ z#Tt;+DGsJa5G;BwfKy6SFoq8B!l`*USz29EKkb`)U5U-kjh3nYW0d80>IQ(1xb(q{ z#AuiN02#<7n;JT;8#cfZVT6e3B=!>x(DJy!j%fihZ%>8?Q0s&oDj;scG^TD8Rlgd+ zE9=yipi^-DO^OA!%wRzWz(gN8y8eTrAxLrPbDSRe!;7%k5_CRCap|c8TWrsAA6ymv zPg%QgRE&2@22iZ0x=0Yl-5QbxgnjM02wPi*>O4(Jk4IWK!7QM-NZvw_Uf;uU1k{1e z0)IXrg{*!p*Dc%-!a_WNMv0p$q9*JNuY^5y1g5gOV54qTz8=KNVd0dsIp12s9v5|A zK|_59#ghrfim;4F`)7 zD<;#BW(&>v@5fD@uHbYV^4CKRR-S?tf)@6c7P~c&9$-V}=-xN^9yLJcx?(T0g4g44 zp#@9NG@=pALXZt*5}WV;lAHY?pOy_E(FnWRpVfq&{buZLN8nUdMvBk`DCTu7-0Pfp zPj|H1dx_(iq$`P|#ifS7kxQq$&tsi5fJ9~Kbv&<917PJu~bjLu;vM(z$^-QUEoO0)QHM!n*XCI*Bl3oDbx7Jw@`)A7Toz+pIx0@H91SDEzE z+QP~G7Is_WuW4`I0X06@yQTVXnG0S_mxKl;_~1T#5+JzreNF450yn}h3P^4<~FKUuSeKpNV#E5$&TP_EWe#gS(o$B0O8= z4BJ3)+3WHA1-RKZc<@5IFbHrxi@y~3pU!rTLJ6kUXVMNc78GZD6fd!i zN(?Pnbt$zeF}PqrD=ffj1$#jNU^$j(1f}+S^7#9moa;5L^pp#BW$lOv%QYmkjGaOw z%LoEu=R`pUtp?;a+135+MS-_;!qOJFqcf!r=;KKUW-AMvKyxnKi%9Q4LUKaw`!pDgpip%9D8}*)>@iJ#X6ata0-_Wr-jiqnmJTm7 zAq-&3GQW)Q2NBy3d=CJw+I%-2cL67H&kp@M?WjaKeo+!?z_h&nlV(BCW$_*sEn#?) 
z`b`IP!*J6hHcbC76Sxv9U&R3g4N%AQhMF*yn8m|xBS6s-xo~Mtg@&3jLg*NA0ZNS9 z*%@rcc?Z?@=+04O=XCF@eq4-SUwu`zl!CY6fnXY_{h+`y1WSyb5@z=ripzo%_7_8^ z)-zcnf=D1gLo)|DGcGghkfbRD{vbqi^eF@lG%{%|oS4VKLP^emNo|;6w0~{yrREhx zI`+9!(A7ADrg010l84hp$v_V88m6lE`SH{>FY|o~ zs($PtA=*%5H`R2nXZT*J_vdm!7(uZ*Y-y7+2YOt%!Wh(ml4!7LQ@K+Jl7tQt7UiSf z4rRG;R?L3Q1uIFRtTQYoA?CneKUXTk`M429IU<3CtxgK2Cqh6%*2su94vC#SYK1Z%+bfcw!+fj6aZr$Cqc ztc?I*NJZd*v!#&m^SOWt!Q;>kS(+Zk|P3b+6`d(NNGn(C!iFfGTH z2umEX#vcHj?>3%l$$q~KF?WW2R-rU^p^W!@BsYXY3v=>|vJ*~1Jkc{<^Md)C3cfdB zO7u$#rvS?(UIJVIEjKm~+B=;=AY;GL;DjgfUqt5sm*R&*x{?x)Se=YrlnBqqWwYx2 za)MivXlt-bUeQWo0CX)eOCDc>HXGIpv&fbLB2dC{(KyJ!!rov=JzXGfZNJ9 zJg7Pp%Pg7L=*(S-HA&~N7f=vTTvMptKf>7O=>vo{CUF*lR0kl6!dK}&Y7KD4>t(vE zp&=NCu#nr(sWDV)NjL(zCj8w94L%3wA!H|>erXCtC(}BtK80dQGy7Bdl0|C#f|=q` z)wW>%a)yeYgRLAq87Gg(c)B~2FQGc|H`V)3#ZeK$MN^E&K`=Oy4nrpwN3qeC5^~KkNh1JNZ`6C3K^vpq8 zXRg!#9n^n+o)WZ7I8Q))3osdMhU>~1Kuw`Bqjj1SMxS$nZWVdL#&)WMlt#Fd7&0y( zn6D?azo|Yb!>d|@EdeP7uLUGF0GI_HVI=Ny!AFxgj{;i=&tQ-nf-G_T!NW>9x)(pJ zG;^@o<5ki0&sHT0{s?4p3O09`@u;*RwFyh+8IWq9#M@+q;dBlBJH>6zyJsT{fF@G$uOpSxKjrRCJRT zl39MUrqpwwWF=EfN7!7gvDT0w?!sT;5h1)HC&X136#%MRuo;#P7-?ZDIe=Vc88+;s zR6F_jSk_M@0CZXUUqSVo5fnI?=&x1g4oAfr!W5K_cTb&8dFCO`-&yXhGe({{Wvs+o z!bz$5&kEY{yhPo?cGVfBKe)I6C7vg&3w`(fm6?V)1_^4qS%T(FwN<7j0|>53c#u)d zG8BvWMlASBE^%P)wNIhpN5sSF8d^rs+@g%-@Ha2n@MR6Ko6jHwf)>J&)UyUUBi=v+4-38;$BrYgdDGRO(A0U!A-rpwlHy=0 z+uA8u3dp*Eb@Bp~T7g}Y))HrhLMsHNg@T67bYyX!!g`gwBb>VYjQ0jYdKb`OU;d@i z?`kQt7pD%Y(NZC?V(@*i0VHQ6UuLgam!Vk>_=qEi5=H0rz6UvkX4!DUM4udishM5@ zpw;F^=}K7xq7M5S>rV+g>rK5)y#QJS9OcK9aaJe{o3W*Bp;(D9t;h}M9D-(*HN@c9 zSc?x+*SaXNF>HqzJ|q{Rqh-}+35t2&nQ1&3fZdLMWn0|=5=Gqi8fs6_0%Dhu?2JQb zC=hamoe_ZQjdg`{PT0Ki4!dU41_Bw$spPbLyXJec3xAVM`i!7i;Ls%blt8j*^p;&r z^H8kq_zp=n8q+$62vkFhYI^~+g~G1^B&k#%n)zZ9(V4~)1eo5{!f-&C+VJoI!dV8m zfx>;Q2DJm&nvG6;3{wnDQ?@JmZUr@AXfvhOIy*{ zeFn7nF8ipUh2Sd7wI`Y1&mme^{PB*f-9bYA(pA8V5kfYIHq z^{0@~5N)v*1fAE_cnSIp?Apg=;pC`UA~6Wlh0ei9Tz*VwgMr;5E&vE6{d-RJ;knK_ zat?pTB}lH{h}4!?>41si&!Jge 
z(zO+84N3hVrq&As#!WW177gB(?~K>X;;?e=?>xUj#XEpy=KDJrU^9;7V=M5{(UM`#Tp~=)c(Y0Y0FIfa z*<}dkUJt@ZjCF~^>&}`zhY(yHJtQ##StiNNTxsSIT}Pda#c65-iGCuV*VFP9&Bt$w zUb=wf8t`vp8t=g7>#G0kw}p^rC@q+^pge~LBiN8tW(V@<%tB2Ua=k2siVkna>+~O! z^We&D`o09sm08!g2l8O?tmM87L{!`nLC7`0Ea_Q8X+w#8r1z*shIzd*2f9Ww-=hQt z>r4+JfMG2D6LR6C(eJbo@F(6Cr0L72_!rZ2pwt4PP-B2Oy27d#!CpXcnOupZ(^elF znBm60=}TbRV6J5kLG^9f?Dht!dNyqZ~pA)va;6C8L zbAzIGmTik*(t9oVOH__)!|&@1inZhj(|H^Pv`($JiOva{t820IH=OLTheIwGKo^XP zd#U?znNR&y7*UoWID>6@qz!;@bI1Pg%OI!cvEf*q)sBv1p{rdYJdtciBPdw~TaTK^w)3%LaDHy#=Gvn`x>!FA0!s7)&}x(A2Nw|@`;h* zY%l#Q!xC8a)AT$b?8tKUr?h29M(w!!Nsp|-uB}$OQs<$Q@5if7gi~*x@(zLe>>QGN z;Eh=S-;3c#Q_+tSV_lo6MTv28lsx>;AXx;wJxZ0@l42-{oR`n20vvTEhj<)829Jx9 zVnu&X%FSa{c@{$iVe7@yW&R#OOZv|Lbs>dm<~lFq*z{gKk04!dYj( zqW(5733*8gOIHJSVfQ-lDKsR+|EqZ+0CW?upVFf03<4cm#~f;Ykmp*T`$U>^h?YAe zqZdgXB##$Z{@#1MxZs289)HFC>c~31N4x|w``o>cn;!!xmcv&3wahXYR&gPZ@v#P) zRqeGA^RVQge~ZM1Z4yKIwE9s`B)!*z;u~P_cQtcDcGIdd6 zu^-gw;u!>~!2vIrQ1|i~Gl;t0IRx{l`9c31))fj@`omOmQ4;ZVkFK#-pPacq(onGk zv>M?FXRG!AQql?Ban;7lP^`lIMRIzIy@p10i<80-46lqDAq;;7t%ikQUUCTYMTod5 zajbc#V9PUZ{4UC8&U;||tbE?mHlqciWoR6tIWOPhIjIilN?~F5L(UhF+5;=w9d~fgOW!M55V#RMjE7LPjDh0b`Y|&dtX7yUEkj}xVU3lblCHn2WzPW$| z6Ue|95I;5Z#N2`yfDCNAIuaz9D=Dx4SQtY~V(~*LB~CnCG?o2m2+e}#X4@3ej!L5K z@W!5lsS$e$tzDZE9Zk}D`?N%7?qY4e0Gp3Btfk&-ok6o0P<(r`wI!K%dFNonFjlQP z!dA_vsMrN4$z4}Rr3#-GwbtE{C4wZ27fl9)T{1WF@zIp)?lLq>fE5WzYjCWfWZNDB za_bzWYh5M&jfg)7TRL0z*J~iS5G@G&Udw682O7N?sV*Q{cjCQ+Gob5$y&(LH+ma79 zH%{QCaOY0MqZ2V>bP39a%GV2^I#6tit545t#8UwAs@oDY^E&2elO8yL;6clGZex?W zTw8acyyaFLpfY(iqEsH2(SK6zsNpg3)T>uW~_) zOO|KgBu-o~(3Z9kc(Wrmix5(4Q|;I=La``U)Myt#TKrH)5!Gh|>%52MzO@8))B`Pv z2SD{jJBKejvIaa3)KP_kIetVnhG6>AO)U}#S~0?FOI>LW$yp!8BB8*Q*eU~4`{He+ zZn1!3J@NatF&aR#r1^I$;m5UoEA&*F%|=ktIN^&Xb0;{oA*2Bz zy(>2v5eTX)-=T#Tgk5FU1628}U=C6dYYT>323RkWfM!Aq+3sHCI1jie7{da}Q+;N{ z#$jGY?j?{}*0F|WnE@0Reh_D)%U~4wx!Bd#05N=<$`Z6xV@oQ&Mv!u5zLYYD;>8yz_voR##$_^-=NR-1F|_U6nknR04N&Wbozf$ z((lAy23Mbz6>>ulyDk+NbJ+ms+@~xyWp!VMVumz0Af37pQb!v?$XZWD;-CPMYsQ?% 
z6s3MMhh{OW)e5W#LDLLnB6r z=@1nFxm*r{Rc|nPpTvn7;WQfP;o(!DCC9(l`C*y`By>%O@KhQgmhE=dme`C#c9%Ja z%Ixq`ansiw>yac$Ic1=s|}qD~_VtU{}k*!8oqM zEdlZlko$1j03wV5R0}YCc_?DJP!M1+QA4pv$6Fl2pb&+So+21Qa5uWiWI8XOZ~I?k z8wga8{T_8E>>6G6^Gr_43QtzOGf-N5$$7`S)V3t?oy>DEwZ6HI00^4>_{Sb9bO9v| zGTK{L_vhk{VnSE~y8yPy><{7@oB>4mi+>GTCY&nkt06T6XE@ZrJOn2G!-I2FFx8_0 z=U`gh&+9b+Yc(zd+6z#BNX{RNqgGH02;{FD>)|M4$WkX@Ylu-al=&R!3^@hzzjbzy z%n-|bM0im!-HZ0D9d=Cc=TJ44v52818jwpHhPD5+pGN9Vkp?Wa0gW-}mOJG-279QCH22kAa{PR810a6=1 zyG{{xi9Xw~*>mXcQ+M1E2sPj#e41nqaQUnd_{&4B$Qx+RoeL+Z1<*CcSyFXcn4pQw zHjcBB;3+K$S|A0y$bCu#0I*j3Ume0`*+CS;IBZA;AQ!;QB1Nno$^tmyqBjBnTebAc zQux&mAeqNIVJrkX3wcFRuexNR$HY(#u>w(u}$L`8vv?H#efr@hS}vpcp^QwQV7yFG8{s1DI5xV>}48- zC>60^6@cEMmk6TYH(*3B+o>Ao)Nv6jVqX9>T&WMA5jHby4Ww%=1b4ITa8W-8Tdy%E z#t}sag>uK?$9DmsoGEYh#rv6$MAmop_g71V-TGVlGyqDpZHskp z9bT0M%ek5ggW-Ear$=+b&U{}^Zw=7Z2n#6{hG315u7w}xqQuc1J(xIyK)bK7>}(0T zXdXkOsPmGX6`1{SJ4j|Z#wfqpegP4d4rO(%9!+A^e$e5#g*zZ@R%kr`-Rd$Vbci*t ztmAb-PhdhHf^u6kqa4OIYXs5Ny%E-PU>cCZ$4}C0ps7_QsZ+vP#o|v?7bSt7_q8L; z88jNix5HY13%DgO=kpRj%@c^Lb9~HGfC~VnP30F~Jr;7*ert&!39sCuyZ(y z#qSx|<*#e6-vV9Yb-EC2pO^RunfB|zZYN9=EW>EV3rNad|IJ=KZvHXXQ2!+(Iz)I1 zfSflEvT1*{3_)_xpF`Dz?b(qhw-~^Okj);OMe1AN4S7({f$nx;N2kQ5P;@UQmI6>* zVa6V^Sd8Eaix2e=Or`WT;-;iab z$DYE#zXZu;tV;C;z|^zj-%lpd-62{&Ecmocts%g=UkTQtz?ddxCVf>aVS3>vE&^JHlyu`rF8#ha z3o-92^w5j{l645x4a}+$eJF5U=C(P|jO@fe#xw=r3{Qg7g2|1lFADYs>J04W*dB8n z8+zVCGrxM*B1q?usJi}Osv~HXVwoO`{)i(h1Z8>8{CEwNl-;46YwjiYe-#wWI{~nC3bs(w|FDJA z@}guic~L?Dx)g5TbHIogpjdF}U8?hvA{I3r&{{&P^E#x=!;myEvHn96pBJ@YK+q7f z1TxPzWM&%_7*E(N7Z~r6*FZDF5m8+A96~{4nAyFV8yVB)H*h6I1|@^1+6bLPu{3aKlGj5A!E+gY!d{4K=XX)! 
z@f?}PnyN0W_}m@QEJ3pZ?D+Liurt@w9%Q~;2uE`KV3vTu;5R#=h#_IrgI%k(h9i*k z!U0LsCD?-A)?TYAFdi5-Gceer1ka={z+FkE(AmW9Zy1{9T#e?S|i&&(H88DO4p z1J(clc7IVpO^S+%l|~0x8gcBu06Js-LW04I3PXYa%h-Kdf>vm5T5*C&L!CoHDLAkJ zbOv~6k?{q6u#1p`a(JH(N|Yw&ETJ9}~7T|=M>oHU1S z4om#I<9KTo>@D$o-P({8pu;LJWCI!is=zoXAGA}Dg~F@6$N=cp=YI*Kj58?aft$E8 zle7@XjD@ZGeEE9U4}^CRP!{d8F9=#-T=oH1Yk}iwtr+jXF8a3KD;oGS;}xzLbgN}B z<@i*x2GFMQ=kmjn?y?NVBcK^vHSBZ zXHc9gm)NylX-kS{bq(|!=ni$8s&z)348+neA^+ufbtUJ-}x}RNn_IpWEHy2|26yQr;yf|srS^T4J4Z6=2KML$#u^O zt`{Y))|fU3LP~u^YTY{N?Hg2>AkFcf=G*S0z5t7J$H#tPOY2GR^sB28A?#NY$&xXR zdoBEF2jC=Lymd63LvRbMzlcFdtNxPY?pOwvAsC_1#NvGf^6D@i{PQAt4iO$XYKHd) z;Hu1Iay#`m|A+1Z;Q1cwb$Z;BP7gIPH4g)LTL}C|t*+sm7Xs%(S_pOb2-cem2&Gs1 zt?=rJtdHI_;Nu%8HHj%_0L2|ZCa7hQCBq~CCMh_W*7~#Sl@xZ_UW@ z4Cov=zonPKNDC<&Eb@O5(IDrLU90g{Iy!*DFgA_-0AUxqrPmFrCqo-JbU;U!0R9%Q z?+t(?L#vPkVRP>{*J`lY~$} zm}RAnrIx>tT-uOBEh=9y?)z}lg2~%!ne*~-BfQr;uvz122kj~^3V}<(^53Z5l$#YR zLw@0F38Kqf3tyf=!M9?qxeP`)n2zIaZVk+D7sWqXXdF~`fSv@i7Z%VFgw$uWzYe4K zywK8l5n^bdQK!uq{!U9Sw|&mMfZ(QiPA+|CK#!u&zSm^dmPi~fpa=!NS%(K5&=tHA zTi*+?IlXRQqUxz!#+@(|09`}bL)nI;qpAUP`X71_M)8F7uXgl}lEvhJv1SOgDh_4F zmw^C^#?kmI);Yi&tZak>OapMsp$oNKK*;^%fZ>ihoIx@_ zVU25nX7{RJ6#>)9o>`~y33?>cE7R#Oi!UI$2n>~~U-fj>)wM9M#e4}3u2?_B-{AA0agW0q0s+&<` zw(}Z?mq4!2@k}ShX8iLIRCzEjuJGvJmHURVY z#-wP#NxO76UOs!LCCb%!c5VT7tNLbq*iOzM(qL(m*n+4VD6STqmmF@!KXi0p^UVv3 znp7?zm{0C+MDta{ES(Vb63`W!$8b9+pVwl_1e$BQjBQjkBxi}*hu#hgWt#_N;*U8> zE^&H3FK{TkMN{DIYgZ6Mf$d&Vc6(A zf&`D(^*6?I!fp?CW*WpB2<{BwDi^wS3e6R{JJoc!D0ns2%4cBfbNtm`Te66Qjc`mp zhnDLep1OzwlA8&QwOdt5NbkmFSO?c{Un*T1fOp+rt)+%$FXiSCgGV zp;%rZKqm-m5RN0b^O9PwW1-<4B-i0uj1E9I$*N5O)p{mOUWiLoOAySImZ9EdL2@PJ z3V>38o#$mOr|Oc0ox4@(wz8OnY0q$CNG-n=AP;jRjKH0Arg{-ooyeAii|=8Hnu zIbqd;3?P`Njd)wO1)FWXHywY}dJYLO#a!MIwh(0_iQTm`V6B**V!WLzHJ2CL640uu zb-(}lVF1l#aGA>2BbOm1dHV7~4S~V>lkP(d1Zf(M*Qvk=UVC2vKRakv-5m``7bO?{Z!u$L_2yiXu9tXQ35g_HB5I%=7-|V|K!RnU1#w+_N|2?7 z*1*CLj0(oza*iAxtA2CBu28z3LF1>-`jdrI!sa|yv;3(7&0dnkMqYwFgQN!ZweS`c 
zLrXl)1JKB%|26LjdtyybZRjBS0wRs_BH2{WW~RrRx;MQ9wrF+155fR~+3`3P%SC9Z zLrG)EriNnCuBd^AK-gi6Oyi77Y~I6-HSZjX3*js*?1?4Ht@PjYr-c$X29W&%ip6U? z@fp}f-j0h$K#F8wqx^FK!qG_-DLaq_t}!$Yvn~qB7HVQuUy$>=s-wdtVD68aQe)K& zjWRggq{_OmYfH zoo3H7`vOYo4_=C%IR#f&D6s{Y1ef1xfzb@k)9eR@eDgqr~J zfIw4o&4H?o08(09jH&q?;CTl5@?Tc7P80s+Dn?>~|L0^o+lbC{K$RI2)-Xa(+H*$A=(HYW&K)|deV zC)$p6ada3bg85s1wWi=}W3}}VtPxEM38VpHjJo+|L_yHpU7ph94UpD7*++>^ohIGw zUiiyf^z8PT!(&|!YY97pTUrO51Jgi_&4KC`{8V`OUVzQKk2S?y^~J$lx=ghMa($Ci z`v3wZu%L+5_c9Oxa;{J5HNcK8$c|luE|0|WY{&<<46pviOFJVdR#yE*Ki3XrzrD zo+(O0*z))L|5|^aLNiP2eqnO~!KLiYWCb|`n~@#1-(e|fAz8lM%Z*|G9EuH%<i*h-;fMB6+@990&>dQjaco~9Dg$P~n@tj?)>^>06@sO6z`y2%IbrSuM=#1C*#O9i zKTe?5gsnuOCkvoU<-AgB&9m}-P0`!(oy(LlZ8}XIU5a@&SydWL7TM|g&Zw$W{Oo9^}i&-4t>^sqp9#U++p`g!v`U+Vt5FW>JB zdGh3$^W?$3T6pLNAThtFInD4mM2QQqa}aZfp(#aKIJlN8)OiNivN&3MBKV??ZHToa zuuRZ+e^RNzE-j!AN$15=!%OWzkz$nEqv&K)6~w9L(- z+$$g_{Y3X{*GayX64g$#8hBfm=3GwzZ_nx;ngHsq{BB4qf9rw*3A6z$609F$^3-(% zG)B|V9u(PgaWaO(0i>nVWrqm;hc4X-4TkVz0H^ovm_03;$kZt?uEt_>4oQm%+tdZ$ zE-t6@%vcfb63A03v^EU?XgMnnjjvhPNGG`B(#~?PA1II}5e(X?`8TweTZdAYE<~ z00}WLifUDqO)E1nIu{#b0M2Mf`mDW%9*~3}_uB)$&DrrA-`5?1SS(CM2t9V_@GUX* z_aBBga=tD7U}^#BgsvFgw2E4Sm~R{n$0P@!3}5mv8B$w=nLYgT`r<{3HzHjO#&2XAXp3;@v_47Ems|Hhjr{ znK5*)F2bx5!u-%D(Q6*teXU<*j8;)NzkTbqWvz0NzhMN(u z4Ps6hJGDL2brQZX)`M`LRQbpz!~oEp&;bdIpe$sb&p;TC&hl3)J_vL0rSio#2XcN$ zLOQp|F#QYNOQMA@fhc1(1bvj)6G(w*+tl;!!))WCiWqPPNrWh&2c$!Wj;& zz`lmovCh6}SioWF+#Y?dRlK4?H0Y00-dIUVR3%lc0<-K(|2Ph1s|Y-OdD<_i5tR%?Kyg zEQr)=fLI4#pTz|_lx~or`qz+6jmNThpqIZ#J41+m7(ap61z?69*F8;Bt0 z9#69BFieMmlA6&fW5~@!7$Jj@qz02y0@U99p6q?@R0nb`((Wn?t0tg1IsC#H*vc@n zW+>KF?MzFv*tiMX1wwZ0DoWjZQ0A6#5_$k_Sz|jAL*ghyF!UnrF*JJY{p1YIYB83A za}X;|-soEZ(R^)nu!Jzvr!wPS`5!|ZinNAs-gszZ8= z0d1X+(^Mpp0eIw=8fa$#`Dfu|sG9-UiemoRgSK{HU|Ad~4#3Qe9D{0L7=a+TcisjW zn_|mNbMz?`Lh4&^GZ31R*i@Q=)Z-$9zgz*FIE5KETN~ zB7i2lV~vkZzMVLQ4Y!W~7BN*o=^ge`Hs4*o5uM(ck8Z4&L3fVn2L_O?ysJ~pSHc7X zh<1L#dtYOJ?(S1uJ}sQ%;&N>c;_;UrT!%sKCLQJ1e~MXW3yOYXCEgDV(i_Yl8~nY+jyukjytI1_)Sd0%`XB1Fe8hTPx`*^#$8% 
zKKN~eqiJj*!eJM}y7K+OHVwh#ncht!wE?6H&dQH!8YTVqTJK4Y9bBdqVW0;|%B-bT zcK^fCggy5_mVSXnNpuxUAeXx9i`gqj(e{}Z&}&HNe#tR3it&e4c>H>6Sy~4ubIEF( z8m5{V>=Te#1b{jCPDbTxNXtRzUdZ8N zm6J9;qMg-&U~*@N;`2!*F!#c#N(;iJyJ8Df)zSuWV&~C$cA+T(9!n_sx6n+|)YlI{ ziY&y*CFS zxltE+tTTw}ojA3tDhP(7@7_>Jbr7jNT~m`!CwOXKAT0p1bY6M6Y6E!<@jR?F%3#^e zkleeqZ{;44CK@<~g||VWFs(;FM^F~dNh~fP%OJu{Tia4h!ORSA2*k7rfZE~J56J?e z++6+0<=f&lKtkb%Q|@Zbw1RXAo?sFkmVoPoumfNZ1_i8rikiGo=ThgJY6)X!^2ju`3_dbVdXEc_#u{+RBga;SG zUsNv<4VxOq2Ot(98#dAtXqTewqKz|vRwW1t+A@wi%kvDcX)(Ko3xLuJm>Y(T-11u( zrbYAvD*%hZzLu>ifNl)s5uq=Y7@l!38`CKevi@9rpEgZily0E~StV*m8B}T8VKi20 zBGUXSF9UpH?S?D{39vOCri%cjpJWX5S@{?YX7OR8G^bG3 zgne~K^CW+##-kD@0y0)|tdp**(_2R!lz$yRSO5dV-h% zHX)qao4hjA$_THc3~fi?DhyKULR59Zei|(H{QL5&$~%B?-C>p?Cx;^d>k~W0w3Rrf z9|KyBcsylcG=;7beIIUW=9!KhZ&JhsfW@>ekAKS~Pc<1^K_URWY^b5Hp{!;YS=ySe z_DhxH+n6TRq0N>p#A9c_3CQ%|LW_qM-)0GaOK~-(4H9~-iBcEB?ChxkJxCWw#&(DS z8ZpGlc5GaX0GynMFxmnL$=T80)ZuASS8R}?NOORq;H*q3K6K9hbh{}|zLpv4GdEQc z12hFVeoM8i`G$pW2j{iFu}bn;yy{zrHcQ>&3cdL)L@?2UrjXKoO;*~F7AvZ3_orPT zWhhUNJ-(eD^Cb+ykU~aa4(un5MnINZ>{N^)DW z2mb9KnvyPoon}{iL@P-1arK@HgzR*zK}c0IESkfgsWe`%tlHGD4(1g0rNqr7KkCL| zE6FEuLIp``?0v&h7s5PpJ}H;vy$tNmFv%Zud1;ivb?^!AV*s~5It;D?o>PFS9QEyEgcV>B1ZzO;yRSj$?GE6x_BT~2{OG%p(9|TJ z+$cZav?fi75Ln9;(-O6U{g>3DYDvXablkIWzHgehbV>e=74|&D`1v#k(Eq1G@=@X3Dgp2Lu}| zL}>{Az8I3ph;NE{NHNEJn)7$p8uX|BCF7fU{yJcn57kea*Oy6U30+vdep7ZYR$zry z8NI02TmxCLcNMGlw`7LLasLlml^(`Ne3v$$!|GNBJ@DgE(5_x>3oGp|h_ll*(NK#9h*M<|P2nM3;y0|2kU&N_IpfJCDL)8$^6@fmk`sJ8^A?OL05q8D9T-2FHzUP=J=c*DmboZ>qDMxP*-UjtOR?VjCmFhs&}ES zvb(knIRa3#;>?0()%*5)7$)0&0H>+LTQ7`+grK56ZK$cS%;m@HgC(0CK&TU4ys!T{1Kvn>bm>8O(@4w4*blks$ zit$Jle-B~Z-Hw*Kf_CAbXq(|7V;&HA4dvo;;4Fi)#{^_w15o?hszOkD@Aqy1xvbvz zi=%Ha=ZZ!UtpV*^547Rhgmg)D_9;nhW`wJ#%vMG?(cbS?M!*fCyNN_(#w=CsB*K|Z z_*?5{BHz8EqK`5FHY9N!y$6D%F5_|Oeun-bJeEMZ2!J=cMjC2-IRw3Sl|2eW< z09hf+axoebyZ?S+)Fw`TI z#qh-5{%e1`o6qGJim(CUiscw2c2(+$u!j}<_>Dy93}PMB1aX?$d}ju*2|&GYER8T* z5H2)+QW)eZ&{ig)+aQ_~w?Qns>Ky|V0!+P#Hn6&x2#-1W+QAVJx#CBmD00sLy;hgT 
zO*VZ4{8sCyWeos|-dP?s(0J8y27>72{FTmko{`J6;1&QCy_P+s*jr=*dR$9+-#G+DVq$@qmKlPR z!8p6W2eAUgRDwO=7!Oy#7CIifymov@D5ij_hcytTubzs3gmeM?1~c8o-%-g!j>~+N zbOVe+k-zk*4&{t?sK|JapplTb1j(og~sP_9jAWEvi~TYS3(=(M|XDY})3FcoDa zrG>VETvu1}fZ2gGw_!3%s$CG*PWXYK#{ght+}UPM!KeAO0W0F}1DUCx!~fI(x{S@b zzd-@vdr%xI4fZI-VGBW^| zhhaACiRK_?5N#y0+YG@2s$r(F0I|Me=f=>hUDG=-^H@ynmH^Hi?O>Hd(eHsQ${juD zu*xX=9ND7E4*<>?Q!Qk@PMV!y@_2-aG#2%Za|xhCp=+B< z+yJ4MV1~`_W`@T(zIN(20n9a)?VgvnfLvNRH+UBg4Yt74yLoEdhN8lse}{VT>DsiH zVZ4r^A%LzOEIRFu5xE|Ly0rMuA?qGAjD3s?BYg<-tr1g$>L3xBg^KxMBD{|f=VT)g zvnullHY%hmU@-RtZ5(F=ZLMn{pJW7PX7a58HU)Dnu}6l2KvL90?p0C+b12JaThINe zE#7AMx0<&uAidP2E7#U^Wngf}4+%cr)mxfi2!_Rb2n%fh%QR?}ZUyM7`p|HG6^}Fx@+U`jq&|dBrHNa}BZtsv7pH$W z(8%DDnjG`#V&m6Xqs#<=Y)>ToY5v?f#-Th~kO0*w3C{ssek~nI0GZr^!5rp9B(cZ< zHbfD2=TyQ!Lh!RRz(~W&LBsYvfR%4wE5;Qh#mB(~ss@h_(3YQ0DMRRMplsnEw0ZXk z0vj^MKokD%NKjWYwTRh(ETOUYiJ=69(7qDe$lq@+c?B5lZh{~tZ!l@uvR~U&)6YV2iXRaaN>NWQwU73(EtdiI0#o|N6^UNPNPv;rQSav6oX2e2GE zap1gywpjw(zSpe9bjU=NJY=}5@PN05hD-VOR_#L-4U$oA?WRif$GBLt0Zm{uPOsqB zp^82+qYHg%0J%QnVwr7Ko0-Z_*Amrc(zFk9X(g>~MtXvz884`(HnbDfa~b6dbO+D^ z*z-8w1%NjUPF0YjOygO2`|Lqib9UKjcMd zw5`ivwBXbAgfSe-kzu!aTvbhVnFz`l`*!ykWk*x76=d|2oYOq;jp)>7DdHN+W&V5? 
zOSVSzf|PJnSw>$-;jez=TtuZ)eMa{p37-AsrT zm`!3@K&nmKU2H+YZ0ydo`BqmD#}FL|R|hMzIOywwAQ@Fo3|Q=1y1ZoQ0ia%~&4 zK8Pz!lQ^r!0E}$#efK9(0|>Zbq8?ZOGXWMh@oL5m2sN|QVPxRb!uoBve3~TvB--UP z={H!UQti+dmiEc$BIf|+80mU*?KZ>c92kKuG7Pute5Lgc2o_m+sg*CGtRD=ObPka4 z_ZrPv@u`{$Q@aNUw=9H*J!}+j4QP&7Icx6l2w?HZ!&ce7{ClfFtb1f7y#Zjh!?0GJ zeI3Y5yqDEm0}>8tY_Fa%G(cHSKgI)vn;D;-8rChefR-a@tBEpiL7A=X@@|TMzs1J@ za_pEIc%5!)gGEn6OAB>mdYL>#n*E zPv`{zC<=BLs*M}}reLi2G`x=iEU*(+E;gQ>fLT61{2|^H$Rchfz%434mNTIw{EdC6%G3mKnPM4< zqHcm%N?3@p?jlgjX|U<1VQ?#>u!4-aWE%vwE_n4(mC=Eu;&@Sjdg?-W`WbtImA%~& zu=&)JRz&InSXgD#5^nn$h<-}yJOFTYJqsJGA++UjD1ndsTYg|XTZDE|P%66;8)pnW zhA?G-G#m2qA%$=fl!FwYXVBDe9Qe=qMq_`byT0Yqt=7-R$G+tPkaFYgoQQW2)@ka7 zHNVHzC8!I)>w!r29@?cY^DLV23d~ia3AQ$a9};l2Hq?***^$9a%^dkf6!kWrc z`}?Z`5(53-mp>180R46EaJ%5!!oVrCwk+=a^Wqa`i%Y&?pN{M0?@)&fDsx+STmeu3 zhEfXf0AZQoqYsYP&@L>?DJmb*%wFKLUi3yXDEKE?7|n z)+p3{rgF|J46&Wp%OL83P=O4h!{>PYX#nJmp30B?5Yi&Y21FZ9qx{c1>8JvvrT!C# zQ{xP+?OSwglA)hzV`BhIYUFzWFb`T$s9RivAVc<9yrg`DR!0}PEBkjEdTRermBHl+&&z0wZUYE@&b6CT=iB_i zK8>W2G_SJJFE^oG4KG!Fn~+5axT3ZZYYWWmRZpdp#}=RqL5qnq3;-8YyQp!p+sQDP zuWCo63t(pO0Ewm@K|AvU&8>TorC*VA5iTR4CK-ie18flo(9XA9OB`l^j(rG0OD~wO z68**$rzWbXF_f$ByEDJfH330XvEMP}o93oXczTEo%JX`c8gb`|Fw{kdb_?R-V2MwN zg%h9`C4oB#Gs^I<_)7qn?hE^UzGZn2W`^iPVjpA$Vv%qe4b^~j7CIB5i^xP6OUktF z5lr1gXnAd1r5I<8B(VWPn|scRrOv18h#j1ATmeL6wqaQ~H}gMJLbu?}gk zUi3{u$T{r9>pBx?czM;Oo5;{! 
z=1LcUoI&_)Mil@`waK`h1aC8ldOrGr1%Qi$Ng3JzX&%@z@%5M`P+2Wq@{UR=+dZh0 z4$l(gzDnfD8D|=Z^Z^8Jzq0yT^XY=TjJd=kw6l%73blVYLjLhTi5l>!PC>hBKDwNj(s)bJ|s0zZeNq*0KmeNJr@IFi~>VYSk@&=v;e||{8HUW zx=9EwJ>wVyIiu(5+$WGu*A^&3bQ{`j3Z0H42>?q1$F7JXEN;7irH#m!MY#cblxckzdk8&f z*Yw$kpDw?uY<(b?E%rQ^iVQN4jt`?F8z#hl#W}|XC{04z#EFgE8z2oRc-N#R#!xP) zJ#i?r30M(ixEH1q$z)`n8m3Y+AR6scHf-h@o@4Epv)zJ_M{)AFfU-{F4uifU$tTi< zC8Q;J7*%o)T^ir1T5ha2GRjVL7?2jw@t1O6WDR7VDR>-|Jc77J^g26TSnVIJV#U#t z+TjKedVP9{Zq)O)w!~w#1O}SOA!@D`taX&2k6iI^3qUYPPflagok1aJS|ywtGPH z-;LYrs|@59HKGMSWLnrLhW+I_qj4{S$-yHuwV?jH_K!uzJ{gKLm<N06ll*T!P(c=s|6+M131^%Y?RaoKxXSR)DX(K`iAZZ+Tv!6)16W+ z=WiScZS+~;7`n7H-dRw~n1G<8n9HLWe4Ed34LNgUi6IV7=NTEZF3jps43HventB}6 zE;5kz3$zDt2jb$!^IpN%GDC57@{l3it<%+)!IK^WxB(3pKU87wgF~|;mRAv>d<8Qir@`CU2<-RpbwC)L5?p~gPR!@ zPdy~nf_9lW>BU=&HYT>fl-yG^B7^He2zm-=*Ue7Z;h}E28E8MwUyh)u-k3lz>BztA zB2zbgXp347w8H#r0A!}$s3wOQd!7Gw33qoqgB^X|XrYNa2JC3EVmo3d76}MPERh!56aSyMwe;WuZeWUS@b!z0mj2 zn*WD5RvCH!bcYEBB<#miYiqvUVcQv_k9Hn2LU_!r{S#GgOuTh@WCMT{e)X$gQT2RN zR|EQw2A^hfSGxkujK;$cdJAte0eJvr@NNM(caDoMW%4GELtCIenAH_NI>j;@X@gTH zMn}@>Ksc}Ma6tp<2Ce!#g0gt7JmKj9SjkweV6Lqc`Wa;p-Um=lkGBNSfFucw0zgvW z@Y#OjUt9-?VPp(R&a`@I^TZTuMeY1dp{xve6AA~>GZ3V8z=~v^Ke=@p?@HW)ShqD< zFI(ITKrgh;K5&!h@`I{KiQhRXJ;J!;({hwO3h(Ph-+vEkMPtX$&p)kzEEwKKQ?K>_ zP{jC>*0zQ$of=)5n(HG->2l&msTRbwf6_(vn-CjNPU29quS2SqVztnKaBQj)R~njD zHNo6>?1wLc&5R${4qA|Ak!uLr1KP^};pwBD2qFXXmpf>=p%&M9hXAmBB)d)1nBwnd8 zPa(^2jwY5B`3%rp?ZwbH&o~3s_AO*pGdOD@-vx*nIpC}c;4Wia$(P+S|7-7z|MvjS z0ezs4SV5!In2ewYe7mBq2P^?Xv)5p**mzCi5z_5A%* zckmjJCyvPy`S!&-VGQ#(bU_XaAelJOFgUG2bVlA&7+@yG5u2 z044haj=@m{2=l&QU4|cLgj0KCoB&vi-@il^pkRTm0|o}Z)pFvQvU$eCcc7ZVEr8R0 zS3MQKaD?kv7vDjsXW6Fa`K0t%tY}+?iidVZ(t%L#kS+s$f7Hew0G#w8{d;&yTLW1f zzBCo*EsuZ}2fmA9T3-9JRZNyzSP#SJ22hdNm3KtyAQpag>0F&i1JqpT)-_xzRH>nUUP{~oL}r@$p&A(?e63u?Xwm_6K6{tfIv(o+m_a=WrrF?8 z#w%|=shjF$Fy3#EV0{oP749EsKZ8FKt+-$v0$SG3)t8JQsb4x8v?G6E*1^mH(o*L< zp8}BQ1jsqy(hbI@j{S?ev6;V_^)Pf~xF6zr-YqnOlKCzn7f@B*7l6xAI 
zW{~EH#-hk`0E_Nb*tgt5yQs8GnNd^xMMmOCP#ys809+w25$N*wqR38Gz0N%t41S7Z zo)v^y`hI}6K4f@0RW^;J!D}$H@r<4rmB=W#cSI-t>FA_>^=tYGK3#(CH_?IB0h~Hf z=^ygH>|bG}2>`R0duzvhGedtT-E2X++T``F_>0&AbQR!T#C=g4$hn?>jAsWsna;~s zhjb2*=D!o8@Da4d$m)a(@oLk(Oyhg5?!s!{k+|VrHnO4343T6mFUbrYh#CD0hGUcg z8dc_cD{8>Ei~d4IA46u(AmaqI0jQ`Yw+HFiHBp`dmabNBOA~CCNSA2hZ4TlB?S#SV zZH9Qt0f;6v3lQXr)=K~HpemWceH?%S6`666NSJ#7r^!4XfvlVYMbkS;4^R|_6GksC zP4bfk5DEe5!N9vSmRRkdsl?0#w(R1e0Lm+%OT;r+0Kw(wbUrtPa) zjT6~E<&+WOOh8o_dA_U4eG_xYC58WDJh&=^jY-76?A*SwDTd&I4vi z;t@!l%3a1QcDZyUjz^V$IXj_INI3UkuB}V@r?R430lK!hUZGLO3&029YPR;_TzjAD ziq@c|&($-tWc>)@67dcvdDQ;dkkRwk^e%il3B8r3o?Zp2gPM7iOwFnRfY?tl=RkKH zeUBN`CZA>u|4F`K5LyFl7O>ce(r-Bd{4Oz1Z9}*_eadlH$G@m}2E%T~eE#7L*CrWg zK+GZDray(z2LoIW)KxDNX_k?M?1NGD>aYiV!mqooA%shV9b`Ck0dW4WsL-4I{mJ&f zV*t|Ghn)$Z&W5P(@HI{Gr(i{`Xiw^vW*~G?eT}1YzR8Rkd-1V$o58R2JRrdriNv)b zkUJ0<(oGJ8z0ChSH6Bgu9>gk$g3*saR)uE^EE@DfM$#*InmDdOtlA747w>h*@R$*) ztFyOpu3P)(qO`AAt8DOTJ=u$XvkpzUj(=f)w!ydapN`wX8M(wd4bWPX5IXQJWrJ0Jns)y&=s!0xXiy*+37{9P9NqR3Pcs zF>e?^Qv$3R7+Cl=Kc75z9|5=`xq1;j_zj49mn(T?9B25q@|lD(09c@^mblY71u{E7 z%5BFC63sbbB{%2O@_K*#6F2H~a(tT!ptO{lLN0)mW9ZQx1RR!QC22xN&~sc?1t2#E zAFX&PzXEkp@T!aJ3Pu+Y>7+Jk)_hw^46@A19-+|AkKCVN;D8`G9rUUm+mKQ%3A*E7bc12^ z-G#K6<9uDK<|81hf;N@QF>nviWq!}RSsLF@L>eP+r2`Pxc|5C%5&)E@k03_*yGs{B zDQ^H&XiY7p0+6K%9Wy4W2~9GA&QtmVDD$AJed=pxAVuqs!$Zd$z`QfT)S#n;ZvkBc zb}}J-Uw}C?U0Vv9^E)7<6kBUczNrAFXT-nH_*mGHYAOa6xCUWwjB3qb`2Yl^^58mH32KSpkrNiIvpUVydR!nArVm= zU>tq>6_W}`B*t`;)cX)-@jy#*kO7GEM^{LA2yGesz)(u5MgT5j2j#kfa>GL+q~L}( z&O{E~M?gCptTke>Jn_Sj>nuQNqI6wMNpW(UoFI4#} zUi1tg&i|a#O}S4+AHbZy79lX%NZ_;QKh2Ew3`zO*#Z9nMwl5c93)aeLoZQBQ6M#zO z=@RuC5a#mDSD5ZVx@lo!^zO|Us*Pa-u;MNbP>FYx0mBoQ9SHUSOGPl_kIuCZWR+lg zjoJ<}o}Bi_zAkzKh{j#W)NI5z+~N2V;wEErq(`n}Xlo2tmZdV0$0S2}fi{I!PbiOi zv;2pJGSx7LHj8`JUe+y;1%37Py$%Z&0IT13at*FNNsa1}Z_A4vVYHz3FIM&QB5ra(!WuJ3 zW^DERx1)M(K$6VQ>X4dzyP@d;nN27w#Ubve^j>2Nq>Op-E|d3Wwm@C`aRb&*4z``t zKzHR(5Gd=*6ZvQ9LYm#fIEXxgcB%CyJNMR@PyjhA_6zd0gFa}q$ncCifTCCooA5S- 
zpbhOH-H}6;4Xzs~XLb_X_hV@DvZs0C1d{T9yrkBqj>zphbz3tiD=i?yRrvuGwsPGLCe!fLAbv|A8_d5D$B-xGi1Z2reZvNSV zBCkz{VbeWci)cZT*a?Yk@o6s557k*4fZE|b1BtH#Rn$TjEbro4RTq#dV^k;mBM7+B zsf2RsIXb;&>`|Cv6n#SHH-K^_vc{K1wxjYFLmO%w5{cO^pKs?Yo4Rt#eGF(`kMVxN zm@)wnBeBgkg>X&NS~2L*(rE_f92wc6=SkaF#x1m0TrAt|090EX3t(ixa!(SugHpr$ z@P?{fLRdFmT=qZ2X2(5{Gxj7>VpwG`rZTv`L$@D*%Ca-wZRMy<#|&#=s~?_OxMxQ{ zka0=RPnOjF<*FQmYc>B3XiI|aZyC4O0jSaM577rc5#x!vsAk6N#%n&CNo)6lz5v>) z#7hV$1EiCcnFKtyGyFk3Z_+)9WV+L2v_zgg&x6ke9XVU?{|QPf!Sd z?r)l>qKmm=;F#i$983-T;(dNh;f z)pM;npq=BvF-@xlY1zmrFxtHZKtZtyB(b&$7WVX39f&e@l1E7Y2mnaFJlqI%6G}b&q zJNw=0xa?o6kn=Jj-vB|zm*q&do&j*Z9`{=tAl8%bc=C`wCFz|oHra$moQ&$4GPj^y z_tIotP0|c(3)E#~De|c=s14-0J-1%RtaZAb0u4#c_Qk%Ac^fHL3i!qE#5=ZVWrx{oA3$WEjMX<5IDm)*CZ)lG))oOXu&E+(On7V^8} zxX054aEbFZb{ftRs0F11_0ZKG5ImOG8u}TH!+$lS*v}b&n$I_3c;hIj?{gGqlt0g< zq&G>v2zN7(&M`a?s$-o1S+sGW5m#8Ii4~9LhNfU3Dt+cU_ra$x5JmOZnYJCrnCns*E7O!X{+ zE`=^YN;N>;<^Sssc95_HfvGF1;GR$C_)6O#D@gUU8f72&CKVZrNH?G?I{LR$l}`Hj z2x@JPF-v=)(a+cZjVjq=xlrGLbVYv--)VJd)R>JE@@zmFXE`2aW_L8~uAbAqoLyz1zn41JP+udXho* zU}XyBbp7BI0cJp+aDQ_o6hqQHle4=V&qM$%s(n9Ey-0v(-ZeM41AwPub+m*+G)$i( z`n^dy#rINz6{M@`>TDR7dLJB{05URNLs4<@F3h8&;^{iBU)26h*Qu`UZ}44`Vg9C` zs}6!fF#l$`*MM;K#8Rc18R#uv-9f3Iab1mn38Q@8^w*g{px*!Ym0oo$gY?Ks+0B*Lt_`+rhqLrXW znUb6ns*kw=aqZI4c{w!(LJ+5P+%&q!W7MS48hXzb!|YA{GY~z|QQq8tU&Mt$7z=P?T3GSNt#+G5k`W4LluN^4t1Kf{2W;DtkBC7pcRYDk^;cgFA0YRg#_5N6$Zw-Fog|`7w6+}neHneDh zn&TI$noUTzu=1$AM#L7N>yI5d>SYVs+{=B3Rz&SgWk0$PNYrI#7j?<75`ik@2t-W9 zp>_|#N$-b8!oGh}dxRW7SQEny9g}lZ1w+pFlDr`<-#x~A` zUSMO)5gUb?B=+GZq4BEo!f0d>{x;pW<&K5BfMt9z~{Gdpj2QrUe zWTag!ukZYiIWG#ags9pUwy@D4?@NH#IbK0n{aBUhg8l=5Svrjugx1g$7B5qbC48Gb zZWu8Y{c{!hUS^~lkWNaENWn=Rz}0qsarRE9Z4D6ke#eeJV+e$EqBoK%K$}5azU8rTToV(a)DkQd)q-Ur;m;UDx132Y+V57a`;sv%MmoHa>~fsHLD*v9!JJJKBbCfH!u%B2b#9CIqebEOX3!Anq@zwtA;41nPYw8Aw1@b8t zhxMfOq#POn4UZQr-ihCYa^bHq#`EfZ6U=;b_JT%0k|#_1a_GAS;2hX!pbFa1RcB%Q z{tk!>BSWS*lkEZ_a$jLSf^adO$UXwaNB~ze+Un>7pfuNL8$Qk5voKN}Lp!feU*aTw0)PPI91;Ocq2LR<6WExUKob0!LpY-oxtb+& 
zz*|74esU@KEui6ZmqGZ>k&nM<*1Ci!xzg*gio6G~P!HvcW>o?Yq(~1Ct^_&+4%bzp zHJF9U1>vw{djzpi@hyhfkW%g6skqtal#UJo+L`Re=Xhlk$ zL%TfBS;)tUbRWpM>Ul%>9Ar8i5n~yzHZTOUWMq1+tVbZ~F+b9dZt|~hqSGUQnDgJp zqZJcq3yFhv6aW&waUOsuW)M{8OIDI|c%SH^{eM+a6br0VBYsdi#-a08l@ zFdKxwI)vvg471vj^lYmEY_>Uxi^H!bNDijyBzrT%;iHua9en_ZE9O8BHW(N($R0cI z>Q~wzuJQ}HxsBnYlhL@Gt7hE=porHW!w1I^RLLD7(j$ePV$We{3Nb!p68u!N8~{+A z;qyXL9YRxZZD67a5N3jda&$_XdjsZn9F9$F$~XqJDCCPtd({(=qVe?4#G597qX`{{ z%o5Ox7KE4slr|C1JKsX1J%>AJ2%k3axkWq1%-6VjqFbi{4&DKmL5h0B?{+$GHJQ0gFQn*slXP%E@IMoPUcV z<5^1F%n)xc^~U=qwDZ;UEjsE}26-C$EnCnOicypt+7OjaupbxAz5`~-oL+py_|JeIzAekp1M+!u^1#qgG zwz%;vzz`Ky*u=PlpfYr|GOmy;p{)<+y18o;YeAaFcb*4qW%RCH+CW#W^DVj=_5vM1R~;*~cZeJ%0iqZo%p<-_KUN-X=z+lc z@u$$ien#K9#?f&sG=N+hTO(JSXhU$9=0n_t9zmPmm)Ps%ZstI#KJK_*!s`GoE;Xt;`0`Og7CP4=93F3*VnN+pgTzOf}=be zXMvb~mhn{Dy`wXPY#*Q?Q06SI;$Z&3AmbRCIC%*Qja)h6K|TB(9Z>Dxt+Kuv(0gq_ zI_vFl3xj@uxQ=9vb}SR?MnWtiF(9D?O(1EXR&Ja5o5`|`Vp>2&na(t0*@C7?*sEq6 z!t95MWy}ydB|t1CyAWn-m;Oj|gCh{j6XlXTdzl8C{g_&54bcbl=!ktIM?1xj+yI!~ zd;52cDSVnOwpx(1Od(f zEy?$Cc^Y$1ASxb%yXd-aO;WAQ`%nwWN++2XBl$ZJXZ*@by=4OGcgf@)K$4BS7Apw1 z^fT({A;Xptcn#po1Ym|2F*8#Fby#W*BPB+2ly1#DM zgGYdt5wwQL9ss(@E-gC2x0&dMC&&SG5%KZ6c)Vu_;=I|Dv<<@u2u^?VXV3;dow$tT z5yLnE4kBd7I03LW7n2JDO%vm_i?Y6%fw|4c`vCJy=MB$FBCcBiDsq4|6Pf@~l^#a{ zp4;33IwzTVF^xi7mtdq??M2;Z5VaE@tggV+6UeIeA^#)za^GtWW&!Aty;z|>0*RLp zS?w=Y|95;*3*`WE)&G_yP~DUUKR9YYS>kjbjBw4Q+3=+4Y$nYWCse%U*xFS^?Y5xI zjNWkx9}De_fJr-L=s;Tx9PHsYTDyx~Fjwo<^$E9-=`k{)H)pzciBNk$RV)6WwN4+v zRq{lf4In8s9tBBp2;p4pu2MxCfw@+8hk5<;2GHe2Qqg*ku?OjV_Avp}BBu{Ri$B~(3oA8k43siQK zHyMM?H54F8R&Xc)q$Rz5CFxF~k#sosp7C8ZbDiSHLTC=?GQZGNS$n&;K&1|3DZPL! 
zEnvXLOo;Lh%u=YHXq4OQC8#sUC5&_cnyRIz6Kg9dWP~Fmh=&aNQ`l#(p)I82Pu?DV z1Sxy{FF4KNDx4jUyM6*M7TT+RtS zRo?`HYmR|2My40n` zD>?#ZdA=@hf&)~wEqg!IC6OH+UUODFN(7sP@QZ$vNWAHWj*LO5HReUA-vq)r$A}-w zF-?peoR42^yai_d55p5Hq=oZ_MjHmdw?GzzW=-fy26;`lAItAMAQy?rv_{!w0#8|| zz|K7Y+0gHvYs_6i((>(xm2VgS@FNdZP#tR!GeJoi3(@OGAor*ov~#S5m18VkYX3o% zGY-A=$jAnO%M+Vqbx1S669<6}XlI52S8Ojeor11ZI>}7{#P~k6v;|>S^b&jQm~8<$ zhh2@2ZAhoc_GE0tbU;eqq0MmjFfquul0F9for>q9wfjm zCdq0D;OsH`j(O)OgY4Kz^$h^(&p7x!q8~$P{EiD}e7h)Ds@W-|d3+g%NwfSt(5d4b zl62TJ?Ni0KQ0Dc3sUX0@zscqe>$p28rzM*eGyMP3)! z@6&7=Am)_HBHt#og~FvDW}%RD>D_&L4u5{POOKI}xN*YVrw#4q7IXWUad#4X&terd zCh;zq`QhwLJJ&}E=vofU_W-D$=yUq{6XknLHUogdJuaj*jT$EMl^Kk-szxAIS@|M~ z)^h_CW09mYhA<0En*~fV3B6kXj>?*XP}A4G^)M^3F{nfdAY2VJL^j(zRlNmuo~#QP zvD8ffIUDx2bzpo4VBYBl<6vr;hW>{%0t&XV-jOZb0}*SJzKvGgn%AogdWvaYZ+v%^|&TT<2&yox^JteG|fp@mfpM z79@%>M9a4LbnR;|8L#)S&<3L@9I3_4LMSKgoewkt#G0!e`Y=^T8-5>@BR)y82PihH zS!6%c+UEHQEl&m@mIk|#>R*Qdl{%h;Ei8Hg<~riyldopo0J%hapU7xzk_ku5B`1*4 z-ea{gg>nhL;K)OL`V7PgG6MN}`8*+9=24^6+pSZ0fpj8Dd`U~u$IALKmlU0F%3l#AYGF(ggw(@1C;G(O9wKPEh2_Z-aAx~uE(nt~@#wb)zm!UGo9 zTB*lHG*Ie?%_Ve(Z{b zfLG}W033Wz(YE+R`DJ2C&jDc_!xr7Gw*v%Q40|eeHv`a9_*(c8kY&RQSFwH212HFY zn$^z$(SlHe0SI~O+7miqf<-t}juC_lMd!rNn~V$XJ#UaoHLk9>E{#AZVDPBLE2&SR zTnG%RY;`dMvH*B4T(5Y}0nFmFSFh0kNaWE)Rtr9n8$C7E&UaA^t)}oZpz2$KlAC%w zsCy_EpEnG`i|;C9*#z8E@&{0|c=DV|=hK>!j$o+%@}F9${U;T-C)`}3586mT59CqA zI)K^lYOHENnumjHJRng|%?!}dyl)e-=t^u3x1cS=@S7LrwOe3P7pUF!exr1)4$rH(hVL%&_T-vfO)-qLka8o|E=FWYXDf>ubH5t8PMhduMK)k zS^Wc;3n;^VIR)H3LMs zuBN` z21L@wJV5ejnS_gNUA@|ZwALMl>1P|dq>1AvTN-!3TrQ*K6_Z9dI@LOH)+Jv;A z${jG&2FQ$PZ%>=w=nk0kdGBdmJL7fj!hx;^h&qUxQw#1wTQYd4!r-6AhH@WeO7y`} z{T_f>?3DL8`yl3N+t=~X9w2KEwuHAS~~2$?P{P$AB)t zk3OrK02Ik+#~0B}orruTpe^~EqlOql=OE6qqv7@zvNTeRLSjn3$T&JOP?C2bsKHZ3 zSn>@Ax`5`{$31}gq-$m>6Dqg@^n5sa-gNZ#;MlZfy*LVkYXIghd%vT9;nPKlp z{_~1CPQ)+Sj%7&pAN@FZ?GG3}t?=mG2q%pIw&>-V5s1y6NN zz7J{s4&tsEY64=Fi}O965l|vSP;-eRE^WY%0G#Sm=KD9M=(MDtV<^i>=Ku68&}N?` z8R(eHp(=nbX1txK&UTg%ewTE;h290o{Ow<2I1O@}fu2^*;1>z81#}i}itaKV?~+Ad 
zx&$dQlm(KH5$*vkLv0+YeX%mm)XuBL!UK@Ibc`|N#oF4p2Byg{R>0R|M$`6vFkkyG zsw}rZhqi0-=^^zOk#C|1sZ1fT$0M)#CKy z@h?>U06@J+RKgI#MWtbSa2zGX9Wn0+-T*mQO@qbuI0Ip_>s9$A197)H=8sblGmpUx ztj?g_>gDMB)OL0VbDrVYbfD0;&=iT0S+%x+a^~KBpuYhz)9e$lg~^DuOh{Idah>=c z$bvqUo%#xrM6oec+jxMoRG!LgpN_`Ad9YbK5s#+mhvX5l_Fq=IRqjPww>E&yh8AQo zaMpoJbq_VqL{UJ@MPGYp%_KQzK|L}F?tpp8(E>7OII)n`&{l@w-U#Mol(wA^{c+ctI$)&+CkPt-LZK{{n#6qUD)9srtvl0)<%Tv_sYf|@|P_jq+c=K@6+zkRq% zzrweZ4X@WXkQKw1+G-i+|0}JmCXkj1&vt#*ns5r@c1~NZe0t3QElnBO&|1)ojQLhq zHi&r(Vy0wsw#ff|Ip!e7T_RW7Ag5lJAnq#}d-4|hJ*au!@wxO0fP^`bQ7`uZWnP#c zN>kAfAd81in4BIRfg2BsQ2Vc{T+X#kUzRN!pv2KLkulq@LpxXAnbaOb0{}_FdiYIh zLb`MZ-=iBDnsFi~x&TV9TE1*StNK3Znnl}jV`@*g0cCA?Qf_oG@PN68-2L{r2U-E_ zg0bXhZ@OnX{pR*Fso4DvF%J^M7hHLbWC&L3kMqMA6GuQ+l$X-K8^}_5%x%V|WC4Lj zPoO9Z#}RadQwYmR_a)HHnZt5RQm1)ysM7AVl#OG42cW$^V*p)be7Of?2)zSv8Br4* z^*cDdhX&4jD6`qs5_|<|eLcX~Nc#E!qG_(q4%d83=@7K+zS4rT zr5?$A8q@$pF&oepKnH)G%d9pa2niT6zF>N$jK;)RarQG{C% z8eq;w*0ieNX2$Brm8?x@x0NpP zZDn{=G6uyh5SRN@=jUxmIQsuafA%+2K7_kGrh%CH5rG`Kpk;FPdyaPityernsDsxY zfNKiP!}A;S<33PfRwn-xagebtK7`@x5Wr>R&F$ieWR%gKUR-@p<=q&GvmcM3V-Rcc zXM8i#3qZS6#~-+7Hia}Za#wPyW-$Yzpe)59<|e35o*%n~a9Rh926O^RzYO!9JLr-a zTP@KpmOw5&NBc4Xx(6uQB&)I&BzoDQ1|BjfTaB8dtTQOvD+~va&izF=v#9+yRnoe9 zjQBSI8~~e5vZ^PIwJV)k1KLvNKga&41c;e=s>kj&AziEeFiQ3QSu2zIF}Ad~pk2yy zy5Q)n+ZhDg99Zo@JExPf#nJ^qsJ}sd7!NW9{6T4SMkz8zH@qpL6CjoxS9`_m0KodX z{gl~4M!@tao|_thSo=JM3eIjal`Hxc8vGc*+_Rv=!~wZYfJ*!0f`c2&6pXIxLO!); ze8anDp(q3d7518)?pr>KypOdlw19>|Z0wZk9Tc)fWN}=yg!Itp?Lk)ZF)#p=+#7Uv z#i#j41kA9mVq9=4c4LJ~Kw7Z4%8J)PA3@9nZ+Xx?)&AQm1WSZ-E$KHv96|t4`P)#=2>u38~|7)I*jSVOu$?9Bj_S> z25~v}4w2sgTLYL8c^7&N7^lrGLDq@)dnk8NbQeQj=urj^Z zI286cw_uj|GtCzlNq*7f`wo&qY#%#rbEaji+MVe5S zDh3)X%gRJ_6S(mGDFo31Fmot_rx;s6&QR6ijgWT6+kMaK7%c&y1k8S@(k_J6j6E(y zcLeQ3xx;Pam>v+^U3kB^59KVRf~=bn*&q{P;RZc~c6B__ovRU~my9YhZ>^}oBKRBd zVB|fAWn&0y2xSRtqX~#ROPlIy8pU{$;SP#T)Xd2+?;*`Og!zA__vkbix&@*K*kc4( z@afVXFds)Vpo`w9ZkHMP;48h9W#$wwK2^~ROy!+Uzgv5Ip`(6s`Pl5@?MNyfH)i2fPQp%I*(meinaftDzgl4NMHlN31}Nt 
z3DiN{KiZ}@>bQ~dPQS*Uax>#;kkA;hnL+5~)$m#g;BHO(Dq8^VfOcM?8u=SLRqYOT z04#9b`iqeBDT_65qPGg_04~DGdJa3TEsTJ`!1aKDGosI5c z1f=famtPR>4TPn^%QA8BH_jN0!=KJnQ4`1DgomZs)Wl`nxR^m$bnJp2zu}w`mgkv5 zEcjKOw?MQQwk}WuzFneiIi$VI(9fy^f+di}CU+tCP*sg%2u%tr5DNiEL_?ofK7dgw zyh=(XAgmI4{u-5lwiNv6je3gzvC<2MO*xg@0C1S%NDakENH#ZdRkM-MUggxi3FOS! zrjlAfyOn)99!h0f02H`7Al)*Fes*-U+|KB4-)XMUfp&43t6$5EtP5fx%eN+GUSjtM z(7C*{xnmE&V!cx1>qEL9+Wmc3O9SB2WiVhwzd&L`P#1x7Wg?HDEsHqDPz~N>kdv6? zjG>)f_vtfK1hQy4S~bm|QKVVM<>nL;nnOEz^{}x=c?*Ps#Kql3#_no4e+OAcMwLF4 zbO{E-G0l^u`@Lyo`a}4!T78Q%WZa95@rMkv&*B>;$uQNT0Yw3GsSY$a)c&VR@;Ix- zb~misHUKFqW00mQbto7=)-A{e-!#PU_ZUR@boF5Qs_PV+0Hu?mTb9RHcVs+b21P68D<-G%92EXX+zMJIJAD*HVkfg^kx7Hm!s4~#&Rgf#MEBl~@Gv6K_ zB*1BScYz{g9^P zo`-^!-b(X!D!Bn-j8+`2+ZZgMoPjQdu!=_|?i?ZtXI6KZg6= zTL|ZV=%6-CXKVjUFwE+k?tpLCk9*E~(s(4)6KBvQt~MOqfTN<6R5;(}<_*pF zon|SNpaqK1$^Kx6>V_&kOSrtI%0VpFOFdlCfpq>{wj+8sBMit>bKfHn1ka8mL=OTT zc}ADe=hF$DN_z&7ZUeH?`&jB6(5WBNu&D5i^GRLS4Ww#LcGKhE3#txm;(x!W)YJUe zL2;?^q<4?6kntSa74^xDlYK$}&T608@_7@4?C- zRj#rVaOG$ut7s2U&Y5jt9Z#+SvMJl&(qo3v>luWp{jVX!Uvv3?gHJPt!A?Sfx%evdB0u)>wzty?R0K$5)r*pkwl0S#Z4WyHQ z7Sp<$5;ex@afaR7LwP5VE)6?qtXt5Sse_-rlfl~zqVza=S_fw5ATG@_c`;;0kV){Y zI<1v33n2JoLK?024ib&t)~p}{vzBF2OBVM)l#Gia#9To*KSUGkJUH?pUDDbixp_dy zM-$hv)T;e&6;r#i*P+q@5FW0f)GyYdED;X0`P+adOKuw>%x3=Nm1XEnXwT5OyYbB* zQw!KKf2|3^R+9U&EoehpDws^iasj|u$~!uB4qXZs{*I`15V7cYA9_$|`XIV{$fEGN{7tQA8tmlJ@!UhB5PN*C z__XeaLz1#1@&H;CeD^68pCpdvsCq{~AbRb8&qjiRY(Q0Vc+1VGkV75NDjpZGB(DZg z(T8Xu%?!f0dK&jDH-TIRP4Fp03&2v>YEMTkTOiKO4t2t2x}Cw!FZ5~cMwkRwckWv0=Qs1oNS{S8DE!z)t#+CtZ+PBTC5Ttz~I7OGS?91;wKEB zV$SsF|JP9gwf`fUR?K)d_$~$3jHeFisGMl|K4v3B@SqjF1EfWQo7%X2vwH`9%IXbWZh%p+^H>lyAuLSxpkm`O0lj7;jR0_Ip7SyYyS|#wZvmQ9R!Y&e zv_a?&b@#2qrv`;jT&F`Jpv(eBoh+hc)qDh2s+id$$KCW5Kvf^g(vi|@+~_A7x7*bG zVQBzrrdbDx$0308z*s={nkeQ7%(Ct3-1i339Amf^+m~Yyq$un5*hZh2Mvbp7wbwR< zb}kt3X_tHkP}LSg*c{R+#tVF8dFvEpsu5>-3utMLE=Jt(jZjGaJ#m*%PU|elH-OKvD?EUMn!jIb!)*p>)wsd~1FA6xFavM7?X3&h zw;&cRc0;;&$sm$};Bug0;SR(iVTYz1elNi&o7_o-THZse?+X`5tBeqf_bB`Wn3>@w 
zkFLnB0bD_PngY3o%pNl;WAlZN32XmXC}^1YZ18Ofyw>!*ZZeEn;k^NAX?>4UHlfTn zj(K&>g{Ha*hO);?=qNWsemaf!%0Eq}WD%Q+1|KZdKwScrLVRuBiZ<1zj zgsI~=gU7kj1d>wX3k0R$+x!wVI*0^x=YkT=0bEGTHE6K#dJE+Ih(nMCv_<*)%-eLR zMW$2S*e)|2`W9c(ya%GceL`th8T<*8e|Ub#;D1`S3fjb810jNo*rIxbumn-n=uvC` zcg5(7YWlGOWJcMBq5A63E^X}Fpb`LZa6+Heg(y)ERk35u*Pm4AzGr>18E zPzGqH7FS5o2LOv|yE1?2B*bBc&Jh_*G8*kiOOhi1*YuOZ*g6KS(WN7$M1 zCk!cbfBv<NDIsHW;XWX!DA@JM|V@354%y>L0)o?NSBaM(luD2$v^6F=8OhE|3}8 zi6;V&G6=6KGr5Py9uSh{NG&wK59uQ65I$_CGR`@tf-#>N0#T0Gd&368F#O2bfClin`DEd*N*l z(sDz;)loPwk4CAc*HA9Vk7u}f(1P~y|0nDHf$LnlH7@}MG#3Nnjz7=^1G{r20vdiCn=)vH(M3xkifi_`I6 zR{UUn86itLLHtq)qK&Qqa_u#IbVIocL}EZ&Kq8jdjSvN%T^vmoP!`bU0LN8USaXo3 zAMCIG;xCy#%WTZDsyO>o{Q>@>)d{CXz3UKesRguwx1&VW?ONC$K!t8u@|RwkHZ28OnPcFQ;A5GDCQV*t9O zxD&+`dKZMwK1OHX<56`Ad>_J}pU_=?2ax{Uk6b^<_YVQQ3lnR0S(C%3? z90QoK1Yaxe!Y5#6&w@IK?4_qbZjbwOGcfoJn(yoQqt=w?CI{p8EMx7vf8gu6l)Kq*1SA@2q=BW!y`I5JfU3{D|>!OTMiSt`qizD&r_jh~iG z-~C{ep4pb8y9_`(d+d_onN$a{aTMv|NP5{}Y-B<_W+F)VB#4VYxDYn3DS#6FQ1M8# z94)i-7XKj6^f$yMGhz=iVl{1mxCOc727!U3h-4|p8I*}>tqsFrVLNFZgF#gC^@y#! 
z3yGZEW}>m@$6v^FpU2W`v`!B}KLB&xhqYUQ)E$DDjc6Ob78A82KuH@ALIUAogtfHn zVWHYjK+OuKw$jI^39$Dsv&aV2Qs?`s~B{JT^GMg9R7uf&VOBff#XVX z>o$PJa3Z!wyP#b#<~z&>?JL|33Nsd&3-<76F0bkQW-p{G-zwLu_kp>CS&;z2!SM;CgGZN!{V+;%1YNNlhOgWFdJIZ|%4HtZ^TYu1D`N`4DF7^M z%RlmrXAc8EFm#duD% z78I1E?Kww4Wf4@|7guQ@b8$A44l5Doi85pZl0xYWr%W+HxB&|T1O*CEtlH_pbGX#H-ooNe%YE^TH zZAj-m5>bJ6pyBpVgf-ZOpbR63$zG;obwbAb&{a>zJgplFK@UL9v!5o0kO@dKqvukr zBM8@#C6r@>9|O7lMo~xhq&NX{i5m>j+QmBsp%lYSWFh}#qa=<)oF@|VBnFxb5Hpz3 zsJhm$+5zR?>6WH<@BOH5z32E)xbQ^NgF{o?^$+UfM383!?=R z-49{gON^*Mno~cr!nzs-T>s9**pIn+70NtL1#i8U1TbzvSifp-&U+wI0P8b3E*tdJn*s$ z+HyybyI_1bhAhInLFEC3^nM9lxTfZa8IAfs(4S95kn*P2nyolG!RLqecnNnQGZM=GfHpf8y!mki;9?VyCyaN;ApU-ngD&Bqb&_vQ&&1KL z(*#&0$yjp+Q2H~wxtLX>V&|Ex#Z6~?{sKT!8t0fhK3C<18|55%XJQyS6sJNm0%l3) zW)X%ANLLA+h=%J)8a)k@dy~G!jdjQfG~8tyStmIAq0E4J4k`i+0GMkmV|9T9{H}x1 z{Ojr;jeKE^tvFto%opykgH-GmV5QCC7BNcQN=QrXm}s|wC<)imOAt4pq+n^$O&(#( zpDYa!lD$=Q+&0gyt1SdONjF!tWZi}K2C=rwG_7M9E{WOW4_A}*mF9Z;0B&cF35B)f z0K{xvkiq@%`x|t5DsYtl*CgM^kgg&}Bp7(f^8~1L0fM%nHvESFMW8vK%oAIiHsQSiFlUX^k9O$l0M2@qxB$`_ z4G`LMd=wG+{wJ?1&nXapSAve_#rlqnfEEs|Yf>8R-v;!j8oGrYK>=`ihB1t4ct&Et zBqX}5VvA=thi4BG6hps4;f{ohfH|Wj)vhC>+IeDNHPTK&}#VxHyx)58^M! 
z5udni;vjRngC8aR;V@qdlOxPJ%GbCFj9P`I3k(($qDGFLCs4(G7;VA2F$5P00;gJo zIze{k=S3|j&UrTbej}1dTu;h(Es*X=XPC=}~5m&?s%KSH!j!aFJJ{KvkhF+Mxcz>ROx(+el#4%ZJSbnsyJ_ zO5l;*R<{8x0KNl~s_X!{xmTay6A{w53q~>J?J8fiy?hH9!RZ3d?E{IAj1(~E0LpxR zv~IVKLl761wYjdUJ^~=iYHEB8>6(|#!?K|YTtsAfIi^=$)}Dg9+&aVgSkvt@XUHr7 ziyFcLS#PrMcjBoaXRm@rqNB;p3O9e+Y5v9zELpc%j zSMu4&tnisYk*G$KU@qub7^|kBsV{zoYLylg*UdQ9djkVSq7u|@yd@Y@9OW3>#u z32hd75*nr5aw6F50M^^k=2*EKDpKu$x&~v53Q~|S;i8R^lfY8^w<8gVDy3D2$*hoA|YyFsha|V zYmc>3Zt?8qsNF#>{0~k@h=l){#^6OZoBk&}M7DUAGF<&tGZYAOWQ>b9DANu=;YeHy zgC&66b~BkKOk}tRV0nEMhoknPU0(%fw+c7_bNAp1hHx}G1Sz_L`C&%vKg!I0T$F9< z7@86;GNdxrKontTvyOjH0bD|KnoI~`{y78n#~2J_VsZ}R7HViEeUS;~mf3KkQXT(e z^$M|p;+RS&h->i>w=9q6U0_xdSS+-0RyKLN!CkIVT}FV205E~j%&Is4BZ8hxPkjK+ zSlgqX4EBRndX6}I)^h`ZRGlWIfOQD7tJoIJ*9Ne8-;Ry6Nx#F)f=^FDRQh#(I;L7J zAb*IvgqX>3=nGi^)POmSLfYWj8RFPT9-GkS?RLl3OJj^+6_KIx59;2moX3_HzW`@344VU1e97 z9(fF?3EVI;lW!A>Uh}O}0BZY0s`f1Z#A>NM%yR%~DdGu)9)aXb^07ch{-;Wa*o9(F z0_he}@R_vRcL9?&8lfeXJiD(@;ajJihf9U z?O}{U15oM`;gem5aC;5TYE*CJo9oLgqbKuk#0uRt1?_L$W_cW&Pc0w{{P$(cY4hyD z#Y%@I+6G8zpT#(Fy_rd(10b4PnS>UT>0}$A^fn3nPX4cPkKVDHS+Fk~-W_`&t_9X1 zin$L>6}j9UkvxDf7dcLk^;Ame5X|*Zfb*EW9Rc~PcT|XDNLK|(4`=!lkV;v|WyX#O zr=XUfTWErEy~-J&%cDIy!~)t}kjE^Oi3JK;n|3}F$qe&Sgy%AVhXB=1BY`Zm@N>OlMk#YY7RmJX2y?c7QF%K z3bSI-1WbFvo0*gLw}fngm<2TsfbiaSCK+6|zvvDKy?CU7$nfX}nOztE_n=*lvav)4 z`(Q3dTmAeX$yvD^9737}QlQ~2a0EnU6)A_;kD;6;YMlXBbvXg^X9<}?Ca2J52%;Fq z{WFj?= zGjif7;51bs!m>TjB z%2lmNYaT(ms#?vH`Z#lF-~x*!8SDhih5Z4a6oetupb0siquKUZrdf@}{rT@`t$r_{ z-7XU)Vm%pm{4Z6;M65$$c0#)}O!Tys(Ul2^0?#Pb4Q*MTs5bt4@*RS$DwAFS$<7R; z4&Ug5gg*qnx{l(=#C$-D_yGVH%f7R*%~uCB7biH!>@!h~Oo&`nYoAF#TA7v@MT(|O z$@UJDTA9fmjgn!xY&!)fU9_OtfRa?}7JyBjT`|oyg6mtE0U-{n+Y~by-Y^ahy8}f1 z79Ym}bqF(XjO&N#IeP%^AX)^eT>Bt=l{guQ2G6eO6UFL@Mb{yi1?09Yl}C_NvFs=u zL-8?;?(pse!XML~53D`~C@KLp^R!sY&Oj|an6pWB?mQO4W&Z-g^D$0(MU8edH!^XrC zTj~12U=FU{hz8GQL_=X+>q!o3P11mL$>b6##!iB`5KLygUon-?C`KcT1rXebAV<=4 zZD`jgCQ}%@=qwxgp27xs904-3hpof609+;J`jQ!nu$}M4_@^4}0C}>;UJr)C$E&ez 
zxcfb&ZmZGDdqDnz7)y=!0bl_2ixM1|l&~wd<)O((Wg>qB>Ee_?-F*8P3~9mtP|^Zn zF_?Iw&z(XS$z%emTAzV1>dC};o`0eT;_Dar`uegwP&@v&s$R@dm$ALs!J-q`mEcI0 zCRAMj?xMHH&>yB~AnB|gD3@7YtK3A&mwOZQZGI|hpWi}AwDjtSaz&Qo=-2^zqtu??N0BuQJ zQ>HNc5bo#--f!lkO%l~n4MZ&(?l5y2Vu4QYCE3~4#2F(FXLI{_&bEmzP1 zo&uUl@(x*8(AMf17;VdT!LSX5Yn#~{<~q{6T?ItaG7?!V4Fb{>-NG`Wpao>)vM_8f>a( zA}r7mtDuqDG}tOcY@lfY)F2Wz1tImuAO}lZrjV~;ri^Vfuk11i&S7Ca~_%?c^RKDgWb zk*<$AfOJz0FRiST)ggeJoN<};aa^=|1W1J+5-9;Ofp9+_9z{f;Tt0575B{BkxSG(u zsPN41vuQ^O&hw4I&=rVBA{uza)Q*2zHQWqlY9s{O%_>HAL0RY(?~^^|(3;(#)xrZy zuC9iOt<@fI*LXZU#2{VZd&QjD2jT*2S{HTb2eRO>Tg7Z=z%*H>JB}k$wGLT?Sq*CgP|%8l2#W* zO!A#Jh91tY(kw?{s_zpt=`qj#QUsS{!K^QxfFeij*lj$8;1f7lc)&P=c6AwgABI`) z-03M1l6e8)h8xByNNT0ypH)mn4(UXSo$ZD^O|2Q7>?^6v?@{E&pCxw}eh*`yz?Xoa^8UAV=Fs(I2*A{s5Vy4nSC zBegIC#g)z!Y!9?*2lhENJnsX!tSDc^_W;_Aq@}cIIs~BU2k+Jp1D>s!*(*lrl(s+4 zcjWmjVMu_Pp)4Gy0Pa*xI(5VuWMlz^4Fp09E}cWV$e2GhK@YR^MZT%EgT4ueI{x>H z1A`36(p;S&uGzfIATj}T0hO9XpzdyH7hZmair^E;dVt*+yvEcd-x~Hw7iee9vVDn= zl*kE?3yOwdO7BJaK;|>7ZPPlW8;PzO%k4(y#Lg~$1^5wW$Bh?9K<>&+8R#%7 z=P{W1aNALyPx8Ha)JwF}DUiCDU7XLLC>RrWMD!ejCaiWpF8m)6_E0SyUn?mfzAm5Y zgtktt-9tw5UlwNUi*zTA5%wQ56)~-=3u3_Og{Cts>ZD;G&n`3`=bF{_1Gr|oMZubH z0E}`HHjr=EAuN&X0Mpuyq=$&ei#R}=!A!p*gxeH|vt)IuI0v1@MsD}~Z&;Fk^0quVw?tEx34Z0=a=y*I2@if(|joJavUYQmv#0`L7LCIxG{wweT`! 
z0LnCXhY=PiXa2Kn0b5C~X)|;i5>_%VE$3XEM-a}IZ6oWpV<5N21gtB&k0*dG`>^6npF+YIEq%%4 z49c^}Rn*(6oS>pppO;khFv$v3CCinAGCHQ?|Ey%>hjKy+z(tx~V8N!TY!{GU6+18v zI{$89_a*Er*u8t8i>h9OYsJV4NOiGVQPMt$O8%H&Ogo%yzmqUwqzew1!2HORtPbHX zXq|{+$@gIvL6m74iZ_|5u!&QpDQLGI9XZ6W1tOl$hT$E;y!=7kYQz6|FxvF1+WBA> zA(gOY1gX#dkrl-06}ingu&f9h*U!f&j1Bi#=;MTXY$ed zG3Kn0e3sf!+m_Q|?nY(!58pMm0Z}w`3tMrt^Cp<9^mEu@wxBJ%EHiN>=U^2@+s-%R zU}Ugt2h5e9#ljH>UO-&mxYbc&v4rDn%#FnQXtkSOYY3w`TwYf=N`W+7gyvV{~J=l`XF6F+soC- z`oXAA`bG>uo248#a@@+glTcPXsiiKU&66M5YM?VrI)SuWXvisOgaZ#UYTAOZmYY*h zz3PUzHgHr~Dh+i3>CZAFDZ6w)NMG#!Z1L<*-oZnMnr#E*5}!2#kqyF8eg~XSYZ}Pt zl5ag+!UrAU0MH~y4kr6NOVpXt^3Q?a!wW%z08x5f1a>%rwuoXUprhF+e+=p(%O@rr z8cu*Lp)FL2OmwH2RWq(CILj2c91!>xNdT!NEE@d6uW*HutPU-wg&wK)B30TO3D)dx6|xIGayA6IN453LwmB z;lxIR_d5#&;6F!cK&atLG~PNSv-jUnq75h0JPVnCDE)*E#^{ms*z{xMv<2-B@Ds|f z4P_=UlZ!@7uUmO4AS4lA59DJa{zx=!WEWM5E%Q@7?X+rCzwHj)c6o)TpgZw`7b*_c0>5P z8Zcvvya&i1QcOQy@AV5Z)zHlO@4CIZ`u#6K$C$1S^-MIo_Q;rwDv*DAf)v$ugN>+0MupCfoVQ> z2tb>(v z6QD>WS|Hptx)P$UHUoA8x$IS#bn4s#Xr7GVFD}j93*zr%+LUJM%dCD|WJxT(^(Tbi z7xGPC82~a9a4g{rma?jYS;B{~nl|z!sjKiuo6H0wcM%n+@9m?NN(;nXye~D{&K$Hl zi}ME?K;|9?Kk28N(5@sJj~(-{K5qd^a$@s+J2OF5FN7g%Ctp&@6oQeD?E<=&$e9U4 zeOSWwKwU=BFSFp8JftMu!0IJWVpZP*<#LT`PTmWtaSOTZ%YSM*+R_hcsho~`pvY+e$lO!+ z5p(K(M*+^2TLaQ%uKj>A$>b5ih76`a+}uOc3U=Ir_BRyVo7=k(l6;BTYh2H;0b+^1 zqa87XB$M2y4JG4SAbgh&sixnCGH)!J})&Lc@!Q6Cg9^7ub6WWma%A2HBrM zBjh?Yh#WxpTTkE%e7*pX0@9rv9sh6j=Nc9+>edP1Z?S*E5J5@0fC{5mlzVr+gGZyb z3wr=4Ou5pg7s^6Wo5NJu2T57D1(;btzf;1Q*gJvpw??Ytz4d%~Vg##e1G4bxQ5^J` z1Rys8Oj6b<2(JVhsLSt*t`@kNi5)C9Z`&X)(}XsOHXz-otiaWu;o&Bj3&X_P{>WQE z{yL6e+N0ji_aZ25bm1K^XWecgwY!jJ7XCWS?)EYd#b`ym_5rMwv~ZOjm5(0)R#K*u zmQt*TUGY%fz8GSWPiCc0E%(w7KvmfROw6o55@5qJ|!SWdtC9(nSdaiPfUaYv! 
zfy^(~adFV$0>lbWD+5QuM_B&9s+2Oh*qYM`R(M^kw_PBVN(SHv@7$d!L~q?ZUPROX_h|#P`(G3q`m4Ohy)=jq%`vHh2o(!31Zfk z%@o>k3e36U1R(=Q%%3lc;;i$ey9%r!F4@o4W6xW#@N{8{$-U~ zQ5iI|y9DWa;}{r=z)mNioK5YaUWP1Thxw#-Kg*!imFY(0I&lTkr5%cm?e3&G+zo@T zLc4k_!89xD0Vv&ed1Eo|fV`IP5K4_I^+FdjL^(bZTKak>n?O{NJm}IgT$o5txd8xM zR&-9LKmYuZnGprKnQza;sq+CR4qHQwXL@c^!pYbZX%kqfjxYsPP3mNs3@tJXXekZ*S70qL=gh@=+Lio6 z#}bf>Q}-5C8^B^YsBXCiX;q7n7{Lj>1#dUN&0Czl7%hsg$TMI#(5RtTc~lu$7&25m zhjI;WhwIXo|7B-Vt^Webg_TL%v)64ftD;zyYu?FsUxK;{3NbLJnBzOnD!jh_(D4c? z1y0^V)jfv0{Nb8D@j8Lu?6tWE;8K)hzw2pk)AJ2Ke1D_ZFO5 zGS^;@72g50dJAe(n&cfMEKzh+vOYAWZb50@`&IhQ)0G9f-I4$Ohx+}Y;{y~TIU#vH z=GlF&jxLrR{$nCDE7+>Y`vj!usCK)|RX$}ZMKp~0B#68AUB=wcP;Mf8P?7YrOh3(d zueIcTw-;tGVz7_8}r^!M0arV3Y~%@`&C zm5&gZKhV(a4l)B=OQ^;wI*fL`nG-8k8uL0dAL8VH^sPQfcfxQUC-@d$y8-CRV0DP_ z8~q?iarl!ZDNgxr#2zi1Q zF|}g1yiZUTF5;LmIXz8!^{K2*pP}7g*df(J&N9I;2fR@q&@LG^In+m{$Gci)zpnr= z38kiKhY5R;e9JeR;%vviT4mH;q>7i4tjj;C6Vm0BwTmeTG}$t3MQ3$ExR7fsWhwQQ zOpRL$Jne>dCQF2th-_M13QZ(Z?7frH%W++;j>xs?2WYi+8l+En~|7kgNVECMPv$*Oq&s zw5qKGlqS$c5~GksKWR-u4vl;RH!RK4n*gpwIm&|004fZ|Du>E|rt9lc(kULDp$gOgF^x9}qE-GaT9j!e8b_1o*ZM?JpCCf?Ga{5MlhAd`UJqZz%7B zQAWIaNbnW{{St@r4|p~k6n%LGL4OD0?4HC{{vq`52OjU~z6UMJVuP2CAYIV$SPn@d zK7i4>6a0P5qsvTG4XJ>S0A`CqsYky!0g?t`=!S2fpsKHGdtxyxEvKMl`2?~4) z#IuV|ID>H9Z2JY~@hi(kt*!yVY+1f#{n&u?*NMx72t0#!12M+OY@OC5zZA9$l^FDzffwLh{oMDR~X# z36@qk-|zh11E-$MF(7)2RL=tl<$H8|)Un{Dra z+);*5e219;kEQ7~HOzY;v+l?6BRxvG!JSY1faSZZnOj zauPTJaSO21hO&sY`6oaZ<{qp(&6n<#{fp23f7PKV&!A`m=HqAxKNH=OnFY@-F)cz3 zzC!y`)3dX3S&%GW0Gc}rix5+%Z$M_~8u!7#u8vEUylb`KslX*Ls*4YOG*>4iGQwnn z8O&t}*HVT}6{QQrS=OBz0z@h@hN6SaRZaB&Hadc=RcC zJB4(yu~%W22*LcEFZ~hj&_HK?$s27bk9=wHyKDS8pgBr+C}AJIg1P>~ag5~x+NGIy zu&i$YmV=*$(I5YMRrA5oQDz#Dr4LWfhmEE4JEEn&R0xQ=Ty*KK{F~KE>?vOXaiOqB zMU*}!8;LQ+R=_Ws{wkOYHoLN}8EsD{s`!Ei0dur&1R0&eCm35~Qt9O%tBP_ncj&t7 znJM?IYYNi`V0NsnC=&aPOc3D@Q78az`XO!H+=MiDZlekZzW=EmlUkDXIOq#$F;T#{ zJVY@H>`$%`VwCAezO^P3TNAP}Pn5fkLw8Jq7v<%nlBg3PdQY4ln&P?0B%Ym%+<+0& 
z_*q~VpM80h38a}iI2$1&mH^xu1W%jwmS8*I`C0P125C-y4-1M)Npy~0vRm9cU`d$}JlFRlcAI@9L$yAc$vcPV=MJ`Sy5BCHA1rjyuZ{ubnTv0W;6IMpl|{ zAHWPjxv~%C+PnpI(b;&4V@C&>S=_-B<$0Hxjlwco?0xVHkJrmUbetIw2ZQ7vpz*-?ORdG@_Fv7a_6|xd2xxT9nFTmW;aFs}_DD!U# zDUe^RP&@vOD1yvk3g2}J(){Ggw(uD51aa2G<~!WD4Cb2Eu^*Jp{VqUvq2aPL*O1K2 zB@=7AyBzH51}qK0oB+K84O>`pQn?-o7lNbtA&=KU+@T`N-b}!0*3i_~K`bszSv1S( z%QxUI8V1tRQytNgRQ6}0AJyk>LL&7TS)&P&5E81@5$QW%)+Fhq=UYAep@6d+o(P z$rWi6fV$!h%y|AB!i=aB10Ib6?Mw+m9k&9#NMwvN&oXjvgTUXvcO7-YV`*Ns-43K{ zGc5Nk&f*}>uM#YQKtrtnnyakVG($(Z0FkXusO<4bsW2Y7m%M?dL*0%w56^s(Wl?m! zxBhEGv(p1VugHz?9nXc+j9rxaFaZLt$?f|+fQvAX|7Mj*EpAx3 z1Wgwpl!F|xJE2@)ZjJUH&E3o zX8J)#SY6BXHGR58i*4@87v|2UU@O--=n{b7=F2nMs_8*#a>u*oa|lm$1Lc zRFCI$ryCjr#I+8au+}v%fvQ^5xpwlMdA6n2Kwf2nA-NvyW&&-2s_|cgxM_UneN2+} z669 zT@~zgzlI_B6o`JavV;J9=Go$1)4T)NeGwXp8dxk07!@H_Ez+WDHL6E z9J2`&1>z2=*v@Q0QXnmZBG};)v~2_tI`|P*SkKzq{Naz#P?{aB0l1;qEwWK<1E^{! zO&hhqq_EF`&19WU-h^^jlEr`3JM*M^4sK43J-p3y1k%}St2|ETz5r57vS3EBnBXEf z%JLGzg^R6zluEu$P`1U{n2aPejtos}-MI_oPu_`XNajMgC2@HV!1eiEQCa5zm*JlY z%<=-`0J5+T>H`1ds86UCAh$NAAd(zFTR54Q#3JBbroeu}7Y{Q9``~DP<|-ic>A~?P znuKSUx2cBx0O<};gh%da$AB(|?0#X}{s`pH;`9`gj*~9yat(c( zvnB8|DEw!ejTg=!it|k_qeBmW;SWvbm~Wo*SjwVX%One5fv65^R%RzWyZrO=HMGt3 z8=#x=;o}Hu+3|NGzhAH-T=L`JqR={dbjO*{{WNjV;c{Z^)=+o(>2`s^GHM-1RIWf$ z1^9r}bVIm!m^c=~NOKj)EygW5NLddw#r;jYAJ=#`8>tDdh-eQGmlfY=>DB81a51)% zQ5{e&BhD<3rz4Qx4KO}{)M9|44f`{TvFTsb-Zufnm&>t8embA!i#BS0L5_jqh=oQ60c8leR0OQ@sYF3iVMG2ao>J!czDazR6tJP7OO~^p$;JicK5A zq_+^R(`d{S4lL(BvkQ-TP=2K{Ei5kZZ2|f{1uFs%F>hVJ| zYtF!2k~mMT<>r@6CL4tgVW2O-tPt4uU?Cfe$gi2O<``%6>I*O`d6!~-;}NEyJynH{ zzo-hWGlKHHy#&JlC}0)26H0QzwizZ7{{=JZ4KxK*)m!5f63MTC(Ql^ZP~Oe63pZF@ zKEZSY&@Axz6Z@?7WL9_(u)`wL!Zk2>&HT>hkW9kKE$*g>H4T`#1WTkKP$586v^m0h zhDXcP2uHeMLO%e_p^ML>`EEkH!BD3NK9dg+7g^5k*lE?SH9&ub3$)f^OT7*Rk0_f2 zYc%s2mM00XH2gn|x(7#_P?B>NkCQxFB77aj1L&#&BdCt5H3dk)3EoCWls1?Ja0#$+ z;qyY8R%S%-6SU3}H0{K;DstC`pm@ySv@TzRay!jW&#be+1ls_kox<||jAxf=B5nuV z^xKQ85pd=?l$oR(zjSV8E7Nm5A9=liHlI;maTqyYX+$V3^ahAkXoRTS$s8uuLuELJ 
zLBt>9GPT(SpaL4u`M~RZduWJ_gS{k&?=c5`14&ix{5-;xBLAA!-8F9^T{Fi~I>^7n znc^KJU(rDuzH*p}xi6DyzW+|g_ty>8(8K=7 z_l1xKvJ?I&E2NlrFeZmq{FJ$@AOSeQN&-#+sU#jkEL3=Qt+;)Fk>Ctkt^JUwqNoWd z1YtdFq2~}TGhrR<-|-d5t@T9V98e7ar5o$Q;jrz019QbzwN&o-x8e(U7fSRlLGc9z z7eqfmxIeR8mGv#^a2eE`z_LSZyy&f6fG!Mo1A2~pC85<7j_<1rcIT^DSSfN?Rjy__ z-3Sn$=mCNwKgr~Ajc1C>!NwS1dm$~d9NmgLCayaH{c%Q-?)o6jgHfin)Z#||Uyltw0K_8+W^{yuxr*%PHDFh;fon$`7N{pgMWkfV7MY!c)SxXu z_#9)H>q$nql;-UCqh5VMUbp3Qb{VItT~91fWUBzJ-rsEjSB zY<|VaSOTCmWvfDQIBhxMw+8iHp5>~-kh9_RObAH-48k?m>@V776NvIesNd&2TT;iu z)ojajM*n#MZO*c16t&q-1Wg;!LtlcpE$+d*9p^yHG?^kY*?@K@Vj_qC6!DtkBFVs#bBjSoP3hY6mjT_85-uUsS8k&Pg7e$^W@`8B_LKba#0w3^P>Pf8im}$NP zDq1rPC5M^8Fub3EQ}03Cv3xQ}R=cCjNCONOF%~8HBDWL9MCCXUjMp)5{+O?=&MjCP zP7)%D#3LNEm~H^Mz3?tc&5@bip&KqxuFoKd0Efra1vLU{GjcpaCk?@VC6N0C66#>?ZB60uHj*MnK7 zf=4N@%YewK{5zO>K={*aHOCbG3W()e=4jG)XOao*-^%tXh^sk7|6!?;DI%OH1-u4= ztl>j0t=9|X_S5FCw;Qg5`7>*F8tqGfsQl9RHvkZu>8CVEzn|ySI5&BAZi8|e4qv|k zFpCxU7x~y(4agq|H%?Y709M5{t-wbimA#HZGbfU2MS0l5l&dQrNov%+$%LAxAT8ZO=WCY12M0W zanf%{jpWXs=eXwq7kL3jbg&&QR zT-*8Y43Pmy{xX5~VuY~U0dV!SMI}*r1>!Gbs+H>x6Oeo>oK#REnP!WsS?V66VLyNupJ%IUv zcC0RHKgzc-!SKls&>kkHB?IMb#WARB&D|n#Nx?^uQ2b*434~ejXk9mC!MIOAF3poz z5}!i*Xb%62dmeSV4B-lJ)e-ZgE@;}~ z*|Te`Q+T#KvUEgQd`}v7uB*^yM>s-BNP05KOlXg5nFHQbC<{mzrN!nTSzga9nzVR$ ztn>lVX_$cPn>V1`2V_!0guwd2TwmmAUiOrmAQYNF224A6c7^V+|IK_O>4~s|){`b+ z8@Y`_`wQ%}YR$?!4KP=HP^!KOS#dCiu}J{838!N4Q(I6UdH_h9Xx_`}1j3~lqw}IJ zlCI;{hO%&CR>%-b5YC^GSyzd*0ZRUiE#-*FT4uo^F8ck(_qQ1A_{K8;ILDC(1X2am9rQZ%f^(C5! 
z^1lLc#$5d*MUk)QPB#*~25<#&s=)U+xV{G{2FCu$o6O>F^oxCHe?(penghSh6l39e zasX{@dxr(jJ193HrXbcW{u9f$Ru%7|+~&+oiy7w#%$1iXPVAR{0CF{Mg)`@ICeb<} zI^stl>ybHJjD9QrYA2v>P(tKO(|<~Uz$22d)6D7~!)~lcKLfc?w-_WH`{FF$BWLY$ zeF5-nK%>5wzFM`PgQH?5Fu#1|(KSanQ5BG`L^GB?-;%E5dB}X@->GUK)BJilUUdo7 z+~AHU+51o%KyK;F{_KV*Q}~-!c9SL z`$1h)+%G8IO=$D$NA=7BNPmEsQ!?k(05m!&|GJ+?CMHlEJo{S{HB<%^4RG%^>VijC z;}`jz^4cT_Wu&vCpdLVY>GFFQ5(+lOA1>JPoQA?#XbS`-dFqp7Eie~$L8JN-qzgOD z^nih(oq04vy{{#W9^&I0&{T${82W-|FVL`?;~R=jkSJ{eQ`knhZ#;*nxbaj~+4}wx zv)=UceF03l=(?e5wjtfWnGs5;)KV`2VHn$t>H#}Y&bg@&Cnya7iyU6y9Q43eunR=4 zZ24gFKx>#}U2|8bw)QfI+l!C2$@d1pUtI}jhA;~31M*e6tF+=?}X;@vW zobRAuzO3T+T7;K<4GZGm|oAl&vfwse)-alSFm5zBDh{g@EP z*1S+U0iu*TR!c6QOljX30X&70gg;UPe$IEg2gxewIe8B_#33Ed5#zxs>6M5MHdjWG)2TBRrW%e;`6 z3lqzm0AAFzLq;bO(=Sz>Ky=$~J5Z)Ey)|j4-}Xg(W~Y z21)=80%`URE^((83D!WYu;60&`J$in<_3RIFA7y>@9Y^AGQdidM%r{P)!`EoKL;^8 zwe|$q0&u;@?c&A+1jrdG^22t%!TBCd^IihDXe??XPUH@Vo1|Es$@dkQ`8yG2tKEEy zNZv82d=28F49lMiO_G3yUGjJX04L|E$$tLVWMzmrzXfo)u}Mf;s! zZPSepc{FG5EaOr6-lPfBe+Hmr?A+nff(C&!%j!7(p+l0U&V2IR$(fUf z#`~Io4`b_NiUD2t=}_U{oF2nWOm;fDs_a`fX+YD=D?SI?ng8nEIMfF;b+L`L3xbBY zb&DwlkH72FB_w_~C^ezq#-#8nr0dRNI1KSUAZ{+LTSKQ`11h7_hW2ElQoVqdy+O7h zu0s_a7UqDy@BiLGYe2aP*oncTS4)}xe1qt7IAY#}Hd7Q#i39@x2nkvO6~S|9B8;!< z4fTB8Tlk~UrB~`$bpukJE8I<*5YBp{9r3Rw0bC(@m@sNcC?5cd%Zx(gKb1);lNL#4 z6U0(ave-jv0qRq+R$PLh(8TdVGw^Iq%8Jcm8`S{hDTKrImzn$qFs&x%muEb>Xfkl~ zn#TUk%t*!J!_ML|_7~ph1QhQb(djZ&*>hkRkJv5h3SGd+&EH%{i}2`1U)A;R z-H@*KLk{X{;sM~&Gk2H$6S`=fuvmMDzm}ofjx z8}S!anhVdYO;Gb@8H~cvy3Z4nDsp7SEil-@K*?ZEZZDuq2V9i92G!aI@&}6TGy1|y zP|2>`gm`Z!-;;YvtUX>O^st37O6p@bp{403p1!^YGOPcHse_#M0Ng-0hd{r{1mn1U zpsMx(__9>H*2Hf!J;5z#-~;HQIMI!&)4R+;y@Jkl=o}D<1udoCLsnXv&gDip$}~TT z2_GPx>$t)|(|X4slyi-`#R;*G(B_;ktwCl_fJ*M2Y6!06g z%RI&xV@spsFRR?JIbttYu`hwSpfloUC#2t+Si|Q|=f*C7e~SP#T5h6uz^H6E7^8Zi zDkjU}G&)B&AZmd@k5;(~;d%{oLk`LZTJ6kgMZ@MbNHeqbbXI%my_volQ3kI=n-6$@ zuqUQw>vJBo^lhE_zma5Ha?lTn!kun0B;3ruFb$(_AYI%hF%l7RnTuW zKLhc1q8R@P{jfD+&v0Y-SYkCTLMS%aw!4K&DioG_g>_)Ih13&eF5!fpAsq 
z#RE@96Oxrb2JHqA#T}fFF4;6i)6~bAra)PU8T!MX{U9?SprqCC6p)7kwmaF*E$0qq z^WBFWbdprI5}9TVpn2R0jFhrWS1FQ28~8GKu-K5fvxdWX^clYM0?2iZkT5jlwqKFa4C-aR5{5xRJ3y5PvQf3> z$kMMs5y4xC_AZZ9WIjCMUqiS=KSaRzJ!nb)N04R)haB;tA;kwEzI<0_ zvW|IHYs5(K5yHiMg6h=$3aApWBD9(im0%N}KwVv~Z6MVtbeV6#ecM(>W^~>*Wn+p+rIS`#9a>)CWf$>I9&Msb)6Ud#xviSM`{f39akc6 ztc}b!cy=v_Por79^hsihipMr1QOLBj_XKw52U}{VCLa{hihOKg*(G&6)1CVCcFZ#L%S4Q6&|6?`as-b6X^W773T(! zv-R3QRq6-x(7|e5p*U|kCwbBmwonRV0ED6wmW6#N^amuZu@J5^bqHD`0?d!{>@qP1 z(VZGe{}c=VCbU1birX2in@oTyqfXg8$Tx-v5Xj;t6W}7;Vj!6XfC&*Q4`l&W+E?K% zSnI;sB|tc#6K6$Xwv8al&GpA|N7x#OOF}v1RJoB!II9-Y{Vb7H1xlcBn;@>eS657I@DQE(wt+#4{<)WkhhiC>^`BFDFoE(?oP z0_-y-0kKFCj<^*)<{glk5N%QoUWd+#qQw;ZeZDxM7$aiS5fDwlTAkIR6y=9ZL_S0Mcjt-NL1>;`ekk#5aI zB!X7~{hdcEEc2s^J(+O43Le5#Ci5FyqG>2|Z@$VF%_4`-z^;S2^69_kOLc<25jUOu-vs)484yp{H~| z6%&_yQx;(+QeX`*4YY`NlShpr*r`!H5S}rlEh8lL0l2?3&RL)(DU>O{#|jMA&gTF4 zEa8Fx)B>WOEPBgyZYk3cG2PD6Z6H^Df~YY}STYIYk~ai4fZU9=6~$#^=>uYZ4r|uA z3F!$TBRO*IsI3(6d1f}aR4kxdK;}IT=9*%@04Tz|f(@S2HoUkE2uBDr3GuQJ^Yaip z5H9bd@Q8f{jo8BzCM-O=**J$;=DDnWK;3LQcI;RsdtmxDfG!$l4*5In z16YN8KbZ)N-U7S!&CorNl$ALFdAx%#uY8L(6BrO{vT-`|d#I|2G}H1z+@*2^>bBL~ zH|)9}fLxfG)(oheOoO%B_JNN;u9@Z|zr}H~6EO1!cNW@zYKwQ|C{Nei{El(JN zW6DO&Zvwi=%P}kuK%2E3AuZ!iEz@x97GBn&sn(dvJj%1b8j1|P)By2UhgVpWHz6%d zcH^>9XA+dED#{o0mP}F>pTW#2(4w_8^Hwv>0y*oNdUy-cW#=*%%C!WI6wBwD#BC@u zkdnzdu;#xP*9hLroF!i-?i=iT1}*tg#DUFx^$GiZBz~TW*)Y^B1zIf?AQhl?BO50a*eRQVmW+f7^T42?0( zI0P+qr6;c7B96KMb6MKN&sJYN0)dx{x^eIW&n^ePf!eV+1}J)ji%p_xAA#JEHIyIA z9q3AMDe9;EdpIrtKg}El=jR<{?=z6g<;zOd-e+J8UNz?XUwA|^2>qt!J%{q=#=;Ez z71||wNR8qa>WfT*O_dUTgD!n%Bdq)yF+2W)s%pAP0CjK)z!`I_AA#(Irpa_oBf7%G zTI|wvK~Nw9B1-nIK*6tRdH;6v>^ufzQhyb?>a|>sXtQ&VUm#-?1V!OI`{48|q-vIdL-1Zp9Cv-A&4k zFH$U&XDZ)rI|gKMWERxl&B3qGMX$_p1S9<3 zo%%N{)Zch?JL9*F5NSiIquXl1;Xf}y`D2e)W@6;+1TklRj(b`zL;DNbs*W#oGh5I1kZQKG8H`Uw#aQ^Hp(gZ&1(+_F}#LN{(7N#R$eAAl-0}vK*+3A^4xrf() zEqIu23BpQ1IOZgPG{X=pl~Eqel%a?qMk6(VG`MSEYv$iSGo)F!B!ul-J&ZXCkU5PI zk^fXC;W`z(4#{*D$SaB}j)BfAAfYY(Z~<{}S|Fl{M(ia(*ETluqFc8!;n>_#b*sY~ 
zpj#b_cg#LEGMSGT#z_1O&^)E*L=0}p*JdU~r~f*)8uP2?2@<16Mk#fGEkJXrxRA5T z0s1bH8zD~HU^EEdz?a}91kE1T0>1&KA?NU6+UF6OE4Saig(_W-0Zd5`K=|OwD$|U2P8(O?qE?{z zm^Ko)CRlS609WltgorSsaNr2cO*|IcHy@z=0d5w=?1+8=F^@3QdT9L!WCb^@IrT~A z(I&7mQsU^>ZE1jED-R4jy7fGOS4Mv8`38;`to#s$Q6P&DK9*&Vz5!NxI;v*D6ZIybC4FW+f<{dOAksg<_XnBD zJ&r8<=)qKCB@!xJob@Z${|0TsBjv&jhkPwTxYz%-zN8CJ!q(UZEy@ay4`wUSF2U;J zhRkyt0B#0u!uwU5sn0-M8?nTW{hMHXNk{5f^5iS%>{zvLfm9kNe7!<6jD8Qz+7Txz|gOf9%TOw{Q>P3(@EULc|`s-P?>LNfM!|5u(StA zkuf%q;thn0^27AQ6?*4>0tzaQpu7b@EZFGR8IJ=fbA@#;|iFCa#G;Sj(bU{)LK z?;(pIFF8CSRC92&WWI~eUjG}RTfph`miZ31AId_UhE{0u@=74l-jIh@6%J>3| zB1q0@`EOAE;*VkT==cxgUwX70qc1`6?-HYKm`*yOU7n_byI+QM0r0eDz@}}xfU4mT zk-+vwF|H&ix5@<@x4HrSW!5d!^(r)6`g?yL+lU_<4fF+%v^f&MlB*YjF<@}`N7Mw* zuA0UYT89qT2j=qN3nO0+v;~k`gE+P!+5PzT4ww=OmEXG%Rr#)kuMA_Pat~@)sM3+KYfWX06_HOV@ zHP(p@hKhi2HJC_aBqr@*k}in3U9CCyodBRhN^9P zW*LCaafVH~sON{wh4CEi07;wPoj@&Q;)aZT(r_n0RG7X5vp+$&!n%U}6pH!{%LwzC zXIDkrm6{!$`QMe_=(Jz*-&sbp7XO}un6bLs1K=w_DQ9J$yZ~~2=9Yiqe!CcJz5!AM zwzZ`&JN~0;-oUg^>uvTX$m|lZdZngBojjXo6S{fmGNgL~!xv+DSN=UHrrvjNO>*aW(^~i!1sH>DB7Yr>UEx7wCUMj z&=r|n_r3^Yn@fbp^Q zN8BrkNI!#=9tnN0nd#+5;>#|cgISPA5acZ=K1qOO79l(rXOy@tOO|zgbsOA8k${(J z{mabeKJ43pa*anMo39|<0JSQf(r%`&!O+)GWkR#k*3H|o!?KsJJ>tr(7_8nTG{e0> z;rzD`IT;}zU$h7q%s@w> zJb3n3CVUf4OfCS+S7f9^fdH79qfC}RLKPvRMrw@{5P#(d>HK+ptcEBH+{`iSVc>;rwcGw0z;5Q z?HfSVzF`THDX`;ev?yIVg4|q!bk?&a+FK`xvmxk}hNH`VgCT+Y7|<^HKR$L6)GGjP zbM6bGDP>bY&48*=Vp(}L-ijm6Wq`xafr_qk5QUu0x~Ao8#z`{OgbR zeuBOMU5btep>_oNLCiaCw{lGfrN5b}8j6)dqYNZ6EEhB0tAS8uP3OE~u4fYYMoS<@ zodhOe>lIfTnPg7pYw~S^m=8?TP_|a?lldZJvi8CsB!Ek%lKqaMGY&{#9GU8d1@)?NPM%Xblx~%h? 
zV3vYm0s$a5&jH*pjKsXOmD&7=Q8d&{rXY}y)C)9y2t6&ierXE3oVd3G3P$RZ$0nU81PvD3sf<#wBU6s{ufL2qJ-= z;MpJKEHsWEeiO|yLhpZIYQbi#DJT~P`!Qvmg|>WScaQC*mXRwI0|g~~C4JwS7% z&OMH@eY^oN-)!XHhjxppqu3n%7R>c&N_q|;`HZ7byvy`Bj;#8c!o$Q|a;fh3ehFJK z0FO?()CU0q;o)Qsf|IQ7F4#n;XuDC!{U=4)oab&(oG zF96(ga%bmTLQHF!ijzafe^ND>;)`5@BJ;&rS;jgcTu2t?>)KDf3{tv-)=H>^Od`($ z^_VLl{wkXTI$p*M0tl&?#)=Ph6^c*TlF$P|+p$)OBZAi;{lziO&-h|5NYx2qPxU&0 z^XEJ?s~OVtITPfprs3xXq-7*x`iIX!KUl?o9mcr{T{R&ge$Wm8saM0wD_N?6(y+1I zuJi1&(T$=%j)FMTM(pl3G6iv{B6MgIgytE$gLdH2l7xAvid^*o5MD6JgE3POu!;Gc ze3xdStmJO7YGU^LD7GwHz!vslO@WpmiyX0=B9+hvaqZcKE3tIfz+BKr;m^DQ&6k+m zNKT$X6^_d5Q{7=RA#dRVJqNNlI#@(y;NHrla{i3@(+eQ;QoC7?sZd-51L~48+T+5+vnLF8IP&&TmQ!mp(v32uNKpWcS*BhhfX?#wvxL)g64p7wQveTcR*UO z@Yhj)_~6uvO$4uxp?=I2-x*@Yi z@{+eBuYy`^bVgY_V-d5c2iUz!DH%k?>}v^Xd7$+kyy^v1S(&jTh1a2Ah0IUr5eTzl zLDvM!SaTyG{QTy_PTmiM;N5A~Q6oINYBjluN7V+v%rIT8Odd4=H=*_{!$}Mz*YWaK zE1N~5;H51%v=X|r0R&STaZeOej!ZF6b4<&YG6_^fN+CvKzZ!8)Q z9!rBDw+<)s1iwOm-xCqS8hgX5jB zatXrijQf+F0op*6L03A3N?L<Z6R!H0ExEd2bRWj>sEGAB2I+$M`CChXK+ zS3U<;{|o)Jm9K?YoYqP&fXo7}B873=&9PY5TVBP_3 zCeCqlN6dcT0lC!o*ljt4G7F2(@B4gX4BJ)2jXwf%z1cm&wEY3vh3A?kn!<%S26ZRI z2V#zMT@vDtfUr~w8Px${O@gfjy#eV@|GFHf?oI&^aH1kI{qXbLGJYv%`QF$==2>6z zJr1B~Tsa3Yqo`p>-&YWqr5JW(7QM)1Or;$?1jH4?v6zWb$6r@+q5S|hobAwc3D^ZC zl_mk50A|1=7I(Pp$4d5cCd18Dflayqoc(aztZ@a}(y5cC%odQTZZPxYp_U9+AxnEL z&pKd94-j0r_tRap2G2#{H?Uzc^8j$wb=*`LUI!sVMfO#%>w}~L*o>ur-7wLn$e1=D zTpGqoP8+IXH~li=Gx&&U*#M;3h=UH^BcX)3Y~ubXo337_;Zl@N{LV z&1!WE!k=fK&J51OLrD&q}z;1N*4Y9o_!=vA0%yY$?4`4JM1GL7JV<^wXG|fe2EYX4K z1s}m-#=5MwCp;Hsu#$v3p8(8oDj$ZgQxLNgH-<+M@Z>YlpOuGipMlYAOrR;;7YMWT z*Kkrehvt*C5OWTm;S-V?^|&ypPSIe|{VmCR4-xl{|E$Vr*0xV*mlxfsO^Z$dccN(~ z04z*2E4!R;;jJfC0qsgXouUn3tKoH%|(|zyLFQ56m2x*&CihAP@)y z0)Ze^2m}IwKp+qZ1OkCTAQ03k1OkCTAQ1Vj{$F?V)vIf4H}4V4 zh~&v@tt3~`=~eFKHe+1Uq=zxF>#tXP7{^SDgr>4y?^s!Am@;8EV99xgJpsUac9HBJOe>VXocliYa(2js zitm0w@BGpQQX~`kLw`g}oG5#YN5~ZP=W=yPtN^5UTzXG@hNI`L!Kf*u3Mfs>;Y+if zd=U8I=Wsf&i>%Uh>ErT6$d5uWMZqw+MI@=r2 
zP)<#Yp>F|cy%X(D?K^Zj?feB=vYn+;9^Ut8cF4;k?uSI90mPi@BU-*^TJ1jd&@*$F+bzT{u`_3Um};$H!(u8wUI`-bd#XIUZTH1s=ItF)_@8~)o7`&?IO(rqI| zzi7|Ak|wBB>U`Va+uXy~d+AN<5nuz#aJ&r+YwYOXiu^X5o&Rs4&Zx1>hd&0@wUQ~L+bX~vM*8`8A zTgLtU{qM1XAcv-(*HmQ|>l z%+qbxe5Qr?$!dOt0Jwain(w;rRFg-Mze-}?fTRizjz`Yo7^lFprz^Bv?+v}VemL!rCSl;^Cs9OyLnI#6~rlDXfLX|wo){{NqO#QPy&*i?y8 zQb`i=9`%4nSf=6JV?b)RRDNgF?g=ElW7-bI(@<{Jl{v;UR4U2*tuE&EP44IZNeitt ze>^YHk<6vj<>>Gw3?uAr=#;Mz7?DT8$q!^0QM2G^>--H=8y(Tx-ug^}95mH~F4A}h zOQti*LUf?R+~cKPe?X>go@!{%OMirnq}0egSx zl1fqA(qolj-kFusWVUu}y={q?P}l~GNV~#f&!Q%lSKGnqQWGod<&bIzAiQ&5Qu;g5 zvh(_FyOFo6|83DpX6^3r8CR9S-vdZ3Ty$zDU_?m|WK*M#1NL>`$R&r{q)CrOoHLcG znjL_pnRJw<7fg0Uz1r@`6eL4#xa?J?EYm%)(di`d(xaA|Qk`V3BkgX43{OG^Eq7Q& zW!Rm9rxBt=33|Fi^$-s0R(`1TCW5bkJu1WfV4gl)!qMkU^$y<%y*Xl-D4- z^qghP)&6JXH(3M8C^%SGUyZpYZ=h+r;daMO0P*CslwQ)K#6U3f+-+#8;wAefDlJh1 zO1UR_9Hp6{jmB6++(V=_oj2G4FUH+Zfcd;Mg-Atc2ki#MLrBsaXA?%Wbh@tXku2zx zdNxnFKLKD=vn(W7`rk+EsS2_MkQ~mgTye*dVggAecsQ|#sJ1UURHhv5#@)*fF?!4V z{R)&ZP}a4zPuax9YjC(*T3_vylveWwnta(Ix9;~Al9G>^by*p_18CHE^Co8c{~nO9 z9{iyv@t4z;np{5iAQvS{7JUMw1Gn|qrKWy-?xE--b+GgWpmSW&t-ksUdk?pZg5S_- zAta$tlks<0itZ{&bHTsiKQtbjeM@j{L?sP7_L^i9I)%vdz3rCU=0vfYnm9_#ppxXQ z0b6^pTOlg%|AW`0u??9t&#zRGx94xW=S_d#fz+!U66rxZL(KATVx7|?t}K+ph~1qg zD~QNF=;S=I$J-0h%_;39iSLwRW^s}nL8gfg8H7?h07zf)HUSYT_sJe*stjAEkSVy= zA@%?1{N3IelfVoz+0T@BL)~w#M=;|EaT1*@taI&3V8$ts5;~C~Dtv3ZU#v(Me5SC! 
z)$YP#LUCwJ7s+33WJ_}unR=|Rvd1GH0E$r$Rt@UIRY>OJqX$U{lpLE6lN-KMBS~wi zn`peZ^h4Z2q`yoU129c+8wH&wne$ zzX!fm^3S1|50R0>ZHGmUI(1A>RmL6Aw8VmI`89xc zLa=nAwncrW1?%?1Zj`P=y=oTed-7*_?$;ytqK0&;xum=gqQP@jXFnpH$Ri3mOgj=g z&_QXO<5{c{ngq&|XUy8dnnER6YJ4+dnTCWRv7DVj4~xNz)j&hXo&yh(r&OAg$S{N= zG(7Bd3Kmv6TFKt?X;2bpUqGBMpp$(a-_%sL*b}1hmV(T5c^Q^1fZCQdN3=2syxODl zz1m8@1_?(GpP)+g|JmQS9(f%SK9$jq-J{Z&ZU9p%cP7v`d!Vsp+lO&YeygWtNzgj# z?H+8xHoWcin)L1fQ)w=2(hPS&=@@01I(oC>UXNW)higcQeIFQRH$z|CZf+2Ds#DjHACXDb8A-e`edeB##plCO<;7 z#vyIKf(E%5%dc-8a4uY1C*J`-)TX{?vf)2=PP7x?jp!uAQD{k$O#rP^_e6;qM3ie+ zuh$8Pauuc;Tj1fJ?yjCC>bLfkN=3knZ9R}f{NbMVc3A4~O3=mTsd-0_H%&lDXeS_C z9Y1&3G(g~Vq)x$WLfj3hQm$Pz9QX7TR?9|?)B&Wqf1m%mBd&dDjnLfMEB$_C$&$Ji zN!dW9NOm=B;tDVcN(ugTo!X=>Q%S*aYYIR7EG8>brW(NQ=3On zrJi3G;<4}4v91%T(v$q@(L&-R-^c6m?PtC*nIKb<=ZLiGpQfF`Oy)&GoL??qJ1-Hc zuC%p-PmZAVq`8&mA?0=cHTd~n(`awd4b)6}#Lt|&P~Mrg05r||1qI&f zJGG*_EwnB4ZJ>y%^7&4gjHb4|Co>p=?MO0XCngCA->K!{8e6+MIaXW6yMIJ=)2~EC z|Nm%nyN*n4M{6e)+?Oy6bK2^3KQaZaw?tPhL^%eVLS_c93lh_) zaA0!%OkE_N0gMDD+n)=yb2|r1W0rkp^@o#?WXXOeD%+`^l7)C}x}5$IhNiJz!)5`b zSK3~z<4(c?ohENiL2KBn-ZC`V9bAa1_9{e~iTW~dU+e!@2dD$;tB?$IUcVIYI$HI4 z2qz^@;w~f!4u*60dXUu> zn+jU>ep0Bjcq^6=_24Io(8JdKwOYwOf~4%@UdJF2&`F*dUTG0e0GZn2uf-+n=I!fg zkI2qnLeF|c&#V_8pZ5SH3z_f&ZIsW^*I)LqH4!P-D?oawyJ}R7PEM7(jW?Y1)F3cxB=B4V9Ej4KF?9J0!I^*0w?asVA06a$^V1)7vDm382Gp)J-V( zPIBW^{nA&r^x&&{KVU0h*ykQDLuCwXgAe`LM9WsE&12i4Da}mn^6x+n3cqW#m!0m2 zOiqAP<5||3fL%S>T-{mRjZCp9>1}S`(_@q^JRa=@XoKP0bYK75?X(gt`|~$JurxWa5fu?z{j4wn*dtW#LjBZ$0$fO1rc69Wb=2JG|{cdBZ=Cy23oEx)GIH%}pJdB}&j48sjx=HzTVt zIq)J#`hSN?pWT}ORNsi%hDZ}sU#Vruc1TKIc8?0T0}!^ib$Z(i+}We?qT5`k@9Lqn zCAWLk-n(HLjt6avk+;SFJw2+e&&I30J(#Fvpil+d2Mh<6=S^rezwHMN1)*WZX8Qmv zd?3_Bn?$QP6F>P+g@|duG-9aNT61qV)1$hbE$SRP$^W*f5nC5N2@89;QfPiRPC<-F zYHF48G>Qbbi%QM#o!SnD{3S3JfujyJ__4v`bxDHcAHKV6_rs}CT_-|GtW3*<;cZ3=v@6Fo{OCv&YDk9#uBo(LbFK+<~u+PvpK z?IElXu9QR3XFbFP?|~2y=rmGp1{C98z(!L4Y;6daFnXDY{6;Z6U4tPIY`2|#kXX#XSH#5-8YininTJ_*!gpCzh4Ahp3^%HKzydWBPGa=~}_ 
zTz84u2>YDBO$Wq6VjY?#6nttq#xRTGeM8A=iN{KsMgFP_%1XZBzi&BBO%ek>Q%Gv# z;J|lN{ywfVZ1y>H$)$@U)1)mx`B)j&ZP~4;w5<)w_CDq|NJ_lwII`UV-VRE0PuJm|(I*%SGj`KH=w7bH2?N^sa++zm`=+qriWKzm^60@Q3t?d`N-i{6J$*3NRt z0A#YR`*b?Uf&5=yE^0tdB2)Nr+Zii0mra3^1I0* z%x0)l>&TNZt^2cfI^|P|EalscxYNiaKh<{Z7SJ_}`Mfmz5h)JgSDTf_e*SlhR_#_1 z$*N>Vc|A1rVX0!dT_8iQ0tOi}zuL_Ou0zvsb?vd-4&8vrSv5quiAc&GO&zAXTM1x3 zlJ#vwvYE}}Qg;B=FW5?Ah=QwieGpGMX&kA(DN8 zOi3<#MW`}*2}y6X`f2W>EJF)7(wGoyvaqK6` z1S)w?Hw&cA{okKuzNN<}73<|d4BiSeJboc_w&nALv9;Z2idnCUwWm|@Xa_Xi;!y1w z?L;Q)y7RjWh1s*U6xr=NC7&tdW~u`q<>KSYNZ#9NccHcDK6Le9J2~>%-~Xi;ihTf` z5kWJo7oK2~iJ_N|5n5BI(P%C8LdB**=}S}GH`k+(Gm!Kp(%fpkoh#4vVAUxr;z>x# zTD}rUjnK7|*3KeNqf_l)Y=|tNl5_VVL20Cevtu6A5OH%X@5R61c~9(Citg4q2FtH{tOw0oLT0OndShHvK{hTRrG(`H31R=kOnyBK(Ea# zuX-4=yae-WK#J28y!dQ;1Jq$Tk0Bj=hjr&`Pr$H#*Q3<=ou`+pG2R1}gdGv)en5;? zrz1NJwfy)aDUaY(3x9&7Lf(X_Bfrmpkq{_%lsbaMzw}^MsMW>40#dUhS1!+caG9V0 zrJS`}UJh@+L#he?PUDx?hW|9`dlTccvapkl5}8Fi*F2_?pQsBTZU=`2`mhBIwaGg8IzlctP{HEJ;BS#=wTJ4XObJbO3jS4d$A<|=e zkw@lNd+bBbJZqcqIwTdJaj8m>8-R?)F)!RZ}xz{@`0lyw*YBNT2f6}RlD0j zRcTKKlPf)rd%!i#-36pJQ|;=_J#>0}xrjCj+Q7e`MAT?j-+q8fjVJW&8u$+(q|dyj zdDQ=;j5XV($B<-U4QGr$L5DrFrRhFJVh@kp5_*P6gT+1$H=l!sh0j_uveHG>>jhZ7 zxL_%{@e&oL+4O41t39D8$6~w&r5x_Z%IghUIZW7^on+}Ttidm~OPB9JipE#JKJh-m z+d4&RACU6Ll7pJ{BTCxMk6KNHpU|lXE<{QB3`!Bl?X?@aUpm0Pt&R-70>W2)Wxno2 zeCt8#3bd~H9e~k)(Bn4z%cwfL{_BnVBnaC0XvaRAd}n-+>Wyh62P{RJptrO=k}abs zGT-Vu+*w$u)$um;Ac=J({h-u^?ZA{!zc^R^S$05DtHab3wc6Q()LCi`hg}_MF0_T& z-6OffRx)W1Aem#AdhhK~4maa10t2Ec&NScT`+NM$d28kEN%gA)^oKsd-in0n;-`rbWL!>?o1U3CtbaEbi!ZxN| z@2IOG?LnFfd!xs#d%AQHbjo*ljL1NZ`suI-c^ed7PL@yKJIJW;bEQm|LO#f!y9sD9 zqsZ1TNF`<&sLk&q5g+&N~_I*_Cr{@lG~N-_~j8KWhr0$jhT-@>0PCv zW6Kjj3inrS(LL>n=zHrW?w&ymkeNAxqeq^u8+1elKndAk8nG`C;ll|fe1+1k&Ftc} z?{M4pCyu>AC(r5HPI!wWZ_3g+lgf*byaR()N_ zUmFn#TaO_s_9j5us3`(%A+;HnzG-!9>0FNj3%aGl%LK!qVmC^TTwu z4VsLm>QW#X0!T)+K&TT~Qlw+EXKkpq8|^z`sej#yD1p5Tl3`pPF$QHrio4-qUR`Wq zz(Sya2Iv)H-oLEIp&_QxEhIO_6H}O~O)Bn+5&m

!1 zH_f1F8l9T2vf0oRW&kOa37aHpm6`(%g}=zLS*epJd$_VabJ$cnv!{Sz$GG(+ag6K<6r(@2VSjw z7nCM(=N*?y55ET+#*mi39N&)jd%$rMXzoco=;=)?I)bSQ=^-eLbH?4aT_5$-CKl_( zCG(Vp5HMAunANS`CxDV92ZXL)YchdRP9$Caztq}1B+fKhN}c*kMESqW9y|6yaj}Rzm^X1E@8DA!Q)$hsEY!q z52&!QY|CofhSY(D6`Tjy#U*zjsoC7+Qky>aSi@V%2Im)8s#R{@hi4nU0y9X)ej|B$ zGR*4;*YEw`iv;L~J*~_R`Ik$F*@#S@{L`w#CUixbyYiv#w+SPo$7PJMKg3r2$ zS8Ux131h|O8%1cbT#uXWNK%HK$x|igx$d8 zcYg3Rvj>twTfNDDZ>MdN%3)uRQ5Q@~`0npW$+$ueKvL#qt_^kT$sU9mMRO581qm-` zfM*$lX|?H&IWHNU>dnAvDsv8GNrRBCUhB47GveqmY(EXhD5qelk}dN2y6tm1(J+Fw zWOb&64xGK#E?F-E(qB2oEAJB26_EaC_or<{to9J?f@#%q4TRUt>gKBNFt;ugX}Ah- z9T-;TwC_d_lpfMl@0&oQ?~>)lEuZP~Q*{V%yOV8!dj}bgjoxx0LK68`3fMiLHT>4~ z%KN@koobP`_w)c1u2}Av;)sjHJ+7lZ>VIBZ$pxv$kQ9BkdHH&Rmb%3zA>sSq{hNP_ zooxb-P{=mIGk>MwOQ7j3&jHC+leWhgUi2Urw1prq0a|$JYOUg5A$|y)vMXrq{JKXf z>mY@H(*ep_?IXPf;NAH;cTmQ6$PAL1;{*mOwK=v_^V0`(_`^$HD*O?Yf*P4^?)%gu zQ3d7j89nsab~CWV^_QLy2fI<6zk*a}vW@OovS_2-H33Fg5v=e0&fpUw>VQxve$Pw*e?H&ZTmX1 zG)GU9#C}N1)(lx&8V6uW*1903f0G@WRb7ufSlNJZ&V{*lC4L&2eo}rUn@K>m#|${b zaCpM4`odfX>o{?EeB)#Xt-sGMmc1U;1DN7bPzI-qr(r`6UZKCM!~!IZXpuk%L57Pb zB5syDU>0dh96)MlAK2zh$qtYPoN=gxbyrbRZz& z?*j&gax^d)3qD8!^R)tfm~UAUBfeG2t@pZE^ca;vmuH-wpi@HD@l8c~3JG7Vd9v~P zEJ>)=acabKR9IV&ugT#BAe>+|XL_pUec7?*=cdVvSFq%DaVf7+y@m{$EQ8Ps&51BE z0*&&<;3b#f8pq$lm7R0@a_&x?pw+B&V7=@;D#fbXn#%bh!IhdVp<;eSC;M`6T85&Z zAgL;M@!4wn+ykvHUXJ%-NSHnL!|T~sV5)q)dEF}Y<{M0LYF`-j9hIsbX+EH0(YfJ& z8nx_^St4*FGOaN|S0G)`89;0gY6~r!O`E|($(u0&QPU$bWFiK)qQkQ0*uOawuTinB z$82uEAlo77TQ(=CCOzVC20mP9*x4hN<$uixyCAC9Jkk<7sBn96ozD$I2at|^u2~D! 
zc8p;~0`-*YdQCBEA0)MQ)z+}vj}C{KVAU;}8jA;h1X{1`CwnsO1y;xuBphxYF8D*d zU>2F~iM8`PuD%4F0V=!AfznOqdPLh(EVv>yPeRkA)je!biFXQ?S~_uRx8eY4iREjg zo*)a5@UxvDEcR3uy2FcQSi0FoyA`b~uY!`WX{}Cn*Ls*bCXKVP9%gNYr!1Naz})i- zE%JuX6t=l0XldSrDci6WRD$;wBxRdkqNv^Oe4kB%mnfqR+EjcvD zeRK+ZDZX_c0Fw5Z@=@>*nGV=p2)9+}qn=K+5BY;m!EHyhlQK&kSeR;JX?q?|6RRvz zTh*SSlbLlD2^5km@z%p$ActX0d1*@POHi_DR%Xqo8~+sXxM#Fw6iml~FA7nM%^6&N0oLM_#AJ=%%JqwnZJmew^QGjgeV8~*1}B~Hwe zH>j}11@`4U=dlT<8?dJ`iZ}NFv#zjYqTAAwIIMJAQK{kJo2*ur+kmOzpG%jn=}y}N zG`+9Qo;v{gZoTWX(`Wel+ZeHeO4E+j!PRbL8iB_b3GE48&c>RKwYNw9Sz-3|&;~m$ z?MH`8Q@CFhIsi%E7@v1-vDCE54z&Zy;?fi-%&%`;p%wjew}3a`sa_68#TV4@VccEEiT);su>bNWNmV4%rr zakyl2yT_euZ{$@m@AOc%#@c%ccOfak^#~_m;r;&B4LYRQkY-8Sg!1dg~ul^<=J^u(qJ{}2 zC@r0~j#q{c=x}7FZsdJLrp~HVest(8J;dSKKlqF^u4c@9UwlS+7(0K#60_|lkR0Gt z%i-rp+x!Mhv;MxG_oBY{U{>`_HfYxkdq>5$#iO_z(SszrbfLBdHvz&&^*#GX8wr~e zDUwSiwxBi2pS2v>+7n=vT(?HH0a9oaZW+`>+-(Pk55Jd1&<^Av+e$<5{AQ`lJ3D$r zVt)=%cR^D|XL_~Cw;K>99&Bciy4D_0l0R0jp6o@Y7EHWL?UMLDSU6NWes$8i9}-5M zS-CdaUp~;mR;uVOlfdM7$O@|+=}v*P(-pefv`_q)9Qzx^#P?9xC9wpTnfTXq?)#;8rytz3!t=< zCnzR9!`T``!xrBipt4$Rj|7wh0pvgfEY zfa3!umiB3D^uFlua0xKhpD%l2WA9kIRxr5MzKrFvy6_9@^noFKhWmp2XW4?Q_&G6w%gbOznfwVNXHsiS3eXo21q zjh~@u?D4YU{DRbp%5a_lLZmM9%V*ZBcHi>%g=Uof9WjUydvtkkY{UQ3SnYVGrml@B z1IBq5f#W-M{;jz~+>B05ve($U))ruLoVF`h9b;>ccBa|8ZR^n}4rLlqTEKRoO4#ga z6Y>sZcvs_&6W`p5+u32~uIK8}#5Pgx>M1U-Umf{(*$qr5sbeo$?CC&R#;ea>fIN-r zGsW|L$dsJWX_pT7LsH=*m(ClX2NFS-pDQce$^Q45^}6Ug)#)l2VS`SBbA#vPnI6K@ z2W#d!oeNPX(JAv}Zi<}h0291A1UL;a5SoTd)b#i>cHrnS=ycSnc|)uW&6hzb#2B5j z_ODkvvUI83EfY08TFgwy!mB;d7~{e+Ke`S{mn4tM-mrDa8_?cRKfE2^1c!6+N_Pvb zQyw`yHAmziQcGKt=OujykTTACf~dyB-5#WjI>?WEfMFJD&)vt|^FB1$@KLWt9-s#~ zRZ4)}0Fno$mF>;6n|qItVUe4BnI9e}gl7Gd_IMCx7~^z$3CNz zjq|N#kEYaG`wKAHw6{t1tgjFSUE?K(SV2gC{W1yk-S;4WW7|LMzx=PGnwIwgZMv}o z#XhRWCICihh@VUU+Z?hqr`pWef=mexwp|XA1(Jd~FC6)F+?H5QquMtAc4U}Ajni_~ z-*+U)IS-ALfZB;nm*8H$PG_x*5<*F)Y694eOe2lyN28K$`%Z>)but$Y z&j6EQO|s1`#~i57yXTW>@%CiWTB%C_r+TzsSC(t-M$B}`+v{1xEPzsFPAtS)M2E55 
z6cEwHmStGVJXc#A5?Y0%T2qFj?!DFl@4#10t^$Taw@1uD*L#>>YloRW)5ACvw_k0P z-UJy%W`SzLTd0VKVPCf`t8Vvb=C6xwbK*{i=CsNDsL+-nWPOU+?`&jJ!s>A zg44qtoiB@7^?d+R2$Oq_#D_fqUm#g`J?fC1@p!O)4AP3nj~*myeAC0$MShykBhY6) zGk)qg$>yj^E7Wr^=0>+~h5;`il3J?C`DOmqW4R4;$D3q|=58)Nn%U8NSh~cl)fp`&bRN1xe?&*pofxxv>YkEm~*KGC@G9X)iL(arM>|Jw*UREHIHV_;*4 zuDEh`sk+Lh4*lWoC6$+;0i_;iR_ZqI7IZjAL-EE}mONXbL+4((#v`r;R8Oi2uYqA3nzLB_pmco1wq7|D*eKG?TK~j%U z4)4J|=->pG<^y*h0@c_qyp3`@F3419fvIxa`jG!)XgF2d=}n1!(jk35u2`OeN`q)h z=rcq*gR840MbKJdtTy9c_?9OX;c~I-We;k1$qe)gT}?^4Q%exxSvg1ryaA}_us`=U zlzoyKK<`jtJVBW|Rc%N5Jv{7c#)LwC=!okx=W8YK5fYZ2yV@KFm7x6uOvQgIC91Ud z&!AN7Qd{bLK@W{RcoB67AcwC%!Zq3cttWMs`-V1Ae+Q*UmH98W>Bk%Pjgq(hQp!rF zNq)uLgvLkrv&awMsTQ5PIg#1|3AZ?NDbt0mfOP29vV7i#OouCf5^OMX4zeAbI`P6N zlO5>vf`J#U`|gB}I$iAhcJ+8a+kB$0pu?`8>{r`WAx}Ulz+Bma?nP=n{k*o4_MyT6 z-FR?Av>&GR3C&vhAd=wE%)-u<4ap=ZWj)d!MVdkH!1zO?p!EM| z++N~(!4>!4{}ajLQv)xZZC8ML{0Vn0$QE?URa+N%NbnUbtrTNYYL?QzPBI+fDA=1G zWVqqe#_L;X3geYZssn66rmBqDM=u?0?O-!t*}Vpjs#yHbkXh;+r+PbtZs9>+FT-Axf}FHFJb>ZoU* z@6_pURh<3E)u3K0I+ZItr6Thrn+H05yEk<*TuB&43> zbz^6)C*;txhULkgkj*={X`}9*f@V#>Zfl3lIpv_~45$4ORcLQau+=eM+ZsF`XzD?mLYD)FBnOs zV95ABe6TKPlC(QBec%tRt;>)xzEjI)bS2s&$WW&CF!SRc!d{IxYsr|z|8M^3U&t5V zqxNqle%6Cu@mzXs0X^@C_fTT|0wH{Ad*WsPlUYw|idT@K!?pFrYjl{(*miUk?@dxT ztOjp;3i0bktmMzxNn0vDo3hhR{63((#>huYf9>dj4*3X|oDG&98 z&iMtM4qgu!>VoZD*kJFZjX!gkOitg4=tDpXTdfuN|G{ zlTO#)*=c$Po1b0i@Qv^q^w-_6(O|%pJxJZ!a$S0RL(~>9d4gyfOM9kkKT6-@uZ?oz zJAH>PT|p-klt}nl-=9LI;m1agk`N@o@Iz(Vg_9Xbn#_G@YOKP{fs$~oN>u1%4>Nvl z?XTw3`czMdv0J-2dKywAvsu3^Akq$p+H0VT=&*pp%RgeJv)sebJk1Zt97~RCM2af*p_s+dK6BaYkT=NAg$4y zRop@1@{xnHw2}VTRNaNS*Q1Uz^2g@;faF%L;7ZAP0KtgmmGV9Mu!nbZww7^^I>2dS z*>yh#$opb>(Ujg3WYQDC+fQT!BrWtOZBUEYD(Q)MvZtivbATGQxw!6Fy+EfzhsvP( zB~pdPW~VG~d?z_RmW*q$Uw6ohN+v$#egl#q1Ljy{ybY~y9UGe+BP7si(DU`+O*?sh z-@`C`P_qwcjHpXP1O+1XDr2fTNB9IuYqkxU+z$AhINY=v2wxC`z{$Xu&R1B9pG zx8i^6kg1N&h(X^$qqOVsuGUjF{O3_Ou+US!jc7xwU6&y$`oE0Bv2Zgw1+J;6ddU_@ ziqkHIwKTUT4SbnCNLHZIg~zRgYhMJA(zeUgHN)=!;cWR7ZYk_UN4hN%?{>ZTxmC|y zJ!XSyp}IRU4-T_b~ 
zGsTY+*i7w{(4o>~ehdPpdZN}%xKmd6r8`acMB~^U!xTkCDF`XnIr z_$6<1R>4mJ!xl>g4~PwWy{Exx6+XV(8R7yY1uUyxLlE;8dx~|?#;T&&w+swt&d!@x zS3_c)t0IKWk7SnT2g~oPu#|M#%p-~GfRuFMheP5UNl3FS)cw$#$Os!IC8nTTJx*;q zuDbH9>)i&X8l`wvk?(Y*?kQKdz6;Sk8CDf<@8$ocwZCf`TkRwd#PIs73D^e-tPm%T zlRBuB!I_`BKI-&uZ7@DYC;OGbW#JP@s`|Hi8yZ`02aFjj zbu75qH!fXnb=ZOk+Z|I=PLRpOH0-ih9q?}JK@Lrl%-fM^dxKEJ?LZI1%}$x{I|1R| z?9b#(kA1%Qyt~J?u+z?a(5bnLW1fhkoj}r7Hr$to`#1Z5%1nin<9tSuZtd=H0Dc6~$KdQe^0aEZO6(dbT*WBNjIn&dawJ2)q(m7B{cV>NQxtMVhq99J% zq;M)J=qIGDEKj48ACf~igKsZ zaRGR@(|Owc9(ovo>yBo~3_wcjEw(&0`=Eyz`*WpiqaOlB9hr4h+e3N;3!~dBWZ3i= z5hW~TKzgiloR+|_$LKA1Kt~6~zrrde+PO6(h11Fn(#};%N_~Odw zov^JvmAUFC+j=U8MlQ*=cj)|loz+Ts2Ph42&@TUKmS8)9$<2I6qU=Ja)$Ji>gHzPq zJruW_wE#k=O!d6TAMMa`FKm!BS(=n)w+|SWd#${r{QeI7vTPYj)^xxvfl?(V0ZL?R znc}H(Q|R!0rVOPZ(?Q%D#?q#A&p>dsPESe}%_Rwvgo>&7C()@?DIIlKS#$I$sIfa| z3Lw(#3zxriK=w6?OH+}P)m?D7w|Z{|yC6)fF)MwFt0kgjKQiyc={ z;Tu=+=BnCYxDE@87MA;w62nC)NyYnuxvKv5StOGBe+S zDZ)}2!_!kxO6SrB=HEjkD{FUd5{Vp0`sdLq{sUwhRb!RaK*_O(K*MIbmQRoJ&Al8U z<2yV&dW48UrE50>joNv83Q9frK0ep3CCTF%)CS2q*Bf6B&-BQ@(bH=3pM#{OW^3c{WDnpyq;>#s3KEfEVlMN?++aTq4olDQ z*Dc?Lp3sF-*cSW$SM*>REqCBxY*JSNqam4V&ubkyUWbYjy$VT}aO=K=VAEHCDf0Ny z-$)T;m^gl!htBp8?oC)q$M&G4-Yq~fJJyb!Ep<8~ul9EEogT?ne{BWd1%#D9wWmYw zp;H<&=g1D~J}{*jTV^Qnpp%EHg&!h^eEFnW(DfccQZ%QV<%;)l2j+AFA)Ww|OJX4DX^OHY+W;ahDfUAh`TiU=a7srI)L*h>AvWY6Nt>oFMB+SXB(NXdY~)9Tsg0y zUV|#9Gj&e!29eqwsZ+MM$nd(>2MqTGc?U~je&<1@a()jzl?Ng6qgEp?DK7)o)iv>jXmn1^TXd>h*lKdGzK|%2umessBEc?F$Q?NQhscbj= z-=kpOIj%eXRkV%3wDM|m;I|2#Y>lrn3*8LJ2rxBVx?0f{}R@Hd+r+J7%7 zbv-e(^}ZiTT&{~S`_XB-k{dOa$P(DlcWZllvPU`8DmB%kFu`TV7lOWM7_l+h#39(zO*<1NkH}Mdl$#j|QCr4P(?TRjqTn1Kb~%3bg>h;c`1ipddy~ z%;1%-tfwsllP~jo9&qR+5S$u}JJ(RjW7<20{=Nzb7vwR#OLiTWlxc?7EO&}EH-M>U z?Ip3XXw|q0m3nCh61#;OdQMwl-tM%EPyw`8;%cDye$x3v_tW9Q>9yHKlzCO&hq8(a!r{K~kl*_gosz>m+5? 
za(PN?zCmY9m5D=XpG{|b3l7I@DVME6HN`t%Hc}WOEM96idRt$7?~hc-_E8;~eCU8n z5T)dO1e7eJLlH5EB;M>oaxVS3M`43dQ{)%G&>nmwwqwq(ppn$8d)+0kzQMxpEA`a+ zcVyC5pFbT~RPWqyp!JqAYesEErL;?z+H1p`I#ABUaB?#sY0e&GpSGow3+r_NgbW)d zm?*gUl}pjImmY4qntnSn6|e=Qr-F8Xh!)Bvy~VdiuS*|IH`;|x^Vx)L>yX`$6tuS3 zlxq(_ujl-`*ti!}S}gYN>me;@YnNa@AT>C)Vlq5{9CZZRMDb)2H1rntiwc>7gmt-h z-P(K_7?vB8=jx1%kb%i~_9(5Tr(#pC9WtGSq;FQ^l7Y?2LE;`rUxh!83kPH0cLsPB5cs9>-f2A?e6e(h$>8+ zAYgjnEA1HiCOS2yVqdWpCBa*;@UI+Cx80`Op!9;JcA0*OW4gw%J3TC|!Unc>y9-LS z^p-kiECGHGIJB-M7XIG{B;&bZdZWd_beNS3I?qGjs!bh?9rYu03S|?Q_93Ikpspjf zJ0$J&^a(g+9bT-za4y?6PyLZHthqC{0RNa^O*wC2LvLhzUp#vUNtsLT*H+YJ2FLjJ-XE!U zdyk~nj30W67A(U}){ijU`isnM8@8X&BoKKj!e>OXJ5*;=3h)JzRE4MQspMDCAO%XM zeM1iQpf*V4d;g!4WDH>2dywxrc<3o@lB zpEs>LZ3QLU<&3^?vHi#>XP1rUO$3gQD*S zB-<0`nn5;E14@^@k7qlzPJ{>^P)cW&QV$|2zyX*}Oiv+p zd`CRg@*baBdrU!<|MH5X0;)|9W0~VwO{##*W}vCn!Pz;ICsAs?QnHh%)L>@*S|)~5 zAdDMMw6xdhp1{w=vjyZ(QX@A9SBtPz(BZ1uETfhDV9kE3zDGU8daoF{1{&I?`X@mH zP{tVM*l6kh*LQiYw~OKBEtqUADJ+kCr`ogE@a7IO9cs1ABL%qI z6QFXp>4t#uz*Vq3R-&{HYXfSOt!c4XNRZ>`-QL6Y&|GQ@w3OkKxq zrEAqEfbgT7B)9qbX-`DkH%a^q5`km`lzbd)#-I1Nb*9-?iZ6QH(hzI>y@cSx{NLsN z%6AGgRofu1dwj1C<(Tvh$oQ_uj))UPn0kEq8f(_tjC|MQc(f&2TPITG59rj< z8FZ~yKLSc^v7)E8^nZ`Ly#@FT7^XTL810mPNgVsXf_&|9I2W%y-EV+Y#pT4?|@IuFy%;u1VJSKJR@F8_Y{o$ zD1&@;oJOS?f8s$1zh(f*hTACPb`G6_l)hRb=OiSF)bvF@sbjYzQd=%_qOIBJD?dK6Qi37tDuYqT7}i03sE(K*TCr$GqqZ|icIlm%LIfN z0Vv3vgWDTE2RXMqf31z&o1m~-A1T39J^2s5Y5l0ICnc@ zHQR0}-Rp6VHK&;O(aEY=%wBBk!3V$;=^RC9xvX3t_LR;K{+J$hf>HzEFtfv-?C zjMq!9y+))FoI3wFEq?WsC*eCK=UbS5cVS+Mu8WIA`Pz0&jnkbY3g zF#EUG4?aSZ@Z>o=BAVh8q)KnHYxjjdqp_kcLMY!CRI)njL^Y<7Utu*&t6zQdokn0q zGqO(j-XmR{zsfqH7U~-&Mgf~~c1p7mm~Oxw3C3?ir*yWftg;A^j@ZFyqjU>o=%1yh zZbhcvrtsRA*#_u6P=hoja62%}pf;8%Wn;z;XgW();@G$ol+?)>%1IAV>bJ!aVFF5v z)m8`Xiqa|K9g`{LN7CDB}J^;S87kTz4xGup0w%w*Z0MRY}>y7{SpNN$N z{i*i4r}77{`?8rvXdQ}D>9#Wo_D4<4bH2mNzuIp1$xcsY$F|ddHB-ye=v4m9=+f%~ zC|nx+Of@boLenU=2ivjrG9;bCt%zZjx(ZXd!{c*Ai0?4*_nMim_TUy#PFKsg;yNse z8a8ax2os2!mDP_pH&H1Ht4FGQmgTp4tlH44$>%mClTRt;OPAL5;ll60!+cwk)lu(u 
z;F9cgoO^&2^2C_6&f_FW>sM;`xW}F- zGdmIm5RP3mZ7@%!_B`!@ii=`B1EfR~rB*ygj#$t}d3z1v1vK58*BE|#+sW?Bp49O? z2K@?vnT}m+k@7m!p*)cA2BoEz>WQSczK0fXJ1bd;y@RG}99nXXpxWy_NZCdk#2=99 zY))TT$9+VnejazQDy_$z;;IlF_R-OpY=+_-AX7jU70X&7MtwBy#MybAips@Xu7s^G*}4TY$;hWtA*+ zda!Zk0Dqi!;!U&w!>xnyy4SW*ws&;=AC6>qK*OOKdwc{-Pq4Z!Q~c_byMW2}0^5{g z$ZiOR%pE^Qr2L3dT_$I6dx68S9eJ|d2MVK06V$5v0i&MNZdA4^9e{=rKbJI`MCu|F z_FGKAQ>cuiu{g&n8KzOD;b!}@M!-yuf3RL@p6ldKE5zSJjI=c zg&&LU*3?3$>&`K?1)WMPF!NN2 zNPVa3{~O2@z^xecP4w{g)za`V+#ow{CE^n2Vl^qq+bPP9+f;JY8P49rV_KR~7!m(SG({KHQF+^$?aLZ=}vIaM3I$VH@dycXKEPDp3i=;F2d*YNJHN7d@ihDA3CukumhMI ziCN)}uKxJokD-&tN#RF8IM1=kd9ski0YyezR5_6RbBT+8E29ouo?Tk`i?LFvUSGiJ zAG4*ke?^8R^LA+#bAkOEP_O3jg81>R)hCZoW)6-T(FVSxHijj*94OT(aJc00 zi*~*HGz1?k;mC{y#K5w@gkDT&I~k)FDwiQimgdd|W7v#X?eG;_<7=z!bc7xQmiCw9@S=tK~&x31~6rCTt>_wRm`bkCemAI?b0lL+laaiz^QiBawmUk zqv>1;x(kRTpK|28wn)CK*F9*m`1QyP3DaXaR4nE{=)i(QKEOkOW;$>6@yMq-Ew;1L z#|Q~gN!tF%6Qr`boKp)Wk_4@21|ccpJ6+nwdMRnT!*iG>zkJOcLVTcx9v8m~F9A|t z@CtTKZLg4NVztmMUZca%N#{-$kmN>(E}e6Oqztm&f|4qWg0h)>*ONKXwxr&p<>J-h zHkE%sD$|%fLa{%h)K(8roll5lxO~oilH%lNQ2LYG3hSkneM$T)?h=UdwMVIw?KT~J z1Em@#oU#!M=oGLn`_l^Gx?J)Ag3_3^IkLDAt&o10Jgm5LcCgo!D!gMf4yPN=}Sa`(-ee9=?01$STMN2XGAta5oP+L-ukX7{)lN1l%WXGWsRxv&? 
zu5S4@TZgA;rC&%I&rr$c!lDgV;-Le4BbL_j0+0$(vWGX$B1QhQDYxEb*rZ$hlf4)_dv6k>h;zG=rrkE>%U~~WDhdARI9D2PDeAG zMu+bssYA-ZQn$%^89{00dMK}}@ZeG`IoVU1KK|=Lat57|($2b0_eAXfHV=jikd#jA zlyH+woXb5*>BPNk1>K359LZAe%%cKHuW=sLo@(=nkagtb7s+cB;I zXn!Sm?!g8@%_pk9a37EoG(*9f4j+J0lf&!HVE;!P+PSuR)Z@4^9R2w*C}k@97^cJ0 zdGaG2eTZ^`CYS!goM%2$DdMe(f#(nfGw;`_zkB#6&_qULAiQ_fHzC$Kmeszyo(xAhnD`ne2-T+b-Tb9;x zc<>RF3O752rpbKj@n&Pc`Wc{qjLpuHHNJ-`uiJPsbiP7{g(HW>obwJV_-{R#W^+wp z0q8TVHfuuJFg40~X7SoEpKOE~(cEX@`zAzcccJ!MHzSjc&h2LJ$clLjyb4#l>05pG z3hHEf)HYCUtBXzo^tFpxcd94g z&2ZUG_pr0I)k3TQdgaSNj9dX>Aqg6h64IcZDRFiRnIiq|Tzk&6(;Rlmd;y&bEwDlc zEcO6r+EQ(~)3eRV&T5aaNRv~>H9)vFEWqMr@hW%_wwLR9m8L9Q?~%vKMx9Ur47!ZF zO{1;zo6xkMnY`8N7DTtQuBz+Nw?ow}EG52!3QrDEvzP1G;cic${Mpq}+C9jS5noz- z@wMZK`_PdnXlL*0^8gm9cj8yNOAk@z((~@FktsbbWuvRa6yz~9oMdWHx5J(Q!ulKo zJ?#Mw)y-Ct1(0f5yfR zWXXd_5&p%G0)wt`w5s2{^PRLOEy&t^iua&Y;pcO0h4KLq{?xu=8ulYB4a)=7`tY*b zC!jKL-dg6lpHb-&?cJx5u=oN_eRZ6sd3=Rr`1657fk7tecDT=9u4&*qFlF!_Ng2l& z@EeYdQX=)5E!IXzYRX(ytW9WxYSA)la}Qh>?rQ&j3nbZ2j2$CUI%!9mG(o1T&}c1d zvct3;imy)lDPubjVbl>f3l(T5V90*G6y;t2K$j%g-6QZLR?nmC0Stn#RFl2PaLjaQ z(KU$IeLWhR48agFPU?X`zJ9Q#kd2EKJ@i+t1U|Y~Bzg^qd@NNZ-%4R_p&kNgt|1e!@ zj<)f%y(d@BG)NdU&bq`yxa7ZPQq&H1(COCeW#_URsYRCBE0cQ?*7QHsRPw#(2n;$+ z^C2I9?)%`wlc8-8=1tBibO{h#Di%#6F}!J<#0e_x zrpP7i<^bVf9nUw<2{?GNqi5`Ojh;k4^$(H%th7%9l$4Psanhs2Ry#MG7NK&V_hyM? 
zmlOV6v1Zk`l2pI2=>b>+q^jkCxItGzX-Gx@IDh>|5L-;`#NkGg!PE(AmN9X&$6*kr zg>Ll#SNm84iXYQ+L24-6)Dl9&DG}2^G=S{ zdg=vI-?BqQ#J@y@Q-|Aal2_=|tVYFfVgnn70Y_)ex#t^@MyO-<+~ax+$hgREeY+C# z4wybt$N0JWYV={shc`pZ3`L6~R73(j!Y}(LP@~ zFl{493w{M;Otnt%Eu^sPb$|JL4^AYMJJJn5kE-r&33K8mJ7Bt@dk!^7H=#1l2frK6 zq_zBJa7t1I25d;SewRoM=cR@jaR_saJ2f}L#bw4;OWP&G3& z;7(wT3v(fAR}aEC$H`FZdAmEn(yaHM7qqG+VW{-nCh`EmCJh>DD5;bWw=7guQ4%`bYx}foa^Z< zoZ}*?ML7uzYp01&tUU!#e~-r2qb6GYmLX`4&vkzHDgvIKy>|Mud z&{bfPCs}K=Y7LOO%$l-e&UzI#$d_vu&os+}>mVury`A&iK$O_6Gvk}XcdZp}B@oBW zN_rcS!(+#1Y=9b5S{d#nnA^u#bQh8QYY$jM!R{r%QrkPbj~M2OONRCXBz6!G})CFuaM#PUs3R11OETrUS@BALp#!_Y9r|_MCtzH zf24!>#CRLo82lcA@fNw%58uh^NNEEfL)JU%Re?_-XU?rO!@$qT6!(X&DbyEm%4dKy z0~0`sa>7(oJLBIVvTUvxzxUu}H0sX{)1xv_4(T1pBzHJ&;x_@pMh_L+T~LM1psLS> z(g(L7lHKa^db=pRwIh6ww`=#?AW8Im2~?s3kVI{VXLRlWNi?@ncOt^$W31i&tOFr( z1DKqS4tD`}L(BtencG-<&??G#FGJPAk-I9Ma`6jB9j z1Q8QG>}>f#p>qJz$&JIo-f*$UKUAA?%gABWuPp!BgxV?yU-fuNtRYfR_GrxoWeal^ zoC?zd4bbao~Em-9h^6}NcI6Ftv8eQeh5f)9L20!-B{u1pO3)RKe=zC zejrk}-`n#|Ptf5ws~wU9nVx1de6)yt*1_5f8g_x71A8O?Ua!A3N%$h^wKFmtdWp=u zSsNAZ=eH8Qf+~d7bV~RdJ#^)H2gmx{npC<`@>47HW^X+R-r?kK( zQXoS$+X3DA=4$$Tk66#qVg81rqrOqDZQJ5_BPgY0pwv79ZR&xtIVs*|kVZ1hTW(vB z8Z>?Zw<3&@nG)gKe5V*@>!#GzuF7_3vY2okyr#?@NnnZb9Bl-V9xzsE?CPPX>wXYq zcS4_AD1o>K5eCIlskpeeV{ULc->OcxFOkNbOA#xGbhc-ojS zL8K{v{$aE;-NDxSHqYfSlSFipxnmTMB;qzRex2;$Czpm7mkASKO73o9T~Q>uPA3f~ zt+mU&Fi^Jq{I`f2RKBM6#zb%#nyQ?+Tng(dI;oZy0LQhmUxS6C=SxYuid0)?fmPe< zsC2nYk$E?I3QpUG!|I!`*1~n==~j=y%TJZ;Hb8=Ee(0!m3@QSrys|X`b{CWu=X$#B z4%|zWy1uML_xu0ta6*wEK!y%Hzg!Oqx3WHjrmU=oYkl;n10MLTzVW!HR8KjV2!8?z zgO0jjK%$^iJY6Gl>RE@*mQ$fB(en8g#_!$k|KV zCEReVRcCsJO5#)BD0>~Nnc%-3AtZt`3^6#iAW|1%takbJysa>0T`!5bErH9oC;@>` z$=aI}YzJzXz^VwpRc3S+vltaZlGkya#@LQB2q?I?6bPKmDnYK+U4=v=m^na zoz!+nfq3qGMXOLgLQ>6|$he0LqsFL*7T)&(X~lBoSao;+QPVnJZCdL?baI&~v782Y z1RP4o?Q2W=aZhQix%?+1AZk~xag6<^sBqAVgMRr8oud4nn>5dR0<|Grr(-W5y7KW8 zw5XT;U$0#@9sU(W@@|w@=X#AwF|FtNA;BrG(SX#^pUaB^)d~J^(b32SSAY*h5dORQ*1o!^u^T(N(Yf3>mdgZ8SE2 
z2Va0$`02?OXrou2zrtl!mjPDWU6^lx^f0Ek<#x1j`FnyX{)CmOOg0=JWsk~l$!A;mi?^L9Y`Z@uHR z1BE@~ZHVqfB=tk(JA4;1yro1EM!V6&O!N^WQ#Hsx(u+^fd1N zj+EY2UH1Sa<@<@dOw5_=LCSi*f=oe_!wo3vg%H!B@$A?vn4@- zRwzzTs?ATKQYe?-Y#C8CAgQ)%m~;&!mRxD~2^SG5-ng}$z{>!MjWaI6l2ue#w05R^ zb*!OFke3&ptG<&F-?h!!=z8J~?mcgWvb?Ar>zgQrsDW8#kGGKNhZegR7(dst`!+DS z%{KM#4musAxnHP@ad$y!ql0CyX~VxH%Do3?`l=_OJnJUh^j+oWG zOm2+=NyX25R7#c~0J5;fu9hDWL!#&ER;3R1sY9mkVlue_3P;Uf7M!gnUy@GU=C@sn zP}KEQ@>>tzHvMvh@Ew@Ko>;lq4#YRijEXiVIi8<9cne3>3_R!+cxgzAnJN~MS)HxD9bHH zK7|N3C-hS?1KozJf#~mr{`bL)H8d8{=``67mf12WEqD1`dmRO~3X`1`F457WII&EB zxSGF@|1xKB(xdajI85I+V5u-mo23#PH+v$i*chlw_f`@y;}Re}LY=tP0mYpjp(gNl zqwOw8L!3NGZX_-v05YS;t+Ut~?hhb%Io<9%5P_VL`oT z`UHltOIHWO$*1UWZhpZ@Q3>Q{Ao(!&lJs*#de;?)-V%KQXgn-g;XAD~=RyQo zfli-4TH9l0jHS&nGm79zIG;_)L~HR2h7U{-tAtdqq3AJ0qySL+NJXxKkn(tF*$2W z>0XFNHJvvla$iW=z|rYh8U`{Pwk6tbl+ZCccJ%CIu~KG}9V_ivOTea*l| z;ITLD*0RTU@AtzW|GH zYlFot{TR_L!o%20YD$2FuFD}TYzPl@23g&OU?judLnEz$HQ14(2dN;wlbuw{OvV_w zp8uI$TD|yABa9oTTrrR!pj6r;S2aLy0aA>k&5-AIr%g%HxPwj}wFMHt<9B!cDDF;8$<NfI1YO6kMM%WmUhGV10<=F ztfjMj1f;07)@;Z0pAv@gd#g6-14vU&nm|b&w64;G>{s7}Y?;hChe1ScPRzUi@f|sm zdTpz{xV7O#i#%3myBmEcz#+y(MrIvPZ33o<>z8cfku@DSOp_CBOOjZsJ0@Fwhda$9 zK>L3iESz0!OTkiilx{mXrCXWTL3bcibAE?&0I{K>NZo3N>Rv#N9YkH(u4NG

zvOQbSP6HNuD#t{W`ErluxP^2<>Kg}-O%f@-Q`Fi7y;$AtYC_Y*+QVAckvV^*Ii7bY z*tfWE_y^l95K4)-iIOVgO@bv!I1XxJ@HR5l()Fu@-2o)m@>tqB#9dGrH0@BEev%|q zmA_bGr6;j;sokY|00`#|I78Z*yM!M?6~Y5KG!@@fJPz`4@-Zr%aDBB5Eov-0fu&rB z}_85Kc`n}Rs5>c?4oeyH98&q z(3t(zH^?x81%N^kOm9J{n2|L&W+XqLDSov=Y){|ARMt9>qT)MMX$Mt=1|U@_KkAM- zYM6fNq1Mh_t>S$KD7%Gq=+H$*@LKOC5Ij7#_I~;Y1z%bv8PT{8`2DSjg1C~uJBJ&j8R_MS3;?r)C zez6TUWNn<*A=dVum}ahBryYQS$2lypvH^uTn%TA9w(q{HM?AuaP{iE;%yHQp>-Hd$ z`K04vr_SoJw__LS1m&iJ9DyY_%byr%?+2yL7wYEU0c46i#T>tO?Q@8oR{z%tAGY5{rzGe8Y)fnc1(Zzm9E=1#*0u=OK|J(f4 z9xJ&IDM4{g_j}+w>~~n=hFx=g`w$q8m=gYAD1`-&V9B_f3_S)ZDZxU!Hc4cw7Y+asrWZF~5O*4Tg`Sj9xC4brf<|nSK~1k@Zq+ z`_!Xd^0H*f)6Wozo~<`MzW7cP96{SuLEsWW3h;*CDa~=sJKL94f+2d4?xzT^; z%at6t0Z7-l)~;-9LRZ*2d+{kdPJQSbzLVWC(r_y(4dqT+c4fBp05fxh1S&;v?W8pD z9RLNiyPzIB5rd$i_r)%2SC4Tv_Eft8kx>SEGrB51?|}}Z#5Pvz6njC6He5m6he{9N zji?=1@9)S_R#7EG4nPz}#iTeH!rD>ErVvS<;9?c7=F^}sh^rF{_zXH3%+~caDa=8_ zo1=dt8c-5n@go@W*I)79a=J%t??Xs@0h0WTt1`VI780d&5RzpXmE4b=tD6R^=rH!& z)h2z{04WY>$oCV008s$D7BaYwh@d()HBAs8liw^y5H{nb6Xo9o~T?f9Ez;+`Bypy|}H5?{$PbR`*LO7m(iGEE%dVKY$Gr^|-$F5SbB?*InBg z`lFtb#}C=h(lUBVOvo&VN|Sj4Q(xa0{uGf4+8fh?D)FqR!ixNlaxeKDQay(9q$Dp; zDal;jXMdSLOBzvb;#ypl3?UZ82C;WS1vN-U940-cg)tX{lY5B7Z_$-Yi>oq z_5k|*@bK+7SjsZ7Lb?+UJ&+DW5wFJC@XJWZv5Tn3Wk{ZN*e0Ng>b@j~ z@ATHA?mdwj==7i8%HU0R*_zM`b=!7Zz9&j4-tIeEa65IQnvF8u;h&Mdro^QlE8$LP zQl_Xh)pQpm$`c<{<>J`MeQ{|wJfm~hzb8L>Jg?X^JLA2OaQ5QT!eGL+52i~r=iXSf z9~quC&sYQqAieC8L-y7SCp*LgjO^8KDyeAYnG**I5@a$u!3xO^u)@vsAhW-?|22mU z|7O?ALEA}m^6?1xpRG+#{fKe!1XTl>qMdM+Rg?ul7-PlKJX0=0s<*GT>1{cbLX90F zPEayz3U4*f8hYrdUU}3mR{;Z`7i(=yqCiT%F}fj7`rlm56Pd&bBt>rWtkSsEA$-YU z+-=an3k!Ne0+;XfV5Q}@S6S|Y!Ve}LCcS%TRXJ-ZsId1@>5jEBtOMYZEB_x=?*rfU zRp$AgKuV&fB`R86(c&1_bfgwn9OH;1Aqj+%KuSWPidzyANJ=0nNlK`A{RKh^8gJ=} zS6p$$g-RULkt&XHj3bUX;`n2YE3R~>-Bx*JfGkB z`<&-I=Q;mVEILIWX9-UNxlpy*{)L?ZfXK$%qQ{eeI!|AVaXR_u795H382~4g?bHz; zCIB;I9dk8aEfH4{C&6tXaO$v2M+D=L{w%Qh5)bxjKK2}tI2pIForiEH>E3mkAfEuq z3b~j{02{Pq#a#q2cXXN%HIwM3I7e|g(VVj&f-BI@0&CsA(DB|b{)dG0pTi*Z{1_+p<8an;5!Y%0|igwBm)StT@04x 
z;Za1Y6w(sctV=>jrYw1A&}NcyNEe%Pgb2mF^z$m6j+mdwY87b#B=*djmDnKZmmL~V zS3p{^=+Px+sA4;=1a=sh#*~Neq9fOiAFM-JFmNpl6gI6VJM-MWj z9ZySvEEt75sA4TG12&iSZWvM50%T2&aiXk<`s9TxnB=7az${oDH}W?@7y6HQ&vb05 zEue@w?;oN`aJzPTdC{AqQk|Q?%=k5}41zX8QY5zU!M1h?r?8=G1)9AR+Dt$-a|L&N zySy9Fkz#yF=aIGE6tF4ktNujOkJ0xILc4UW+n8gf|K1CN&}jygjOLR6rr2(VlYfpH z#8Y}B$p>~Z^)m`>2Fm=UVh;llAu8_PdofT_1l3F%t0InqINN9}E+|%*W62k4lUwpo zBp`4H?!{S~laOWznh55NK^NNVE?FMq08Vwgj1nk|L>t4|I2&>X$R%gA6BC^PaRu~g zF?}+T^)f`xLb_75MW(mayL-=pI+szeG*o z%tV|mhb*vArgAXnR1^IG^KJoNP@fNA!Cs~9K96?k4#)+7q|wT&%!FBJYJeFVSY81| z&g1BVN~?r)MbiTG3=ay!{#n44crLbo#ur47u7Gq&xpS4DtDw!gc;=MkmL?x+?HC)- z)B~52qZ^X%IyTxVzWU^whues+A^C_qa6p#I!0~k-gBallAOZrU?B2t^(TQ z*6aYxARtwMd}+FUFO<1dy|XWlJ8~%&ZeYspov(G<|;L_Fq! zCesGl!=R44nd2{UCkP0dki~K&^(X}Oy*r!dJO=69_j9iq#XAllSz#kAP2(g~CWH*y z2ckTTfsr%sb@KA`xC0^IYwJ-KP>xei{lV5V4k06rzCVs9CV=4j2JOk5Oh2=|Ju@$W zoGvTQEOSYRpSy)=PhHH*c`#VQtc}Gn4{eszWhcjEGf#m=`H!>77gIbupO-BOTyi{Z zS{{)gTu$-yaY~HHX`iSKP}giGtcUn^_GKv^?J&VlEr7Gi>+w5vARn+E&|F~S1B_~bwj6o<{#dN)CdZ}g zAoRxo#{hz@SzqGW2;j(d8aUGsn*d#2&8tg~Zw7H)RwMA{N;?=0ll64&?LhHBA}~a~ zu?eLE<#=^%85i$Jih2PP7ar}vx|EJY0XZnwcu6P*pbRdtyBGVHPP6k=s%Kpa5GIAd534(N?xFdfOF-dZKY)wBTgyqv za6Tcmj0CiXGzH?Uu_0;~<7yv1z6fktYiX6xUV=7b8zn-QA)Ptxa-)%70dT#@l=JO~ zSHWDV+~b*cp>IWd$$W@V8Nv`q@qJkX~5XE9;y$1^h$ z*!cj`)@W5R~LN zWDt)p?1gkbHQP-3IRNsh^XpK3Fh6qdg4d z+}Ff$(h+D%#;^0coZ&0LOe^ygp(a8WFH2 zhAUV!DF&{+#GXlHbvN2$0+O6?@gbT7w+o|Du??5AV4139l~@Zu=aR4LEvzT7aLm{T z%(+;zRvRqyiPkD7+63C#q;O^-HiwHTs<_)DVwb>R2ds;e=9g1U%j2foE6~ml_F0$q zG+qVKSUFD2Tua{B63m3ebn@Q7PE(CIXFx1`Ej4HpDDx8&S-sm00Aj-?qbI!y;bJi@ z2KU3eBFz+qdNGs;G|XGOb~V}qx0zS%9e-r43@mG;nrrg_tsKyj%Y8qHmM19Zfo4e| z{wNY?HHCJL&_OPMB&BFeF2t<@N4vTL!Ud@DLGwz0tYK)fil@8P02Vn6s~Ti$F@&?# zx|7vAkF1senlE};whGE@ewd{j-A}p{1gVZw1KDY%Wk6(#sTeF1yV*y8L4w;7y;{9( z1EjOK)vG{F0B#_0F-9Fn3y`_DflC~_<9gu6gt0!NnO*#C0&~8YYmNf48N^M#hv^bt z1c5w573q+V6r$!0i35v3L7?yP7bb*^{u}7esr>zVwqa^kNIV~oiS&>9xfJivH zbl`a}gtNS!S4drpxr9-1*ag~=G7RQ)b|F$DP-X^Jf;ys60OIV3NglReOip!sM^eZ} 
z(mM)eu2#2EAID40T7G1_p(vm!Xj45Z!at$%bt5OUBhW4?H=%TG(Q5B$pe*hh zTE!Vimt3#;YAiL8Jj9JMlaMaEY%;&1aW;9Wkuo`#$Zk!Lo`*CSRx#I|hjQhRtF5dU z$EyjaKwXBKDD)STulOK^wm4AJ`yp7VN&=)QqyhTH&f%Gmt%RHvyHK)IyNtQMg876Nz|UJdVj^`eF`;MF}R3U?5t+3S2>x*WSlnHf zg0Nx{t8}w}XSbOO*bbFtCeBZELOWq?5oQfG_G8@%rMro5#;SMg1$F7PksTVjROJu?=g~N=G1FY>i3Plt)1jIj&a1 z#=xCf>=-#54REtO0_=j-@PGws1ll#nb?Gt)jsZ9uuoxQxX+M4|D$UBWb`x#BlfW(~ z%hkR%2JHff(5^s!$8nOIx^)ktQED$eX*1#pq{))VT40$c+#OUNPjW^`zla2n9rYT~9$Ob%%0dgatnm5Wn*gp2<`+nVLWfO-9uP%+TX0DG*t z{Y`R>egQR`bXAwnk5+=IH(HB&!Ra*EMWN>a7AHU(Sm5ar020*=y--!A#BW%+)YNvJ z_Cc*cnXygUBUGQ~gc4&z$e8Yh zGKbo{pRgam#cZrzhcbb5Wj3SH(IXDMQsW?02$a+7803~Q;u{9Znxb!lqhKQ`4%#*~ zKML(MYq(qz4~GFPhb?R)i@Q^fB&Y!`95a3t$h<6^a*FV=yL6;71^I0f?n4ur_u{8ZyhmC;# zfHq689kZ#-S}^CbVI@i=`Ow0&g}x#AXkLRY0%gXq(&39eT7X;tuGmyBu@S%`%y_wH z3+F({=_Wu|y;h4P8=FDM4xO#;$B@N_U6SHz#=Iq1CxFvaLt+yjBN-rPd0h?a11cI` z>P-7naBiwoI~h!Vm!pjL+OKc3&q0_Qj2oGwSC)psoO6b+?DL8<03(3T0(#C(iSJ%$1OqD6FBVgI?dd`jPpn21gMZ@x8t(y+E=IGF z_v8HF%=ERgJVYa-geL(JjwbEE8N=-ksk)b}p+2;NG7d_0#B&X;IxT)0Ad_Wy9B+v} z1LU%EFBu0L^)~7Rn3bD$1NgWmxtL6$=?FaieKv*8Hr!K+b6`}1`fF4Pglni-qaP@@ zRhIuzBPn$3b>leoB3Q=acpV_=UIHrgXX$_r_+@a*(Q?$Z?8GZ6@bZrCZPCoX3S^N) z--~V60L&_`rRSC;?--a)-r^IRs1*QbmGKg4C6ODH^Ba&(x}L6FI^kyW&^!PW!hejF z!#q0X3T}%Cmn1MkEroV%_O!_Wi9JUd;H|15uc#FuXPqT4>Hc{M(7{A}%mZ8R@4nNLZf3~7%Its+y^1k zs3}2B()-%Y+mfI)ta3&sZcGpx#5I?SvH?QA0MV%Zvmzp817WUn2I*0_e#F0SjaTU(O~2l!>l@H7 z0A@Nv6PSJ+nmX#z<}Wv#ARer zRntjCHUZ{z`+Bw_50lW&Huo*d)IFP`K@l**K#71jIo_;EFM6B@%^FEOgF?`J3QC2? z`;n&-(l*c*r&lh5P_{0u^b_P#iiKmunCg`Na)RjO{TSI5AZIdKHP|Nr)D+gZvq2$CG5t0GKJVu+?_1gIEh{bJ-0jmqPPtQE+bpWyM7h+2$V)hJ}X;S=i9g zp(~P$0nA)BVs?47Ukc{9b%jwGlv%wpj^@k#D=k=qhkKjR`Jk@)HTBJ? 
zkrV}!QPlB5XeY{iX=VvjfVl_{AX#y5KqV0Q(918#{~{=JkF&tC_Z9)2an?{@2x!|N7*miDeuQXBraL zv9qwu)dc1&a+#^jxE25xYHjOw9d^NfN#O=~{+dxP*d{QNi|2S@&t^#1HfPx4&i-}~ z1V#4_VzofHLTaKd-3{#mG;qsNNBb(8eXj#BQ%)o8PabN~9O5|I1k72D#}6@Ik|$kr z!XzvyW4JPtBA!=C>D5kAquAS7RNYgmRL+$EJxw)D4ii#6CB&IS!r4Tvt4d zNY$SPb8@VQ#5IpIw|uc>#ez2hMp8UGE2D1`%8YC1iCxRt1Tdn~aPM4-BVLad1?oJQ z?H_D7ZN2k|HxKMg^7b2g=u_azkzOhw&?(PbZOH@k| zYBpCBg4*+Q;wc!0A3Yjbo(4j`G7l780| z>>i2|Hpkyt$5Z%XC>NB69+9k4Xg4sH7SS<~nKg=(WhX<+lb?0!|K>qvnkJrEWwMf{ zGauB|!Ohm1(p~^y4sbYgx9&Jt2twgn;}O#e2(q1xAS;t+E?S69aS@0M&pCkYo;Y0$ z=33dpmW4Q9v;>Gabs`i+lYZx5HC-0DSPDcx$gWuw3~pU}Zwfs=fo!(p| zR@H-2;TjJ$;53h`t1(!hh%-Jm(Sn;|tJ(N8$VPyy;yYN%CkvYr!rLnGp!#MIm!qXK z?#jbd0djF=%ds?1z)mowqg}_{xGlGZ8&0D*^a7jf8X4fPAHen7$Z{v@1seXaiGs@B z3*kftdO0T-W5|J70C=_mtpe$+)u=#X;RujRr17=JZ=)b^=H54=P;fd*Ety-39szS+ zb#7$a&TKO3X!6Qhv(_Yt=@=Nc_+9!?lo6W&N3 zS)1n>)+>I$zZ`>)j3EG+C zTAaAVx;Y`bIz;7bPhpq@gzkhkb9Z#1I;~2(0nNEJK5@|tVD_%ULSlr3(Do-xx8!(6 za1hMpV0wx+xEI1kyD#4Ccof>%j{AeqDJf7q#}nJyaWJ#e2bEu{O^FQ(UHg zsM>Q7W*qkhaOo#=EFdoIRxbwK^3ka1n%b5nhc?E~OYaE6nKYGPLs% z4N!CzgCbN7unV+~8LX)HuL4;}+A%^|0n}u`H9!mXYK`TlA*ncRS((9EX!Kegi`EJl z?TRD142^JG2C7*sz6q737vBPgL!m8tXu;5?vo3}*JGp^Ct)mpcA)2v^nLWc=0dkT$ ztjg$(WXe-GL`zeW^8noIYtfTub#&K^GB=+e%-hDe`)Waojd491SmV8gU`~`Bl~gO) ztN=0tIY+N6H#m?^_9}tZlkrp_3I@Uz$zi>ye-|f&p@1iKK??E`cL7xaWHQQ|A#t2Y za!V7+z7lGSt2mh*SO)64S))hk-GFNWv#wkVU-gjAyEc);Imrh5h-V9ky9tuoTUpNl zDTQSz7bOE}-q7se>LzGPC!>jB5kyusOtQhrb^x=au!A=;xYL0;;{;bX1ftNkhkEP9 z?UJTf2uQOZ)J)S2rm_wOL7Wghq%4HJ09kdg7dj@AOQA48*gl1N0Wohn9&PQ;ZsZyP zMEa0pHO5gWN4(PS?;Zy5=th&-dWINgre0MX-wm!>n+-cEx%13Tj3@~%EjnVkW3 zTWZ;WqJeTM93OSpF`1yo!VcB5Kvb!29EuktoP#!-G?dOB4mod66lFQqE7lFPMe2d- z7EBvRr_@HMiY_}rJe?JOK#L;qh>~Z!}LswuSMJaX}8xwkhWbNSQt7bXv?hiLaz0GX%ExN0XzWdblM^}^mF0B4L%*sQCl5Q`JG8x`lP znU{b$H)}XI$OtLXO{gxk3$%qGKBj6tvMhPR0A?Pc78-`sli7NluDR80Ns;B&0D@-F zp;8nIgn6TWy-RJl1;{DzN^)<157)t z@=Uwbv;hShP8b)GFzGA3G6F{SnH*7tj6ykM>WU=6he5K&@7J?XVZcX#;h`Q5L%Be> zcq`+6fMd|a*@L 
z&TY)xoJo;%40ddZbCeVIu49oJ2Tnq|IE~EP2{{Ym%=C3^lSu`34$QJk&nVmTJb=r? zxhcA?JhTOZ7fQIMC^qe> z+yo|LI^aX%p}oK%I-Ynkrx?WL*r4%SDWtR8!aE2srwqXPp~ovcW?v3S>)>KFtQXv_ z+j#0(7T=*@xHmsF&x@uAK zAS)wAv>3<*Xi%$J0%;jQ9OK0+RUl4@gE}n1$52a?x0PI4C$U%tLYW41*F-IDa{=YA zbnE?N&DuuviQU`uhDZ}`1hjgM-g;|+Agip?>&Ws(C^JFxAzq8!1V&!dnH~pFUq;u} z4&|K3GjPa4XF_b-p-DipjA8B37JC6KP21zmB$T1G7qGHH`Zh zJy_(2aeiz0^Z2#3P)f9k>h}ECXGhPHUzv2~X zogN2XNqto3gWSq)52$nv&jxF6i;XelPS)M^u2TxCXcwXI77Uc5+v=6j&)3 zIrZ&nWe~2GRdJD%5fX^=-p?sSlo?tE$a!Zc2Xa3j+PqxV9$)EL08nUdTC-F4wh+vD z)6^eH$Fmz1pst(l*4;a_^%8bfrr;dsV#uIJKo)^H&HmOww8G*<*VFx@YM@~jI|ySB zQ3Z)G)k>I_`X^d;-AClzX4Jp1h z4Ir9Qd^+aKxp-~qKmh@pDG2yu1ltGzpSdodK${?Fm!z(-*Jdblfk_!Hi-}?FfG+X2 z*xoy#oskWN%TquoBy$7k9^6jue!4^$)(=1mddQ7Z45ptud)nGaa&H1?yJ{u51YlRl zoEipjC2pgmlSzfe0w$fw#U+xz(c}g7ubh+8VX%S>-Wok00VOxoK?{D4nxfZJH+Kvw zYoA1mU}(^E{7<5Z^NrEeJ(;{}_YzlXY4_w<@+xx+?H;Ed#zA2hI*1+r5jkyyJg_Gq z?qoOvViwa8Ql1HDGm!;P>FmkmfjvDi^=zV5(P$iK7dG9;MBdMXx~AAT80YTtAgcTL z9M}{@)}nOtv`Tvs$O&Wn$+EqaqQEYWuN_=Yp_r1Tnc(RPkW;VLwgL`XtKzRZE);?; zaHZ0&8KB%8Wl;N?25@CGwqVadk+&UaY?KUy^VSxZfbf0e7Nj9;CkfvKvS=`<0hHku zwiarUX~f60od*6D10tdQyXk{VaZ4a~#~Y-|pd1Co(e2=5v>c4;uoXo=G!NQ*T-%K% zNAt`FF`wAotP2ztBro(e@v!IUP&UWyi~7 zY#Q#5^9wv#rR#l9V!hj>LTc1sVmd7Fa zIcPJ98&Tt4qVpiMD-KL3k$n1{9o(_Eo&qtqv^7q)BKig_Q@OgI+RrpyazvW)p>i&p zVyGhO3Y3%O6~@@3Tm>=1<1KTj834x_J9y7cOs8;cM~vrwWsY=(nOUk9^7QdfQ> zg<sg8001jPG} z=A{Uj$kY{dH{vlFMw@E_LV_D*+O(vfneXu?w2eTnDjjp&1ZA<%4k58+GYDMeBo%SB zL%6W&=uq=A+B*RmkFO@z-M4}>v{RSX>);$PBd&f3XSb>AF_}<<00j}#uGOeY_a>07 z0$c+a7fNz~)<-*a#Dd?xQ9zfit%HQf`Qe1yqOMD06W6u086ICmmeV+*mUZ79!oui!}r7(x84} z4OZ;=1hK6KPvj~OWWKNt9!M~f9!3qM6J~OS@S(HtGpD98Wgz|Y3f9t4G0={lw|_h0BW5tX4Ajk|j>+wE zsKT&y7tff*ZUVao*c{}chB!4gKOtNh5Z`290Aju{c9(s=5P+De1U0h?D8vABj-M(a zoE7bT(xYn8ib-gro~YlYvyZ46&`iYC9WQ}&mDjcFKoLc#0umQ7JdvlR_D|m(55O#g zbPHrKsBJcqh+05cBMUMb+V#+mR6ARvur~m?$ZK>WhfFttpl4dQlSm70lGIzMWMCtd zv)C9n#6V`X-Pfn*>}0v&Z8Nwz#LHN+O4|XPthSQHWIKUilXv5FrvTWtYAd}dzynNg zv)>ez17y7spE|g;QD=}JoQoP#zx6o8UU0ImR*eyygR+pcP;N{b|EehlGB}ccr9C4; 
zqtIpwr#*RZ7!3oW*3FCv$|ZSUyIq5?qtI3&?AT*Fo=m%ADGIhY_N&5$`vd9(-vN#DrC9ZF-Mw{TK z+L*-h%q}FXV4~lTcAtlKyt=qkMo1pS71+eMgse!Z_%{rG;T|dHtj^onE`U4>zXl8 zpwJr28_*hdHa4O$aJonu4r`$$bVcTHZj0;}n_$5jwv|G-uuM^_(Ubu=fm)bXZok}? zMULmCpVl#HJs*nVa6%zY3oL+iUVFA~^@7sE1hGY%1!j0#0c2KfWgalj!^y0vbX+|8 z7Uw7yL0YIW*{~7{O>1$Akdu+jqSFy90m}G64y#P*k|C(2P?mZg=b}QECBN@NMrxs) zVL#|f?&=f7T%c!s8-UE86}vigl)MSRTA;cORoaq%uh|oiUTp**LtKg`6}Sn?iBxZ6 z!-R6R87MP8;(f%iTH67MRhn4*=*&nq&PuCxLz-)K@u&}=1gzJ-kjv_t7IXxp+aZ@4 zW!s$N^+Blbs1020;)9+K`fw6b%v6ND7k~qu?$c z(PJXQ!@~(;_|zNw{v!!$-JKl|Jqko?)!7u3Mf#milesFga~w#C58^(FJ8Pkxd-^*O z=P?j#!&~dzkx}FP02|p@6$f9Z9SieR7$CUKofT|NrxPLO3IE=$$Ji!uyIS-(TDDd5 zETGHh`^WHi4#;)ExPb)}ZeW&Kd_D#5WOyz4hZp%2JU%%iXbQ-M;rwx&gSiL-E11Zl zrY}KQy4Goa`7)$4%>l1$@9dR?VP~o!q<+=Gbc#cRf@_djZ>X0C*aJewrvYgVC|~d8 zn1P1HJpY9r!R>M~DX8rM=#vyj8>;{e38Wft0$OZ1IVi~t564KVwGF@6?tylx72M82 zU%VqON_81vRvNu`5e?#UFt^biyn~01NdZ^TbuosIqaHw5v?o3`umHjwjC1Lztc3~L z)8mKwDiRW}L&a*U1fjGWwBvFSZkM8k!78O#oV;rSPD{H>K%A|0@qOs36r^?+b``oM z1<}D44eXbJWNm?)cbL3I#Q>q6bq7s7PTN9=4Nu1)f8@O37Rq)xroH3rUK1#Zqx(=p zq}2jV-s3oCqundku0y%tHYc>fQS*SC6T);-95%Hlq?#k&y0)aDS&H=UDQY-Nl6v z9|vU=_4vT3w@3dZ`q zk91D~SxI35#CK9-iWh;i8rjvOYtmHMOMuQss~5~K10Zhe^y8Hjk&Yqhvbk8bSAi`c z{nDM+Ak6`0A9;zMY)%6eCf*7&TTz9~@Pq4)hM+=HBiB=Sf_K4g)DMtTXl#8L#goVZ zt(AoBg-DuQ5F#lCGlO~RJziN_3L=v*&f=9pm?hb+NS}@^2X^VDjj5G+DH<#Q1}GRh zAc*YxCT3gg+>W#_OaTWxpH%@sj(LSDs^m&Y%L-MXcL!n(ECRJ;aqYIQKgssyE(SMO z+V`-2A1B|Jfc;7Pp^3xSssv)w=(vRpfu(?My(_T<+Xy>Rl~u=%nVvmfu^0h$Ml`o3Q5Xd= zYvY~jhaoAp)?A3>2o&w6SK@yZH<@FW4_Q72k?Bg^-J#LX@dPpT?a}E;Am=L{rik}Z zjDa}^dO<kSHFvaz5#X~IX)EU4{m@^U);X>XDFNW^V;lEhtOS!w_~}6yv5L&gfZ6;P0vyw za~Vy^l(#$hGEnDl<+?hCQ;A%^q6S?9>4s8`LBEIAOqu7~&u;A(rZFvmgdrIDH4M5I znriFd(N;!N_R4STG!;|{>0Gfc;CVO?`G>g5)xwpk15a!Zb44?zU=_7wMsqdFQO&2Z*?;#^r1i1Uh$;I@94 z+5+UfYC{dOw-Lb7rtNO*cO&9WDW>(@8mxQcHiJkH#A}AzQ;=E`Vs=jk512XF0+YI- z%yVY?*k%?VV(kUAa#H6U*+8Wdc|Si;c0ZtqW`WG)NxL{ScP#UJ@4tyt+ssKe2WZA< zn%e@7rU7-iX&abcFlPiPYrwJ3A5C;kysP{$v~!Hu>*8-24@Z(G7BI-@(d5ai{d71e 
z9zaTt2_P%?IF$Lzwk@@elL@G25)~g~_Af~x^f(mx(mBlm4RKCGTMnAmt;NPkp<8#y ziM;E1gWDe-hcO%knBx03RTt7Ak?94&yfo)gl18#cG|fT8x{vdf$M$ z#U&7CbzNM7txEKkxYvFuw3F^u(#s$najirK%>zKhT#bgl!R@+T$IA#ZARChRMkY9r zh9&@sGbilgHN!2?uCg@}*0@t^BN$P(sV~`to8)^m*+}rs{t07`8C~3#-gv8iR~!a( z0y+yi2c?d;J9%qnKLYatYLmSQWP<^hPegs&59qQr`G~+^ij0-UI0)ODyl^l}^WixI zTQ#vBEv(@bMlC`a7y*IZtqFX>4}bFJKm5&NGpDIC3^Y@$PoDfpaOMRXZ6*oKHSj1nU^k2IxGH z<7S05+nPHCR_N2RC)45PMNqSa8E9If)I1Sf0?x{*nIn+PAQ+ns^w3vuT2iu3Kzj_Y z0=je$=s@qa6hRzn!p7-@P%k^h`xy}Ds4#D;(p?923u2lBk-d?8_4@kmC>J+DVfz*g zVee}VI}5Da#y+z+PgxA)Labp?K-F6c;>vmeeL|_q0L-Xb_9?L_gQxNo6NY-nR^@aa zkaN0?xY1!gsEZuCq6LtSoY@1}aSKyKjW9PJi>ycxYnndbPzeNM8|i%(;Y4)$ z;}QAA5Qtriw(~5(?WlD@D*3Jgu%NWDTM`Lpyp;SbXDtsMwG04#iV$nNRV|d6p>v;F z2&xAmwXSV(pB;qjl))$FJhn611<Bl+1j#;6;2vdH;B2~+>dF~3z_A?BOB}&AWI09hGY#U8pE5S z?uB-oP27qT-zo!f-ZbgY4NRmw3;>lcT7eHq&j^U0# z7CNcIUVefe1$J^a#U<9qz+4Uv_pwh9=s1w`sYOurRVP6l{c>F}IF>vx=;&0vkAskD zUNqL4&1ooSu~9cxXf7T#187;r2xThSYZMcpmLQ!2qa>368RN5EfM-F78@&^i43x97 zQ%6D0C+}#~cr{Jl-Z3K9vOj4G%5}1m$yW3Vv{|4t86CUSiY|d6a9(n_oC37R0o9ch zAl?WO8~If*=dn#M@Q~|kAmoumSJNz}Qyg^_N+=U}oNWnI4I%UXOZ!GOOtx;|pK)2~l696B-Q;q5bVxo=s5B zbKM}4CJ!w@WUeNv(Txx;dG_prI`2(@NR{3fiuMO2IbwE9DykjI#ldid?zCqryH6v* zZb&D{)T(EmdVyTZmFQ(}PwEGAx;(+6Gv&7kAba81GPBxfBbtcRT$2 z>2UJ2Qu8$^2mm)LZU4u5=+e>hQBW%Y?9IZvEvN}#b9xo}=?GL-uAXRfK)Y#Gw^6QR z{+an?_r1rVU8tRm_kF|3Nidha1>ugmbqpvg=Z>_Ca~zPUQ3UJ^K|v%xTz>~s<7THb zfG!CqY2*1+$>anm9O_hOFqwkILrY$-I}1qVa!E73&zTUv&yIN5e&`xZv#-Y38 zX8O<@^U7cuh806(A-LxVbph>2=}Oz$#NV>yrM5@Ag~}bC?Q$rxc@UO}cCC2KPkwjC zg_{M?=#zFD&_bN%Z7_rFnZmOn zjP;l96rKyH+1-GV0piR&tQE?BNLQbZ3ZZOJAj!vl>WB71x(#V9LL2U6s^&ml7tPEB z)A3QxVF%>asc5B*KvFOoCTiWbP;5>$pmq52p$Vo*T3NZS~(^82y$ z`)UoAk0-KE1w07}!`Q4#wqU>*v>C>+%y#H>zuprWp55&t7AjuaK z8fXUGu8QV(Anz=+`B1~u5@&@XAI^am_{Va&6#w}IbqwkpB)Q51nOmM!(O7E=%mrn! 
zT0O)?0Qj{nZUDcOB5C7A<~aRu8ORypVF4I(CDClEguV*xqVHy1OoDd}gczwei0Sk* zCe==9^qGX{B!&#Z>mbe(*TbkRHvrt69$?2TtJMvVleZC z*NZ$LC*K?t^Pv&hxJ#O-7eKn4>w4JHuF;1B zFkQ%&If_~VP28M_C3lq&?zdPQ--EPT$1idi#IEj9RmWmzXL#*EzuvNBC<5e!v{s{6 zOR7MKnC;pK$kOyP?-IlvP0K*cJ|0OQVwV;}B3ImclV`!P}&lopAb{Jx5`TleQcCqId9-T?Ra&P&b z0C0uThaqy4$pedX+FEfo0lhsPnzc9w;9Aq6((_QxVYe)fd?K;9Q528}op+yJgm6;Z zTGc0Bg0{%5vweLzd8oxYKubWw_v!~o=W6oIbq}hiYXBtNo4u1fZBlJw{W|mmq|EKDGyH~m}O{1bK+sz8^DOaB#vxVY;nP-*%dgFOmI8Hjaj3v*b7 zWu9_SOWM}1LFTZfBOAoa>uL6ML zU^k;Aa9iwG=oT)u%SFk5jiwyY5CGPN+7E$&sRq5ok<_RaRHaCmz}wcZ$8MJbQSdHH z;bt26^5 zhEE$orKxtxX4_<9TW2fQ{bmTKxTcvNupQD>vjPd}gfh$bzz4l>iIM;_osg!9d@Xmf$kW|krYX-jMJl#Rt5~z*xQaZ0h09wnqn&~>l_D`EXd|Mnc}D( zKw8Hj-3m6;>-LdxXvf*VyQ4!h6{kTmn^WC8COy@12GH4Ma;3L2{E%p|e+;$jnp<-5KZ9E<(Eu z-CPhWIlGh)F35=b^)g5%R~28cqcIgJUIlaB7&5@IYtRyUALyC(uhLGj zie{jZOsrX|<2r<8OOs@<;|74)!My!8E?7df0A=hzS^^cMPCbZpEa@Cgoyb ziitJjxGsUeiWE>gGx1dkK>m7o2w9w7l;T+)I~}ydV*B8U>wDFtS0MRw|SGniRe$Icd{t{uR+vQj0I&g6$_QC(Bi4dC)fQS9j9 zx&lJ>CX@q&oHu0K0Tta3YN=%dc07DF2;vmjtIDkG8gBBo{k8Xb(l6C~ZN%=N?mQdoHM+zDz zN|f_sKu+&n(V{&LZQk>WZFYy~Nid5sk2s-rART#6daDwCjRT{oHmDS*aZ@Igw5T&s z&K?8tK4O>vaH36d#&r_fh1{U+vy|v82$8L=S&c=Keig@Lv7HBTgxtKQK0goOTq9DP z8d9oLV3w(k4GdwRkP)9HxnyEPoZz~g{H>Ky0_CW+en7FV0yuAR0iQN;4aDqNsSbJ? 
zGD~|vTb{H4IRoZ8(jAlBNrgTE%Ib(R5b+HFN6dEJEgC@H1X1gX-BRc;@QAah82H7| zS;ltlQRi0*;B3_C8t*bl>yC9yu4?2@KFR@^vshk-d4ki)b#eqpJ5lF@xE6Rb7KSZ= zhUag*`;F)iyX$52SEP_SlN_fuKuEfg(VOGB{+qg=}sFq9Ad091d(UNv_+pe6|_Vd1AEyy9m7j;&Nco zaGyh_oqxN8WH(DNaIVrD6^R^^k%Tw!u*JaX{*UCy`MOcDHtt z*E6`yxf&*Yx&I91kf2rUZi!P4lR%bz)Q9fFJPSY(+0sv?oP%%%xFUfPorflStsGiK z0`gFv`l7dFnJL~UM=qx9i@ILU%=<+sSC_6%^J3kl%FxgtnG;u{{_8z5?jT zVQnWel5KIn3SJPkXn3MEl2=S^Sg>$(%c}d*>&YwDh4$y` z#KH~0EGZd%(tJ08U_iVc9fM?`kjM0>a~w|>gJ)ICqI32fZYiMozsqf0r~=D4 zn5))SX%E_GNwqFY9vt9pl#8T^65RUuQ-_%2zRqHtza%-&*8?b!p{ z8zHSx+R&6T#y6#4zMbDIC7VH=AX`qldo{vD`2d7=YIqpZjPq>b2(*|R>s!z_P-a21_V5w!QU5L-NGXp&k(qw6_&9F*hx&DB z8JvG29_rl2fSk~B-|#XH;0zVEGDJZ?4eWf^#!136iC!1)7@L6h7Cq}YY&xt&lAq)U zGQORGKh#+$Cs*&z^<0YQv7WdBV-NRaf2bY+>*?xltTN)TJC z=v!$dms22Kazx=kQsLZdzzp+M2v|jCQ7^bHF>Bc=O%v6tIMepc%s7$HBp+)!cI&*T zTF`Ys3&gD%33Li55%1Ql^vzp7uv)w0rX`fhqlL_`E?lWr43xF22de4OOQFp#nv^W5 zG62^+LtY}nJOL?)xfX)r*1~y!nV8e3$K$s16BG}VtA#EAGH=(2_X{E2h`obGUMj#S z1v50L65Q}j^Q>qR2$>xXXb_yvwB}Y&AJC3x8@7)23S*^!5zngHI+REHU9&+R=r03v zL2KBwMI^NVE@<3BMM3L9%;x)bVI@-p(p3$hWU3+Rza|J5TQ4lc*4hGO_7tvQB&v;o zPOZ8v4l6bpVDDe{qV8q@C%=Uw`{G192&F`Cc}H<4G=fcksM+FfC|Idcsj8_L(g|=e zkM@G}12|_7Xl^L3h|i(mwi= zz^-|ukxYz1QyFxt2EeLwMQL zmjK)_1_u%R%aBr`7?W3Uy2LH)Nkn}>yApS^gC}13fF1$NgeOxG(<#{29%W(%+BsaV z<9q0k1aR$ucFf!Wu)s)9MYHy%BZ=pll*_QYP8wFt;^%c=`E8gfV?7IrZ>qC^#;=9i>+-W7X^suWLkv=TIP z0%QqgC4{j8O-wvm25d&_`Dy&s0+>C`*|TT$$qRR3XqB}AAd_m|Ou#CErcJWJKwHUL z(ytrhh;JjbBjnn-xN&n6h}pH4ywI}6!_AI_IRnk;v_rw*_y##-CnSZfZmFw9l|VQ` zjEh)^UJx?GfYY}S_9q_$J8jhtg1DGm6)1hX*Fhu`u^r`75Dj4@al;_43wFQqk~rBI z0WuF|=4&c0Uj8uZxMbx=#dSEv#g&rrMC%a{s}UV=!)@NI>&OmsoChK&tP_&$eER*-j%~cROkt;h zoEIK+<%xpGsf&P4w43R6^a`}Awj}Q%YRC z5TD&FPEcmH9W#VP zy1=0#Kcw&%LKmbGHqe}+1zG`2g>Yb+>{dchc`Om5QE+Ecv9k}&8Jpi?Ko=LwKxPsN zEdjICYBAJZFqR4^@wUP{vQ)VMlJOa2GglsZVLXdU{ z%LR+NzHF=$NR5w0Xr#Xz%0*>1fRinf7OWId$4W!rDwg!8i25*6V$eZHw1G)dQrZhe zvW@X&w;Y7CUgv3~VE{)cp^+{f0dadqb7Q4Ix@@d2Z|CSDUFcyjqTJQ0>t~KYm}PAD 
zE}Zl`3W!{8s6&h3q@oA3o9Q@iN5*6z*SBdx?_|O_DV`lx9Ro9W=wYy;@iy*gIPr!S zNkIsq*-`5844C=Fp(&Q>-Q`Urv}solR}R<^oCI|dYnrI^v&mCyduwlDKf<|$Vua9G zy}j~0DB?4~>8ZH6GY_3rGgYKH+$j*4vlWfXXa~ZrtA&2z5|oRt69ZOams14Tzj6G3 z1;h;XBO*~*T?KT-F`29m{~7>n?NZa9#%*>n!^SiNwK$Xfx3Rqq;JW=+H&P`xaFfNZ zI2O7I;Y@K~;x_jmVNW4K-x*K%Vj$A1)?Uj}+|CSZ&C)7m00m)XF*iGAU+y4{tr%qU zk`E06;t?9c%?CC2^nR(V?gbz&N=<7oPZA(t3xS;4D!uuEjgtIu110tnt)&vkt-5bO zXC)RvQ$lV)AeF@?*xUrMBn9S-Kc+E(tNc3;ox|6q5YAE!{gXm01999d*`|XANpuVA z3uq8%*B7?KmJaG2aclr|rn$gT+q{|p953rcT;GBkNrn@Of!Oo zn9C*sb6&W|jX2t&Ey<0`tFcPb&&ykNu~;{BmNkTCm);;nhXBGGDL2#y1WBS^rE~_N z%>BOZE>DB)1b<>?1wlh%NzgLWPxWxON;69{9d$^a&)K*9IVW>0>F- zawhk5Ll%r22Xu8bv)ZP1d)m>cBb23b2C~3u-RCj^ZL7Bv%^4@*V=*UFuytLirn8XF zbUTBS*f`FmklbuvxKN6Gp9i;yY~{8Fn3D%^g(H{QR_rM-6lwKpbV>5RlBq$u%}W3j zysx7p=Hzk;j&fBWN*Y(dT#AN0dbr~%w2Mt=&R%QuNJ7@{^i2WNDW2@EK(aQIFs&r| zQSa+u$dR^@qf#JT7}hz|kZ%GYSX&Vq@(~^@s*sPxBtVDdQ_x2Z3HaQ$#q9dE^; zD;5G}TA4vIvxdlb1z6TYNH!`VT{;#&wFhBQ0=QDHgPv_M0P2yqw@@*-NwBp~n-{7e zJk7@3RAHC4%-f~>;AF7~Fo|S1Edz5w<0dV4Ikk?iAx?YLr%>?(dE`WcJ#eO**|8=F z1m^Ba6bw%1x&iB(=r<;y&h33u0;C+#QkxC1-WX4hpkM&ZWi}hfY17Vxv}YR=xn)UC8`p zyEG$e7jksfE~@^TU5MwjU2mk}+NJplyB5(_?V>BVX;%vk^u4j<%`{EB=zvP?s-^+j zbq}r1t{QrDyB5>H>_VY0u$U?zFmJwr)t;h5p}y(BI9;(oUzlc zyXiUYq80VpMaSQ7*PH1l?RpCWYu6)miFPePLhZt`9I@-q5KOz?MyF@jeMqBSRmh-S ze}M$r^#BrR*Mmr)U339scI`z1?RqN`XxD3yK)dcm0&zvo976^R?oS|v_Q8SBa|QIn z$f41sG*uZA#} z?+)BA{$>c}Uk~A>7eaW+w?lZ@4?>vty%6qvE`*mq8^SAo7Q+0Wgz(B=h4Alw5yGo} z7s7(ygmBmMA-wtzyvbShg76Ue+_LPB&^A*VIx6Re_S_v|+oic->&)EH)ms)i7ng)PYY3Lh!Y1lRW(lAh29{L-~!|tgUhezj? 
zhdom-4};gs!v@lL?E1??5C6CEz5jhh;b>XNRp(}fnHPmz&HNCSpADg|K7`uMA*?(+ zC#)DQ3acvTaJ@xQSY1^T?wctIjlCtIp|&Kfohc4$^2On=;cL^P+d}j47lw7^w}tiL zg`uVCg<-=~X?URgMd88v(y(!&Gz`Bugm*114LQm`^obDug*0DJKK6Ib3j6A2g`v01 z3OT-mJ7$N+*Ut_^Z=+s1W`(=S;|Ge0!=WF{4kN#s9iDhW2!BV|w}X%3uIB%tH_Z-D z;?7?RVJ&g)=KInsLvB61$#oFd?Zo+x5ca=ncF32{3KuBXzPs_?KRcWlniIZ;-`q3g zf$v+mhxqL&>h!y~^VepF3!7(!cN5nb_14P&ZG1cUeki|P#B;eg?0a!h_%U=J_|N(O zd%l0=`!(O+!lV50;&1_f`O>2BW#~_m$HxitlkkRcW$)|5PetdD{pe6lEf zj{m3la?cjQ8`3y8EBs4s`>ZFxN){0Bjcv1rUR)CP{iZ0)lznK{q4n_fXGLKyza9KZ z5gCNOoigtU;r--ki2u2d6oq_MQMfR#D2#r+C>#L)8hJVKvZC;fBX@>>I5sbQbNJ41 zuIbM3t?4_%KjvN%4wBxJ`1=lW^iNk_PS}@+mGJz#lMBOtJNAn3y?jNOy7ap6{c9EB znfr@EBfp(`t|WYkaQ|bnB0PKk_2JURKMOy)@%r$3C}7Xgwx36 zXP`gFccOYucqd`j@;yPCEBU{H@99V8gopV559Bd_1G!i@D;+J zLQcNU|7YGYCmbZ+$3GIn*sI9Xp%A_WzV9>C%@0C&x~L?4ANomTbKl2`!gq+{LfLJ| z$ZcT=?BIc-F!I%+&_kMEgAZrP>nO6(Or9R(`xaR4cgP3+_6-$<{hufz4eFOTAOB2I zIQX8xr3fJ$B468xL$W^#w(s^3p2g1-t3o*a3)VgOZ5HvJxEr~79r3L%3hxIy^%msi zB=HgM$M`=8CRzL$?!S3KQTPr2^Vj&ja#nbP-%e2O;oIOl?qBlTDgKXrF@#Sl9O-_J z|M{EvKU);Og!^m456lj)CGDY~%?`g;8o!tw{+oFD)a>vrexFU5pN9Uf{CFU|x$6zPWNx$d+ulasXJg1;@@L}jxAv}Y7 z7Py7y=JUu-W!mkPU%nN3RL-1kv9i&e?9Hze1XXRcXT^GXd z@spn_33>GFzv6!K5dIqGgbT&UdoTa{`M<0reEgodAwO0UE{vCi$EnMM>*pfxbHhvE z)xM&#upc@{-3_5T{+4__K^>3&Duk1`e@GcVNWMmP%ngSKJD)KBsBo_f;UU!n{OaKU z+`G^-e0TBP$@hA`e`IS=nMHj)QIeSswljMxK2Dz{;H@K`q?}6 z8~nJicNVgAd$@<+w-a{k+jGOgw}tRO;r*dc%?)MXPrev=%KmzD-!-T?_qAzJt`$2LA6S&3E$qiQCA_AE=YD+2JAZ z`@u$6QP01jeL)|>-)b<)!EWdtzWg|Hc0PnFl<8ns2tVcj&{t`L@a_K+_T{3mfiO=# zWqJB2Vcr8hLOwo;`{PgY&?WRQ@T<0e7562+|IYV+^6jr9F5(#C%RN;T-by_^{s>nf z^V=|a`{2t$_yFZN1g}T%`vm##;kP{BC~2HRXMcKCQP@r1Pm`w;57IY$2ASj=(nncx zd`~}86yArQ&nn&z!AH`0C*KM9y@CI0`5xjM+JS$*XKpJBCrR^Ou=i7r0m6-u-V?YF z^3kS4{xtcfE$8_5UziCetgyc^ zgsbu6PX-=A0t0;9%W+>`J?W&AaM4uYNd9(4`g!HO3kA-{<7gxkV}O}B+#lh*&kmp^(3{T$)*$nsMB zU^iSi#{Y|R!-YX(7pm~{GwPjhKi@vS9};$$|FQwzPW*?Sq7Qi%{(ZY7ycv2mUyagT zQWS3BH}@R;`b7v0(wEPZ_Y2T}5D!bj7W}=N?+|6oR~Cgn+}rtnOa2x9SNzYt2frQ2 
zO%Zu$#f|LcR)z2e{4Ii4U&W7XlfS}U%hwG4UcRqj7i}cXeXo*@LjV1W5_BQ@40$?u zH~R3&lJGn7_kk{C>Df8l$&3vDt|Y7@%oFt8qtBuTiSLj2c`fn%|NMXQQ^ZqEJbY@$ z-yn^B#Pc2gf1mHfN3ahLlmyuX`NP!vmD|EIzaa1M`9BhGJKw{6y?n)dPj!@p9}zA; zF*}T{F9|=z{V4b^`Ts}0+34us^1lS^#M6uq_&@r(IpKEwZzNvwwQmPJ!4BA0gna^^ zJ`Q$}|6kDm7h@khT@*fxJKscpuRZqkF9nPG|0lEw!j15i>;LWWlX^ad%zu!$kAU3= zKTo`_C^Yf^^n=t3xZ2TI`2Y0Fio&PB-#~nigZ=OLe_4!P4pbt-#i_{KNI$YxbtV}*Qc;QzY4z& zmxOEJL)3qM{;R@;g|mXjP1nILOwjj_&j}wV%>OMvcZcv<+$Z$?3jfRSk1g~C=&$e% zy#V>=f5fL|g)wx%zgIp;L;5tonRMIdg#VAY{*3QC_=aC{we=| zquJh-h~Iw;tv+=wW%?uT z-0jrY^C8Se51)AUu5e<{tHTNE?q&S`^jq!Ide}?jO^8YFDmy*R7@x7Msm3+A` zQ+~dEuYr%)Y{T&74d9O-qAej4BedZ|zbFddC+-hE58r-Yb1fHUXcs@DP4N5BuV}YVNiWhqH!%)> zRR|U2ckB<$UGV#2+&hV{pYOHMvcKNI{}Z&EKj*hMD~|gKf1U7CMd2^NWIwG2UyA$f z{I3^AxMtind3;3ZP^R?>VUmb>jR}{W0{~Mtj zs88DV(DT$YaXu#A;5WzreCeyh=x@*o*h3evhgK8sA=<#uZF52m?sxMw^KIeVPd(&b z!*7&7$2Wxg&BDb8@-;;NI!KyB&l4Yf|1kIm`98~+dkw!MmnW#VFF=2V?;Cu1jK>Sh zW-*SN9sW1M9%O9&eemz_-6;_pkt{Lpc<#2?t`>;TS7wDD@%soo8EPaieD5OcPr)|x z|H-$Ngk|^}`DRJ@8|V*Uv;30ZK2H7*{;VWyC(lFl-#NY-{O$V#^B>Fy$Og_ahag#a z{3mn5Fky$0#r^pI0Db-kKZgCVhyLyp%=N&#CmzIp*ijOGOWgS!dZuDlIE}rtf;itx zm{*ejKK_4*cyb?tU(|6P*}8D8IQ)??9|k|Msw8}j|FiLX;;SX$Mf`sy+4vW-_agqE ze2Tmmlm2b^fgWou37hW-Im9z}9<}yr?3Jgx@O%&aeFts&J=8I_xZ3{&;l}w6;vS)G zANpcZ_`p|-!V^q=PZHm~s{cXS?^`peW2E?ths>|HSwC+ls;$-cb}LiQ}u!)6YIj ze@`3}Ma+Nk{T<&@(x~C1&4(|1Vs`laq1oYY@uTrTH+T=<%ShuY@%@~yir)?$psigk z319mZV@uL2A}^yb^}^LMWVb9F!mb!u&$tJf_yFI5FJjm89sCG$fJ3xz{4e8c;5)hq zT|s?(n(^6%QDlE?PB_rT+%4Ff_;BX(GqXbp|KGxQgY=&HNC-_`hk8UHu&AEsSkE;c_|5)LS#BF3b*&kb9M@9zk6;5M#{ z$KA~L_Xm)TyO|4k-Q4gH^uE{34F~QH;SA{fgr&SJbgHymG&JxTa|)afJe=&$&GPMhoD|F8M~82^9C|1h(ScZ83OA`^%12p_GbPk|4gnShVqqn-Z>n+g9@c>gi~4?H+G>^~I3r{cMzjpYx$meIK5#-+y zKlsPt)fbVq*Yo?AsM8!}7@|D+O4>8!$X$T`!yU1n_EUek0qWox{{M#mKcJlv{$9eW zo*E{2#{VR7#PQiA_Aldt|3Tc3V+VYc z-~ShB_a0o;buV~+=>=vmUct-aUlw;uZxs>55lJ?AK1>D)3qu=&*AKY9k_Az==n6le6m zZ);ad#CJwIPe18dwYb(jwrM~1l+k12xFC*$WTies*h}sgy{p3ak>tO{96JAPujQJD 
z^PU@W!p100{q9(K-IJB8{V><`m+a6pu9U8~^{J<%;VjOh$9^~Nj>f6c99pUCqi0=c zd>7eiXI@o?-WJDGeaxr+i*Wio{n`1-#nb(?^Z$eLVIGTNbo$y5aKF$uocYTWJ>~0< z(pxxRpNh-g)tR@HAMdJ@e)yJV~%wGDetu;X2SRh(~( zcw8QDME5rai`Hn)G{`p_ev2`~9?xcbAM@g5moWFqi{vA+H#q<~c*uRkGwVwqgwxIH zi?K)4p+<8qQTZRjKZ zhT8U7et)}VTDTh3tHZ9tR=lxuYMAG@sE_Mu$40o{l*z54PhX)QLg z)zAEhzd8Ri=KfEAK)z@EJ9m26)}3vD!j`+49p;F80Sfa9!uCz<0_h#?`9*7fb!YIijjhVcw(HYFj<9_(2xDARC5@}eNs)Vm zeuqp%%~ePQ2L|4;wHyEN~!LYO@ZL+B}| z(x=%gT+c5Gb3`0H?fzlE`zp8z&66Op>(LIBxzGnPhjRaEI zQ?LAs<2b!1zQN&f__4wl_zFvu-l&|2FY5ocZK40JAdHjG(_f70ueQ>-eur?4(z%Cx z(>%zXzsKn}vcpfwGuNgm>&m=( zb6?#X%8hUf8r`t4G9ltn-$Kv zPm23kbp6w=e?lF7Px`&DlToiJs?pdj($24*!w+lDOZN@88q+ zj?m7i5C3kUHIpHsaPl+iDjCHwmY)1WPH12|M{!*GPImYfzv$Xa-xi1akLJGiL-QJJ z@3z674SB6FJ8a^=KJry^HU0yl`g6K%dPp0vZ{cqH(UYN_-ujm( zL+aa4hGy~&{xxW7elpaNNt203-^~e$cb^RVxa+@`6Ka+FxH{gPXTQKVa>6h9m!ab8 zo~vi@JNgmw*Pdb78_LQ=ZDm1D_^EUz$^A>&`g7U*>ew${GY3GvgG1Z1!Y|o&CGTX1 zbS#9Ql_~%2-v3w4yZvNRINkh${_TZuXsnz5kgy_eH-o zeBil?GfMB5qOiZu4h?^irYJ4%W{1D|)NjA*oBU>eE_|8aSMU$YVbfRDH_!8{+%F*E z_o<)C`Q$sE*+I;p|9eDh=-(li_{De5XBAyDZ)l8?_FKDn+mH^=gfexvykAbZC4AAd&x9`WE=pZjqaTl< zR+#uu?q3hfj{Wfq*;VdaJ<9dp92(N!_(HfZeA*0f!?iDDCx4%x-T0N~vb(&qhx|L< zHQ)Kd7qUCGS*@x@`^(Q}r~mS7b|yTPy*u}O)?0u2f3vQAJuCe5iD}u7gwL7E_C;T$ zYn}}Q$Uzu_VHl2)7>zA;t2{*WbI<-=*!qX{Vfz<<5sKbQhmt4Gsw4j?y!rh}S#MnX za@LM#UeEf6KMcy+d95t#Cx8C?tX&`9%KFFe?8>UUFeKDxy&8`0Txkw=WoVeaG9>q| z4r9c9?FU(59614#Fa?F`Q8b?v)z9hNvv9a9JN(Lgc{d~E4wf(lwTPZjvV_! zsIcx=*)cp+%^n`Avz`hy_XdSnY*47pdOE}(3<(wEdxc6=&FvMcXZ8v;WQ@&TyR%n_ zZ#SP`sGdKQ6Xv+jJS@N>EWt7~o5yci*E_Us>K#&K8+W^Tg|xMS4C*R-S)c0_j&A53 z5@;~zo8g5 z{u{9w+fcsPGvHrBug;RUvDP5yyAZ!^Z38jXpu#;W$7Y2p^{BcaE7XuN?piXgUhi?u zJ{-UylwxbUcYs1PA2gpHQ@+a7Ep2PtI^}DVx$DT~-RA6%sgGz#s+;ZVDB0w? 
zBjT$@90~0~f=rT)XrdP@TP4PotN>0s;wTbmK=O77jhFN_=zOww z==dVOq78qdcUU0KMOcDmSdJA~iPh-(dGD~6Tp#IgDAR9dhmG{jNEdz~Y$F@_B~e08 zz2el+`HWanVrK!-9q~VmOdAbl8Nr&p+UH$@Qq~Cm1y6Ay$RyEkB4}K z^7i4O3^~ln>iybIaTULzPyePiLB4B8NMrOYwlJb+d;ZwESozxjT-uVsg-m$Jhw`W(a;8wc@=p=M2Xn8$Ac7GVjNVL4V{CH}hpujXEljVLj8 zFaBkxm+VPs#_xxR&KL0H zuy9e_mvI$0a0^|yi~C4V?GqkGenrDVMCsw|@QAzkgWOQUILn#t{cZC8(3@TzKiTQ}L4F&8VQ73)d$hAx7*0=VCtA5j(nlk6PkY4Ot{rJ3$M746 z37CW_n2s6fD#{PD$PVMeIb`R@xi-akZuyVLFZ1|S+|d@Hs-LzgPaB0;A8nTQFs>b3 zAj~54)W4^4!V>y2EXN8o57ZV8l}2r63T^j{3(R4p$;?gTkGa~?h1%4m+E%iGJ2_sP zizbxX%dk>BtFadAQT}G{P`0yo*hsIof3b>tGkqK4;;rS5Z83fzOZX)^j4#MtXuRGl z>>>9dc}Ke1rOUHCzBB-x0j3+jJaUT6|$ z3K^V4`Qf}!mdp#M=+*D%g{lX6q1eXnl4teFBh-z_>d1I?AkD@@`}b~sO7M5A@~ z%j8ugJB+vW;i(Ndp_BUt(&N>|0%L!5vKH>L1wKaLl{`8tSzT?dAcJ?9WPj z3&hbE1270fFbu=dRq=(8wsEZE?W{18yYpH7Z9nCAjq;1p{7UcTgfZke6tSl#k>%=J z+4Y<-g5R;^<%zvuaVxnGETcD3>uTPUyS_?ELS!gW6Fe9CX03^VAnFbDIn0E@5$ z%di|P(DkEXA^p*Su#&zSYmxBI*OSTp-Y$-)Li9bq4Kd*`5-g1z5f^c zhY5b)h7#;RIlr=n{lhMLwKe-H>+XB#`w+K&U%S7*cho=BkkMZAXkBV+KD*TKUC;Ch zhsaVKLFd$dp_=UY&XXZd7Mm+7Y4a}VO{fr0rFg2|?;om79@kipj~#Ol#P9VFMb<4+ z!ewwACvggAaURVkT3gVH6xz^^G&1)FggW~U>Sy*3N9Xnr39?~n|B#fwMl_*fzWXnB zf8nYRy1)Fz=(S|Ly??mq9+z>x5oOjQ#@bKww+8P8lTslaQ`#z zPbTE8flMadU))V(BCmhAFP?{Zgq#=Ep?<=kH$Bxy+*$oYU-|%K*7Of)?sood1N(=q zo`3WXd)7f;vG>ip8Y0{faUhkIbA=|IX?Ea1;qN%vb-= zh$c)CN8)C7m`=rNDva&-v7VukyQa11svP_@7 zj9iZL4c4E?m8h1_D#Yf>BjVCni;4^Kf5mTO*?*(-i3R#bvX(neu6FHOtj9)dMhSLc z7n-+gf8W&p9#p^fdjIv_Khhi6AE;Yu{B_;HH6LW<0w^JV7=$&QAsaFXnL zw|_WA9@l0TeWcB%pT|Wc^f}we%eab8H1bR8dvDNFhxK9l*IV>1WY`Spn7#{bN&D)R zzpk(7*B5FBl)v*j zalvHs2Q!Tm3fTYRsMJSRk=4dCH3!AL(>?0-znJBkIhcn9=xP7!wRc#=-R$|acoseV zyfnAbKW|^>s{iNozc_k9|LeB~VUjJzAC<-*@>$y6D=ZPuG87e=Cw zoW7!*E;Rml>C^g8R-*>7RqFpib!0Ca)-^k@3wy8+2XF|bXcneL*w(}9DcaVk|41W) zy4&jiJ@p@n6YBpJwsA5$G}hZQh9lyrMm&n|x-euJS>EAydI}j-jgr0s=^HD3`O-I7 z`Zi^U<9<7dQz-h{k1bj%hxy)5p7l|F=?{M#E|SIH*kn%5*o1x+ow$LDgYthu{XZst z)X-yR#D7QpxFt*%?&3b)kH#P5Bjn7G|6}SuT9LZqxt{S{@2UUYp7TNP|3&p5N0C4S 
zl4wK|I$r1%dW)yiK7+pG01Uzq6urGEq-%a0hS7&(B+|{B!)UVWDf`sPahQNfn1T-F zXFA#Wb!WuLXzsQUU4@>>WP4`#&BGw|aRJ#;w>g}bm(Jh+I4t6xfh8zzR^H6dFQYHV z3arFxtVP!se;n46i8nWejpSx*LkSvpZel-d3OnejnXiXd?4s{MhMq?I!q<&OUUyby zQ`pD<01lxP<#RWMG8~~-Z+|^hp_(3#pw}X{_w`VNsQpQdfBp0Jr}56_P-I;qA#8Gl zd!N}HQs*{5ZbyF~&C6}pE^jtY*le7zIaFNV94hZ^4prToLp51*dvl0QbdQ49L!4|9 zPYM|v$4Q*RS)51nrPo7Chx;Ifw(IhCQy%xf9x|w#`?_=4uZN>ZpyA%@A^G6-&?rn3 zN~PzbcrN2AI#Hf{J(Rt&IozOE@0I^IUk|tFU5HCpt^CG3gBslB7v=vx`4EjKvce-W z=j-x+PPu$vxkPX7zDO&dnR?}vUpp$UY?A*?^1sRa%cfA1ul#2z|C5z}3~SjlKoYnuu>NXnYs{iBFe>5R&eyMcL zrk*v3sQ)kDo*hPue+vd6c{(SoCZl=OwPY8&X+60So3RZg*ny58Y!1t#`j@MYaPN!! z*c%7P&hKPd(AAz{>nL^jPp^kV{7cbw?e%bkOk^3iki{RcMZcUI;`B!CdooXZLyzV} zTRY5kMmjS3t@JtbAL4A|Ka$^`$REdXI4d{2JJ7Qj8p5wmZbm&Tc2}V z-*Z79bVdKD|E$xG)-UXBe$8CyD)am6%4)K`b6*Pwum32#lfN+>lD1#A{3z_t-yD9v@CV_* z;U9-zJos_g`qKBqw(YNltK#lN@xXPV1Vt7#x9@z_+1($89Vhj7P%G>*d$}%6eu~t)!s_wxd)-o{E z)DH}C>pHcEp9=Ne&xX3|=4x*a4M(${3CXO#2@RWuh9YZ_Y4n_**sJ_#CwtG7zi&CC z{j$A_^Z{tS;JsY&o{o7>NRQ7A8MJxM?Km3qUX$K4SzqS89`s)CdC&CZUhn3$-sa7W zqfm)j#8HFje8nL57=mFKj*%FRF&Kx6fq9`a-yFl_yikprv3Vip`PI(OGrr9W6NH(B zDVUBKn1wl*hvuc)pVzcM$JB4MJy5^VV<#kyKl(UhilZIcA2h7f{%q3zkWH)j)#rvH zYq;s$eqn+57NPN6Zb+8ph9&gWnOuEHZdgWNj!cWV>)m6bdtn8?(tEjKCAk{qdvn8j zawEz*a^Xuik(?PjBrsTbMb zXt!Y~y@vgby20$ikXT@_K7jYTo;w!^ddi5nX>^buQ z71^PaJ3ds}*u62b<`}zouII(}*owsb0pW%)N%w8UEqd`aPUZs;FI^BaS4n1D%`g6Wun9(!f5=VMGfi+c{{p~(J(sD0>sdO%pj-Epm-dSFhz zPFa20JS)FtSdJA~iPh-(&493$T#t>|jBO}ELLa+>jON}O*Et_Z--CUK=HOFkQ~ukP z|MX4$Z->2!emjIx96>eWNT3N7YmCn~MB@PS{HWQl-k_EqN1eLfGru?2m~W=}y%)_d z%yxfus|n2`%uk>dDYT&-X=G4nj2`U;jLxA%>knO?Ny>dQNIzvg;ywHJ=pDbao=Tp@ zd0fP0R7qPkdgkB1r`&vytRzjKMfGcI*E!i5}S`tth&d8>Vn) z4twwI$|%}Uk*9ype{5q`k9}-Y#|rfC6ZP-U_X@|QX{+{r%k|!&5EH~P9mUV8S9PB6 zDE99O_U{?<++^Do_Ai;{&X9GB*}pg{t^^t;vwugif6?T=r3amt6yGe&!90|=>;JKU zUhVfPETS(#yhGZ$jX_X@W&9G-x13yo#@pue$<;_oZ|hF|+iq*N-0P8EXWX&LIAn|d zjjWK4O6jVSu4>fKW2oIA|Lb~%jjq{@Z79JG?7|-GL-PgszaszAm6D#e>+*kF{>f;r zq^_U*50rl-_&3~>f8iR*ChpRDb13YyE$V2r$993|U0x~whpicizgqlN@AnFa=;>G4 
z|I%1HUK*pcp6C^h@K4;a*XY&9^8<}bjR*O~kzDBA&(!QF&(fp%$xcRyJfnCLft z85C(F(}mB5SXjPhP}jTtz2tpj4lFi!8g89lFT7C_m@B z8{|V&pHPPpYf(3n>WJ}lG{;!EDlb$l*7x`GyWb+)KC(gLd4A`Yel)C=_N z>+Ykz?zg_m-O`~9KF{_bTW_k{WHbhd+Low|sh`URL1F|OWURh_lrg|$eg8mX0D6V` zS}C1XsJ`yr-O@1F{9nFj-Ou;D++zraVK_!&G{#^YnvJtt`gqR5r(D-I-g8Bd{eMsU zuTS{g{`YPgW~!TW+5cpdcTxOqzWp86V8zvC&*UU>3Z_Ty-#SbC{bxgm^9eJ!JL}9x zk&%5cCvtyXnHtJYp)bH9Z2$1f`d0Ta9$3PiIw5V{Sz#G{IWq4{+c9Y*+k`8IEgX1`f=6=@yhg$z1 zVf`O%?3&27Nt>w295esA#y0|R6p5ME|6jEJKiB$yf%Sji6G+%&a7sL9aUK`Z$S;Y@ z^iKHenYrhD5HP2uc3>(=$dk6 z&qMr-XV>8wUaF&=&(R2wnDIc76C<7((y( zbAIT&rvLiNbD^UsD-7c|93wFr33cdgb7EuY<1hhHy^6};Bzj8yYE?$3(5EBAoxWs_ zKz(Z?XYiYaIVioUER$sq*x&R8^hH>Lstf9;_Mm3)<2Isa{*SxxCC?pE`CBI33KWH0 zV;f^~dhu^^L&>+;4zDj*U|P&pfVE?N_`v^(&*E)ot?p7kd6nJ%6%cuIFFi`A_uxUDxr9?_P+f z^9A3#Agd8a0!>IEgY@@u!*O!UI{gH?t_|<8=}z)Hg`V-pkIn7VqdCBfWaqoil#o{= z9bFCfuadVS9e0t;v-gfXk4E}^@*xssdEpV+K2)Ek@5;QSPa|{wz4C%oAAQ~k{a1Hh z=*v9-gD?c;OZ8tEMz5Z)|Jqj{nu>$*Ngfuvcm6`i-i$?HEIw;E#esMnlTuM z33xpJ?)*}{@eOzLTz2b1{rpn>JlV$GzK$KcNk6}a-FkxEdWIcKCa$ns$s~6p*|dS* z&E8>(c&4N1H{RPHyqgYT_|M?ilg}*k?{BDg=5sRgo#t*QqvtY9n9^A9Fo&Fn@-lnK z$VDjQUo}&{=9=T@UWQukXkEW%p?s1R@?QCh{I3#@{4et#?f+Tsx)oT7)mV%5*ofxc z%AdZl)wnK&wl&&zq>(}09pw*4?({7Ph;ygEWZXWJK3+Y3 zMLnLY{$5ajBYU^Q9F;K9{+|>X?f=P;QCoDJ?Ad?ts`0^u1Flc8GvqCCb>gE*phQL&T#yZcl7w~|fCK8>+YYsom-9o&C%1O|sJ)(S!v1cb<3_U=fy} zajY;{Mo+CVx3H>zSWaJo%wBOXw*Ece`ZqgxCBIVc)#O@~vya!48&O8DdY)ZBv47ai zy$!YQ7e6;3)QpnP!PdXq<^P;;sD57iKUw>aT6!FRZ9kW|eg}3T{X=zv{Cxk-7VjTz z+r9st-oLsP&9m1nR{wDHc2w8Yzi#!9{A~ZtK5;~AcF~^S1N6k~{^1Z={DStzdBn_k zaY;w}2yv4saYuW763SkwaHT{0ha+S);z*$Uf&IUCl&{ENov%WQogZ|+z*%2KeWH*0qKYz`5pZ`NVLOL1)M*Z*C z?X&r&GK~QkgdrG)uD{6+!^x2tjWHO937CYa|D8fk#|(5jmo$s)c)=K$Of1X^^T-8Q zge7RqSO2k$p6a9iPxj3s`U-r$|L=jZ(m?fpi~6t5R2@|R>(z}5>c|y!gN&ciKAkZ? 
z&`0}ZK76I?S7SZW&b)0To4wl>w7%vUqs=pIN1C3|9@H6I)bCXPx2ylFynnxazW+~{ z;xBqWPiTY1wGAcMfnA8|d6fT-@7g24-RXJnBM;yZN+T9pLnW&bM*=ORHhX-ZHYvuB;(U6hr5+OMuNuHk-&+Ewy3Up~p$ zEBfDw@;_O)E%J{V#88X4cpteYXRbCGeK7!oFa*QUEWbVL|L*nA-oN+z`TBp9SM;p^ zx68k{o1`_-$Jj_bBQY9d(3mCv{KwH#gXJF+=#!A~j?!!7KTrNKgvcx8f&p0oqsk*LUw$Gts16=&Gc<3!4B-g_OBL%J>))g)tR#<51|xCP>nd!znvNq zdy2xmf)~tvSFr>LME-N7?)dG|-c1Tx$ML+t9qq{GYkV zR;0`iw(*bV?tAtjJ}->>rI5i%bbK!-bp6G6;p@&sai2x!)W~mA==j)oI=C<5GOnWd z<>{g1RcHObI>P>M*YW#of8h=KEp(yPI3tBNv?GlS?)vRM9^w&l{%2(kebMz1`{UKA zp<}8>hPr!$IT_48w4g4)*gtGD0Kk3!luqvcqo))HXM{QayqCE~Z zsK}odDo0HVRb!`xYOunrlV(H(mS57a4qAfg;u1{ zc5WKmZdynqb7oqoTRJV&kC+yYPM+qhz_ift{Irln0kD^N~fNv=lO9r?Nwhl6)s z2=C1QYB;cXV)(`UiDCbpuY{lX8z1()GCut5*jK{d#bd+UbH|1~9bXPVz4Uz8z4-a? z)}1efhO(g{dF7d~N8CkEu-A7F4)s}sd)D$2?L!}rKVs@;nKS*ulp5pxD!(8ccChr_ zHB-Z{PuQ#bVs0qUdLkU@=o1!rUx)8G&v?_k!4>Oi&V0pA3=FlGtQk&zDpb7kM5xSq zGE`l%-qWXF_=oX-8+II?5Dtj@5dN`cLfAF%--Ms!|Ld@G?!UI~{4c|{$}!51HsX7^ zq4@B)P%`*$L($NG6}IpGSK${gO$!IShvN72V}JAwVR3YPcUpMoO?4qm3qO7B--JCI zCWUJLUC&GlKbt!x>}#2B&2~!IyYznwl?Mw##hV48x}_jg#R|NWf>3j~z#Mvk_058i z{vbERU7J7?Qpn&qPNHRTL1>;|VC|(Kw7ybc?WMrDupneSqcmB+r6AOaJHh|xn+5vb zg3z$0AT)6|lBFZ2hEw7>i}SdM^08Ax87|YSU!1CMoEomuI}vZ0YTk0HdCaNWqN(8q zzgtLb(ngTcxyQTYeMILVACixdI-w0hdy6(GX@1f>jn)x!=Be-Ki>~i#i=)2KTvBx2 zvg;?)9{2wrPYVP5-udjbFocZe|As~GFBW24myG{9 zjQ`NK+xSnvmELRo_on{en6G}F@&6`ccCvvxX>8tzCM5LN+r+aAd$13UoAm$u571Nf z;;wWaonDG)?_Qd_{jhtHMRDzA>88Eg`)S2ZoJR_cGtl>K&f;IQA2*{duL^qjrlXfIWfzn~&!efIahGzIq#qsc`4kwJ&R1?xVQgH9 zsx8JBs6n)^==1&0-|rWOMfdq$zc8E}iP0E?aroT+U+Nhz^iJk_H{Ns4IS;b#uq%di6F{K~MBUj4khjZ$vE;XU{ce|)ICjg?2#n1|`v|6a!ajj?~p zns#j*^m9Jw1LYzB++7t9M?0@Nvkrn6I|CfZDZ2lcJ&#M=xRfnVV zzfG=5A%o-SnGay6L}!3cayJXpBJ4lSznOnmN7BfkPCcu?_v!qbHl*Q-`k(x#`L|Qz zIEzG!GV>c{L!Ca)eG$@0K%PJZ$;iWX7L-^(DK^cMKp0 zVF-p{I7XuLhyMS9{m}7M`w_UuU>qjk^ZAp3*0#8(pzGHI!gO*LDu#Y~{&%MJePi<) zeN{|fTD#8p{-Ez4{d#an|Cn9!gz?3*&Y`FyDB_HxR|K49k%?=DYyeGe37n{h_bMTKs3{H@%1SDDS|# zD6e!z;v3mvy)e;!#f@a?{_L=s+=kjs%F&CS^(&sacUHAdIpbI2w;kApJ=li>==zgy 
zD3YZ(g3hOuU$VpZ2;*eo*iYwM2FqieJ&)pR;$QN#Gg|up6g|pY()}{@)ThtpIQ=9t zhh3-awqNnAP@%7>Olo)AwYy|ZOgpT9uGK%s$%Jc8;VjPMA}-@9I?=38Z$az(+Fi6= z(hehiM!S5%{JSwg{RZQM+2TTju*p%{Wn+RSlxnAMi02l%a2Mr!oxjsw-=|l%n?q0b z4iD*%5Z8a#8aKoa>cf%qclrhN#Q+RK^sO($zEFrE+`}*&Mepi&e$8H@_nc3-CjTeo z|APFZ`nvpg%YRfy5Y?5@!j8c>Ou!^e!F0?(v-;ekj<;Ud{_j*b_x@G?&t0dUMgPlm z6bUrkQ~$fwf3nGSiKW&M#4`u;umI8Ad~%&V5A^6vNh_AnmmxD**&FX3+uaLgW3Aut zUxBFJuOwHaG|xDQj7eV&dglGK1MB(Ch@R^iWf2>>H>2pc`s47Iu#NuL&##1g2X-ON zHr_`bz#){P_?mP7_6>D8M}LI78gV4hgcLG3j+5y4jqh@jonP)9&XVVG5tnflow$Kp z=t6~g{7O`z8a0Ta7ID;RH|n(+N6Cctqk;a}{-Y*sOEVk4WumcnK{S_S{E=n+p{>5PlGbU>JrYTALe5c6yeh$uSs*37CW_n2s6fGR`mh(E9%mlo9UE zZ<+HX=V1XBAzCwCLN3E{tUz>jqKyAa`e*02SJTrK&xZf<{C357=g(1vYSb*XE2^w9Sv^Ktjp0Zxv`rx^+i@paLzojp^ z$2s>R_wg$g&jIof%C}fgC6AzNw|uqB*LHBnQF~CngsCB8d#%4eFaM}oD9kGU>-cYy ze==_UKH-`sq>#aJoWv=dMe|MhzbB2|@=vzimNxfIBZInp`M)mz{1aq@a7nU}yNOI( z@r?uVT*PHu#b@77DEf{zWrVopGyNrNw|>iTrzg9|c=sYdD}SYX?L|=j%9788PT_8$ zoW4hS?xI)AOO^D*Hpt_e$9bzId**S>&nLF12f`)E`{YA3(%-P&^oX8Prdn?+_t%yC z2j;HMsR#6&Z)<;1B#yN2e{|>z`*L>%dk8)r9tO}mral+C{%8-u_lAc-+(R%7!%>-M z45}VglU0cIGcG0L6OB_*^!>arQn=9=gK?OENtl9`o62B^y8S>MC)>H($c(liP1Zke z|Ia96?*e1+5$d(^Z-cRK(?P$k~#>)M}g?T>hC z5x>A*pRKQ1Xgr8gZ9@9zx#nYx3$V;}#lE{&f}Zn*+U}Zx#sPWymwwu0a=9=ouo7K= z^xshATC9)UZ}>j~?PSM?>Iiq|v(JUiWMuzui`-w)e#zVR@BODx!o35#(5PKcVh=r) zd^|SYM?ZkbCQWbAe~{5x@df-2p%h0@jW`l$LPeH+-Kbh;pEujPMqeD$C)ZXQAD~WO zU(W_R`au7FPyfDO|Bl8t_3vBQVPp$hR~etZV$8A0*n>=SXUJ%6rEtIB+$-9nnjw$l zBu=5CUtXx($aK3FJ>Ngr$!6nM zI@B13yoK^h)*r~bD5F&~>3iY_eGiU|wL7Pq z^c~bI>Yi(I7OG?DivbvfAsB{c>%T2%Ws9fKMsG*@iaMZt)>Rt+;;1+i;%xA&lW1IQ z{EPILt-pz9Bt~Nl#$ghspl7@}QTZ#L50N~DvpA27h}QHjlULD+8)#I=qw)7Gda6MEo0t{4=y#FnR{s#~5op7GejOkC zt`gblzXcwVIsc3IjlLLwv^@ty$YB_ckr<7xr>);CRew-=-5ixV6@B+l2K4xw5*P}Q#OIj9Xne4RE36>RHDRI$0Eb@H0A?Em@ff5cHLJ%hwG0h2HV(=h|H z(0tqe|9hYA|KDj%ey_FpgZ3?x8Sc9A?ElHu|Ff+Bqu~MD9*yP&nl4y>-{AhJ+BrZ! 
z%`QL4E@$7@(&H@y!W{RQhXq)KC0K^#Sb=7904@8?5ga!EfVTPO2Fx?0XPbXO9UH%% zt$)-UMS^VLPS%@$Kog?#_bbJ-8f&p0jnbJM+&^rjrv}PfA9ED+qIcO;>*S5QU6?j< ztGd>6{$5>*{(q-@rr-73+k`Lk{Ho@9t_z*Nw>BB=AFAb!dnPqxOxP0Pc3>CwU>^=( zg6DpStZAilozvRCi8YhZ0;>G*!3i+?nKaG!jL@)!GsvJw5lBYO32pz?SO@eXR^E> z?LfUY;wYIAuHiNBXQ}tIko`B;e*}tWI%Z%N=3pMWzRITfP5%(B{df2XZl_I_o$vGy z3;eb<>Nhv3_gKWe1nJ-Ee`h+cOHY!GbIoPYi@zqnZ#l!HT%@!SZQasGraPqXf_de= zPlR9QJrNFG=@;H9crqLq|77^Zy?$Z;?We-e3!V!5E({Dms~;Tp<~<$WzCJkYNe&G^ z9sf+&?Sr##^?Nq#dghgLOo1*s8rad_w=>y3(wV!>=x}=LdI(Unjf65g+X=&-zH+|0q=KePDci&m8T& z5UamquKdr+=AX^c-U;;`w?kdlhuYQ;!qM&zm93lR7;c8E;_So?+(H-b;yxbY5puqx z{O`;Sm3wnT6{^c}Lrr^bh z$Tn+^?PQuebKAGc_veOs9Gz{h3Jv!9C(%f6LTP?(NT2>Vj1<>sj6wP0+)y?#H;kiK zFO@b-pie^lpffgzp$1dTO9S=bg~Tgg<+ zm?mj#({7AIX56>V7^eP$`3WTa-oXE}{Q*tfE5xx9rRR)u$g+DmVJ*2H<@x^Sgxrj3 za~V~6(zZq#`$!vEyMev+rQTti-#h=Lo|7G4@-3>!?X1Wyau4>Q>pQ;BLmonDVQb2os%++A>sobM8^7=8mK2hw_N#b)RVeKGFVt z;{AW({eR;9f5QI%BxLS=Vm-Ut_`EwDMFI^AKT$?M(f)m+{rkk3aQ@;euF*fTA)Dxp zYq~>nLw88gQwzI8D>C%s$m|#QZugkk?HzW9ll)5ayTd8+EXpT#hx6n`loiO=K>5OD z?yIQ1@JWbEV@;NPqTP$^$kQQk)IM+~)yqti7va0{Mc;a%LvLp(zBPWhLf*1hr{ zlmDc&wMb*7{G+bp6Ls_x^Uw17;1l~(tVL?438Kyi{TQ9DC zO}-Y&=Q{bMj~1@;tDl51WXF%YAD{nwA}5UFo`6Z1f}+2%{{J_*VLH9*8=r(3!_G^JEL?DSdQnyEQ8M5@Z%DH|knD*`{x=%vT2ov;RjuUZ;xo z_SC31as5}Vx}1L5+OO-DV+B@XHP&K1S{^8achtSP>LA(vqIks}eS0KLukWJ{s^Ek`01lz+%f?P*=}oo-`Puou zYI=`-vR@sgM`sqRv= zUU#h-AI*QWub?`M+bmL!tHZoWEC2(KG+|v1fmqeSJ^guHR3QZQSki zANTFix@_H2_W20*`DFIFcpHYYtkLu$TR~)BHp~`IpwC z(wZO}y5*m2=m1j-gB6p8m4$4?6$Xm%Gz{2M-{l`M*Jt`$=U=`#FSO@^#-A_@=!I^v3ty;|kk_ zo>KQ)&slSzk4C25yRTOUT9k!jzCq!BWB8B51WdvdOvemV^ci3bI)MGjK1U5bhT2N@ zIqEL4uWz%jAF!qGv9I^Cui5U6Xi5$U&FuP?my|!G(1v!TkwNLwe*bj-HQyO^_bh+Y z`FC>&bLiFk*}l7-5uh(XyqoQNhYkFIExe_FSi~>7Z}A7dQ{1LamzE{`mLYlFyub#w z>s)&mxK|)O(Y(MY^8~Za6U-dI{$&62UyZd`kMif;e+2uVUM($E?d*U0HpDwVy}x_c znBOSjw*$Md2m5dUhfs8@f8(xr(fr?hb2F%Z#oWv$b2Pj4(}*J_ zOa?vuw?4dBe^pXs?*YG4Xcndgtx;IC)qB23Bl^}$-5v9P56u4^(>|bqUlNUILUbnO zta#EtQ2+Je=jn|DjZyFU);~Q}pkJNnyIJ(B$gC3%?Su8R=;T*=kbO_yLis{#f#hA3 
z)vH%)jH@@W?YSSKcD#NOHS>*SQ8C&29{!v2pRURIuKIw!7=S?-f?@b?&VMTR8T|L= zKZlEBBt~Nly1pL5II{TX=I#Gv9>Lk1=-U$=A31Nvzi_f~7NRrTQ@EoxcRD!(voHtI z+)LC3&!eZ*^JwqD0{S9k^3^|lcAjGizm9J?>q2%;RZhtjSc%nGi}mOxu>t>zYhm+2&FiJ z;%4>fE$^qF`TwEj|5us+*KgM7H)CWi;_UajvF86znEy9E_-E(q+@slVEyfS6m+TL~ zXW#8lA2bKB*EN2xcE30hXhL*0FL^=$RlN|=pOP)~of8tx<^yBKpFmgB&v$gHyXpF%)H1bPg0zLJ-@;6G^rB6Y$ zXC&`e{r^ht)rk84wd8vIxBCAIZ9%0rrfQ;gVzzc+v33Eq^tkbR&;KL#QQw9t|Kiz< zl8En{%YXJ+<4@23j^}^R^WW+D@Adq*c>c(=d;WbQ8^G9KxI~Bc4@op$*Zy7dKgHtO zg*`}UulAA2E9TFP2M*8=p%kt15}jpk(|@;5_S`(z49a&pf8+ORlx;VziNdpq5@Zu< z*C3~(iP30&-}(Cs&flMN{@%Cu+aCD-{%zmi-|zeT zs2k<{{aEMk#g%a0zJZ=}uSWN3vY#NaeRvono^hCfNoagi7)+r@akh3057X&0ka=I+ z{Mz}oxz8+qrN@ScIpjQ)pBWw&kc&`8uPT$T*zmA~dl_o!@tyKnDW86;xGDdrdSLw@ zHS`#23%ob)`mgu@7JV-}EEmTLl$c*Be#iIk&Kdt*QpY-sukp|J|ISta7pnhD)qk?* z{P!K>KlJSXwFbXZe5(Tg{JpwOT^QEVDi~CJudHOc=?EmF%-{M|mN#ySxl}@%_ zf?oE#y%OXel#j4Kf;@m~X{D$Hl+|2pY6a0^`#UsSfp`*?^)k)L>SmMNR) zivfuC{|_REU>Js@XAce8X&o}5ofu6YgK?OEMt;#ZfF{v<)*nat&MXw9(s<-k^Oz&;H*^ zdk@;BHEHhwyC;c8zeW3h=ZR+l7GVjNVL4Wy>vzxfoPX%}t}?>iY47=JaxKys7#_0REN4R_JUp8>-gHP>$c5nN^zp{Tj*uOZ6L^u1F&6{NVM&HSA zy2S5_`-&%y1ey^2mb@oEdTNxoQEVJmVvaClULrlhJ;*j`&j=IEAsi=9qWrKnlstc4=GV{iEmnz2{PYM*gF`qTzjcL!)p_Nq$+@e#P?$Ip0(E(Ku9?fowi{ z&-VwUZvcG|GX3Nq?P$Xgex);eh4bpqFnaXuh2dn8`QEaH^0h&}=DJ3EHIjerZ23a; z?S&W$t+Q`=-`=$=*4oeb_NFk=H+;(JW9Y?Rc*)!P=M#~AtbbN_V)^=KGS1!c&XCag zj5&g*>0dQ>@QiPAx^4m{q4RgX{Xup(Uof4VfvztL_oq=|7CrhFcWnOSxxV--%43%L z*hiUN?0yT4DL(8M<_I?r3y}V(Usyyg!7}vxkAU$)`XzIIXufSeo?Y?T_r{|CLQP&^ z7s_7~S;t=PnUCjAaChocmy0L*AC47d(YJk5pInW#ID9iZygS%Djr|6{l3)Mf8rJjQ zh|SoB5^VqKzYNj02zJnu``wSei(c$|X(j$}v2}YiM``>ZPwh+1Ymyo6eZm~TA(Z0C z|3}!}2S;L3`SsHrnSDzNwx7qvB&e@{v@;Wl& z_N^KhesFwT*#FMx@WTy%8V;=c)9|C?qr<^F*1Pl_?RxnW;kzx@!tPZg!}s1D8TOnW z9^QKSW7eqrSor?C!^75vzp`)puflF=wWDgsm{7UvlVRJmPg(!GgLAlmAKe%l4y_s%F43zKCE+LAe6#yX!rQw_!r@VWZVw`3e+K_~sC)X)W!B>!4!@sePe`8f?xbK`nFL{sOQT}|?np5;r497@x z=*RDzZ;SwP=w5DYz$*RbWMPc)4`cX8%s)7S3|f!#kG#u2Lbh)(CgN;q7%iP~7>@~P 
zyWt$iC(?5R`7rocCebIO!1t0*@Li1H!@yK=)w@c=bg~S!+0rnRoQ;|-?(2a2dfqq+ z;dyA9<-Xiw<9zptx-Hg!L<5p&L<&tU_Nq8!O-#R8gvD5jWq7^QGt{=;jE*VR-<-k6 zue`|Z@qO>`eP?~&>bnBjyh?uaogbN{{!8(U2F)XYmv*dcR zLYdQMeq#Q!C1Im*PW>Fm;r86|AKK5waRfKieU5EK6{=?q3){&WvUct;ebe6CFH1tz z+xieMD^HHA^DZiHTw8H}AJ<#wn(C)0Z(P^!_aAXw9UXm?yQQ%g`*9FI4}4bSVIwAcF&Y4lm=FGKeJ*0>8Y zhXRhF;!^{|3GyUP;SBnJV_ypL0y4L(F-R8sAKomn{(eb_eV|$QdqsS1lWSXNZ8my8 z3e&tB_uI4ZL0MEiWnfq2r`)=QbgQMs@TD*FT%*c>V`$ zb}XPTLS-URMQ`RKZof_ z#5|oxti!sI|GM+)t<~#T2DS2fJDEcbz2RB;$~RXKuK1;PgZvQJ)o&anA0q#qFN710 zJ>1VT&Ph#auQxrTeL)uMyL-y(qw<}$%E_26S&LKj%CCFAU$F)d{Twc!uTGvId*9~2 zA^X23FOb)wj+?lRySR_OH}z{RbN&}f!XR=8N--Q2pW=(%^~Kly$peKrchdEp+i`!v5|T>s~%#`c*17uWxJG@j|Swfdbu z4-1fg`wL+axfuOV=qu1Kvy@)E|B@MD89n2@t{}6cyx+m*g3woEExHoy6Lg~odBpKa zb=#f`_2#rUAbI>b<>PZ9MK&FME~M{07uNgjMwDYKs<0h9u^S!p%hZ2m>c2AeUzzf+ zO!-%){3}!bm4)Uv%>Ty`bNDl8rDxH$p)9mJ=a}bTJ?*)$S6Y)%`@H$&>I zE>sI2MiV`aMx;>e|C;anKxS82NIEBLetFxhvXG`%eo?($qCF^_TjKiWy1p5%?`hZf zWLc=2>;4z|A0(GccawDAaQ~hD2N`L!BZmTx;RH_N6gpm#|ImdTy3vC?3TS@U{hxLJ zh-)3RT8kiC@BTfDc2s-bXQXou7jOx+gP*hh;&Y*oo_yT3316XKLwc2KbDgO*t~K#o z=ofbrw{aKuG2k28EEs}E{_24tlnUoNlusB*AB{ig|B~k#r^tWuU=x`Z9_O6#n1G3x zgvpqS>FBtsy^XFnv~SU^?c0Mqy@2M0{BKM6-^k2WeeZX)$I(V_?-i#EDU(j+XVlX_ zw;n&exPDj9fH0e$lUCO*V~pwZ5cdwtqX*qvtot869;%fi3mjX7T4^sPm!gK=aF$>3 zqV<1-S0Ij=Nu%+&`y~6m@?7ZsMVU2rJ?}4AJHk0Jf2}6hVm)I1+(^d$&;GAJ7s}~X z=>JmLgY^gbdtcwgHy4ca`e*m?BjcZ}9lYH+JFy#ku^$Ifjl)Qy!_zQ^v+vu#?DanBT){Q;qpj09Eym2#bJ`MJxJe(5!YDS8>nhs+ zt~2KQ>dVT_=e0>u>p$)~?>=g7saMatFBJE`dPTpQx;DMUeQt7}^g3x2*B^ZO1MQ0q z?q4{K0n6nj3_&S|VldS+t=YaqqzK(wTsXn1r@F&beXD zAA0Vh`^QxJbQIY7dGst~|D#M?+(T(5IU8~9O##66Va*oh<-2roiY!hIpOT~a9S zf1@5Mu7CTlHg5L;c~id3qmcc#^Z({m{0}wqPfGc% zO}bH<*%svk%IOuq{FAVi?2>M7kMws)|5@pyfSC8I9N+fMm&0~)Cu;BMhrn)n^1L$O zw7!Bk|Git@-=u6qoMYU0)O<$g?8iY=qraJN;%CP6(39wU)%W=p-$sXdIKSXG6_-If zawy;!D!!y%|LU+%^p8z(KQr{via&`{DDMCFwPE2L{Q@qb4_9yv{kVzSxQqMfXYW@u z4m1D9cldugU;AJXIRvE`jyUFfBsm)6FdlJj;kd@!1bVUm@kw(6=#x<3Kggp;pHTNR 
z>i=hqFQ1~1d&URG|8LW;h;-*;;f(8;>~~W!9dYf4Er@*!Wx^dV>fcAPpM3+H^kudw z@}u;#qq)ZT|9a#9rIl$h{(lYI7IA(4cGT!w&woc*FRfbk@=S6Ls#E&>$+#Cm+>;=k zVv}Q;>scT^DXt;f&z;t<7q0v(_54s}KRvE3n0>J{ET!kxxi&1LuRwwQn@5jxx|g$m z*SLQ)Ac@9Z#ulInX{_{{)mV%5*obm$MHMJ~{ZH?S_mpM-^Y1p_aQ`@h z%yIXB)Z7BJp&iwC9t+#0vlF|q7qyQYe=zE?u%901WHjI)y&CC(t_>;Y#5D|y-`fc9 zV+?<6JU^a;inp{uQ29ms!u(2Km$)1XIEE8AiMlt~zq{oBJ=*D=+T~}p%h}0INZ&Fx zz&U4d4i^yT;$I^Ba0MOmQKx*=h2s7jWY0VD=MDK2%{$n?v(*1{)%9fS67@gXCfrWO z@hjJ)(~q0Djka0Z&)Nld>A5Y^e!(8M^Z_eezi@t^|Je1QtuaVk^^4{llBK9k=r1Kl zqDI-!Ff7{r1Hx$Gafo~VrG*>Gl(xZUexNPa^^40d{TOpzH2%@L#=`mE zl!QrS|A#}ETs+S2LiT;bypCX;z+V}MES%}lmLan#_I-^uKYcdlpzC?-hP6^LU7 zb_?%E9P$-7tQEJ>S62PhQmT z|IP>IFCOxrPrGjC6>tnE(6&VV`=ovZdQM&2g;Vr1D5zia=t1=LpA%QD9dUuYgxXQ+ z3-StTmOt*d{$qHHfi$=_J6POKggii|9w^eH~X?3xq#y_C@Re&o^hA9`~egc)>jJ56kbbc((_} zhB${lg~nN9!%gR829FE3$-8Ky-zNvW>^kUO1C0w9HZBYj9)i4ZL45ZJefZ*=FB>0l z-S_}9bIQ38V0L5_tJ%6;nz=3h0dhXaT(Rq_F8BieqP_h+H%-ASS1@jnaO=zsg<$HMlfKNi}ZpTpnn z`b5|{<`dz~$3GT!JpPIBy)}Otc3=L}P;jjOt-lH1zhTV7<+0(d7ydNt88tTiFkYK7 z?1bYdaSFYk`dIkU4fXq{J`oN+IWGMD(Q)CxzR}@F&y2P&oPM;PkzwDok>Lj|Bg5V| zhKKLh4-b3r*1A6nyQfVu|7}v()iNo3cgn=DGxhI6-?ztw|3|+-@7Ju!FOQw^ADeGz zS0Gz=gURTZ-W?g97@J)W0Z-%A;?onK*(Y{z{ z|FQ7dV9HuAHvB~`pbpONLKiNIT{gZKw zK!I#Np#8N@`^$M5vUR?;%@X&&(EZO9XO3T5IvKPhhqg`T=%En9E#~&UI4~TepFm+B z-_{;;{H*!kT|Y3K6jz-s38%<2sGT-2oFgxwhTbs7d%ZO(`oPZqW7%dAr;?! 
zr#?9CfaE*c>SRip(1?fYe_ZjKYv{*K+{Rtp$AFdZkN;=mN8e*{{f`3=d=N!DL0rp1 zeUOO0g(HZ*$5!8e`&BjwnVDg2a_I~~DMljt9FGbEl#ZQNY%scD}dRF}r z=Kw6AFT!GUsY`Mx^?#@OA9;GAhJDKZZ<)nDeV%=aR(f_W`*aQa6dhaGx9CC+-RMCc z1q_$=QY^y?RC_Ng$<zgIbCDMN0e;~b%4emDwQH{e$B8?2%(Q(N0LDw+PccAAx z#`8^hz9`QB9_IPJOEh-t3K+vN~eHhIDuMmHO@UrPx^g>-=CtN zLA<88H)AT{S{*wl?gB2M55@M#6yK9_|BCQ6^rONzlSk#}v_t+!2)D)EMcq+t@YC=6 z|Fr9i`(I7rBa<)EUv7@NIX>e=+an(7o${ZC>1elitkEZ&}Fx zUm~xO#r3&vJji?PWUj?;>*XV99{XwQK8`~_dTAO1gIUBX7^(Bz=Q1iHc1M>AJ-~ax6gZ(AiXm1(o zEPgTi{>i>VWE}s$f?SE!Sc~B% z_AXGsPRFX{!`{NdM=QU-V(vkH&PV&xAF_VVRrL$H z-ci3CSHE0TzZ_M+pfJO}d(!%#eR@LpBu?RG{Jzu|^=Np39~zA>s^9Q%99msU{g+k$ z^{D^IM&T4Yy-9l|&1OF%opY$PzfJ=z` z095?u)e!CNKHf{_#U$VvPG-Nti_Tzh!KWZz1lH7RRbg7B>~s#(6E|YF^{h zdQJKCn)2y2<#&ek&p^wLG%)va=NwXdeNTP99x$nQ6|Gk|5y*w;%&LS+vQY^y?ti)<` zY%ACPFW3Gr=YKEfe=q0%D(C+y4}~S=zW;Lm_j3OCa{l*n{`YeJ_j3OCa{hO5cgn+B z>8!^_l%wr&_c-dcu$7*h;@O~zz8!^uo(+1O)BV(I#l1_a{oliV=xRNWozBaD*IFJ+ zet%!P#qCAY^PUTe&&j!U^!oYEqbE0s_qB!bq8f*hL>hmv z{tP?eUyT3ZS7AfFSF8m|@={5F5d8mqUUw$oYC;LAA zS{OjCMUt)9fa16|GOb>Wd&N{LGpjzMoZe==9p}}MyUD$%rN{ky_R|ld8i(<4|DGg0 z*Q4!#G(CgZCy?Kw{v^BisQ;ek|3B2+dd1tmiFdS3j%uTjjc5JO&%HzU(dYOTwAE`*ko}135Ekc!DE}+@cB_W^ zF6eRISe$#;gGcxOqWfo?7x({L<=JQD|9bsD`hadq>o)G(^a)7IFHPc7>*N(o6gLTz zF%{EMhMAa+NB$>1>N&#mun74t=&L0w1H1a;)(51=@%YQg>$qSEBIdN8~TI z0=l(Z9`$dev^N~@`&J^XC8PgrJ-HF(*orD_$4>0VUc|M`_mc-vH&^~wzBM3O@18o{ z8=B~8RJ<}QR6Fl5l1L+icI3Y`EOe}r{|CyW%7I7rpKxBdKsKM(|A`~c%PjZ&H+lYK z8`>9I|KC`w-oIq?w?-e}paIUkwe_ z%H(t6Yc~8zxIkV)?Hc7Sc?EGUMFUcIv>|TzwytVJkjETbfNSW-P29#^+(+F)_VJ5s zP_}APzjkAa?YV~ys^2<&i~X>ReTyT=;E{dX!oEd^xXuIYW50=Zc(;D}9z0y1FEjYD zFkp@E8ADKtwkd4nfqV<}+&aI-Ncw0L7VIRUk69t#u6NvL_= zbwBI6QSr96BAOm|-DrH$eW0$!{kf+GGTABqqWGik4{@!5&DS6JabNeWCQGw8|Gw3H zTKaT!yyN{NuCbCk{l0zY8Rz$T|9iauZJv*J|7iU;?>#GAeE-hNJo8v6lg>=c#vHUg z@BZE6JbF%E?Gj!7fM1d z!M=D|+Tz+#80wG`!e2c=djH=XTQ#a^>S0sgFgI5a`Sw3_lv$C-(g05zl{SVyUG8rGakZkD#H`X zN6+9H|NoT#pP_v~<`I4Qb-Uaj8jwVzdr6^5J)TCDG`3?WcH<{csxzk~!jJj34?SKI 
z9?k#Mrhc^li*n$hf2X;|^U)4CLKfF2rWfZ&c&2gwXIwjY=Yb7jueA5$AmScAHP4!J zMvrqI;vB}qQAhek*CsCR(HQ42{{0&t4hN483_p5ja5#V;nzOUNXQ;j_YdCfe3*SGq zChVQHCcHIkUD(sICVcPEI%_zt3*WuGA?$j0L-^Yze;u~Z`sC74aQO1*P)o1)IUD`UPlQv>IfHY! zfJ^8@OX4%?;6F1r<==-RSO0y;-uic;HSy_C@y#*eisQxky(4@-+J5o=uZinNRfqol zFN_Jd>34A-1J=sN7=ls^$4HFEIE+W%X7iEB-X9xtNA?>#IEkE$shEy3%*1TWLEPVO z9@)qLzaWPBXcxtBqy8W5fan{K*Vq4qzGLxAk?$C14`BKKD*uacoa0| z8}xtcS4}-WHZ)E#*IZm?)wr;m+>5s5<|2{@krm&ie>I0{;ls#3rQcqke)qHb@lkh3 z|2!IyL?cpYLK;cGNh5=H5eUIyU+PeALrg2Q8k0qRxW}G)4=h2^{Czp>64Jgk4`<=RTmHU6m{jYQX zIOCkmzOmsPc>!(8flFi`vMpmn*9*!8ToJy8{0!{^Wk~ll??5?HH*0LDk7Fzl|EqD6 zwD)P7z2U#MX#Y2Q?|yd^w{aKuG2mOiMRcUJKeQ>jR{0;Yxc@Yn7cP*^?`Z$yNWE{R zM*Fiz-Ec_z1MPc^AA0!-{TEM!uA}C7q8mNPqk!f&JU<+H@`;c!543g66Ux^ol&??l zr9BbqUh@9XfFv5vdw*zp$NM`vE(~#vr5KKp7>#ilj|rHFNr*On_53kmGQDQQm{7at zPr_9Cbd=%I{QMj0-+SucTk2om{%ps3f2)2Y`(8~H_u|;X-xbH>&l6W%f8gzsuzzS8HS*mhVJ*2H8`1HM=kGms zd9OJ#`t*9vdj1zZ|93tAceVe|YX4u<2T8UH$GW*~jOS1GR_OnK#drW?2YSD1eSc|g zMHRMVCw5~m_Ty@7_y5+|oG*_J2Zb|xjIkgOBZ)NHI{inBd06y^>n|7gXXMK$>~d}H z_u=}>Go4dC-`cHY2DP!@mpq1=b^P`_^xx|5Jt2G&O~PsQaU+>x|8IW7*g(9#PyMg{ zJms8+{Re00=Wqf4tNDL*&#QmuKB(jW#rgxwKNK$cZ6B`S8v1b)w{aI8FS7s9^|JaM z-5b>J$S;4;zGyzq|Nk!kKbbkNee#a>3EG_7j_QlnT$9d#b@CI2p!PER|CY7Z=nv=r z4X2O9znK5W$6g#?q5K%__&CJ*f6@Opo?iJ=cE9$=1o}iw!emUvbQI_R?co1Ez`wgk zyIlF4LX$k3J}UqJ*f?wF&BScX!8|O$B6Mu<{h{k!_Tq7Ex{KO&WFB#>ce6621xH?x z|MAHGL~oCABkUg|ouycY6=<90`+3$nKJ?ry^#@kd*P?KM{|`Or#VpUDNm%&`s|_{+@pRTjn1uVSh{Ki1gxKgRRofKB@nm z-abnIdrfIrFP+NISWD;2*3YEZimNe?zMP)yHHUs*Y1m4yLi)0K2zSh#zhMqNxm{f5 zsx~vZ8*RJn(N6A1_Ok18pE>t=P`DZo=LL4}bDgMj9rdoQxc|a5_wU+M3*Eo#j^iT^ z`%MyQWYCTr3OI(2TfRT}y9+sVBkt`I*LW=8(f$jA-9K7;)&FQiyZWg*H7uNv&PklY z8PuM&{ysb696fo@*o|Am!Ug&zq@R*EkhO$z z`EmZvHL|$>7N1_X_#WiX>+46|HtT=w(Z`1*-$0}OzBn$w=`>ppMf>;j&i(P^&@Y{v zxQ)BGkB&9QAD|04be~oJDFgFlL7CW`<$sXoku7`#Xr1r+5c??G(XX5tuwL1ZA*j&* zSxOGaNaQ~|FpMV0VLbZ2sC`fNe#YDYvRa;+L{7$3Oh>IUHI{v4^f>o9&VimupN;fc z&krf*G@{RU*Bkf%KFt2d=6mngC))jUoIekXuoz3x|Le!XGIB-Kzu}#(GS8P@oc}*` 
zU|3CW6Bm6EPW5fqF!k?1_3s$YsYx9u9F%qK@ z_dUq=T8D?8+o2Bt z-Sz3xsy3#tjI2r7U%?o?ne^Gnf5zG?d<;p%Z#Ff?H%z97ndkDn@d3`6huG#^KxPgZ zKSM4?ewMsDSH4^)-;-T@Z@E?c{f;fe3amt*yu6xRi}l!ua@47(>KA^XeXj0G$m=+z19CzVLNuB zn13%Db3lKXfA`Y&BR$H!KkgdH#wWF3UiN-a9G~g<;rN?)9^yRWidXn9-}3B*`}6^| zlQ~p=)pP$#bA0K?Z~_$-Gr~#o6waVu-$4GAFN9w8{5j#iZ&;6*ycBg@LFTkE!(=~h z;x=O6PF(-{EUI5B zp9^*FwH^&fUKS@l9&3_r8pZzq3eQGb+p!b7u^0Q%vB~pKc>c(t8$H$w$Upm>agNU^ ze?9+op1<=lWb2Eb|18hnyKN`yw#i?6t->=l|uTQ@}CAeSq2) zvOlI6-xt4MZSt-)#veF#GM<-}HXg2Db4pyC|96Hwhgx~_0(l8Fe%BzKqRikNqFF$D4GD^B;aOFbw$5p21huFJ%AEo(n_BUb2+z z`_3j~SESz1WX~ zXmgKoj(#;gcUk?tkN=&XMB%#e3+TCO{K74Dw73k~k&DN13@6b4-I8#U?EMA52-)`= z{*tHF7tx;4-<9XDiu?4QbF5hYu|F=*Yo}=cc)pkD(I?WNkD!l!1##`dxQ0Q||G&;y z192JUdp~&-ab3yV8~L*H8`y7ar{@Th%Hk9~2?XXXEk@;{k*S^d1>efxmk{*rO? z(wKmWsJ^8wP1ZcC{|%GrQ!yQJ3_@~*>(k~)wY*>dlMlyT#qX`md6oanZvMJ5;-f>u zY~iiG?K$K;EWjcx#!@W9zg7ONaL!7s##*e$MwDYKI@Zbmn>=SSr>y9f$Ko8s{5$gh zQTadnN9Er_b+h;JaQs!=XQARR?7J{jUz7AIelRp_CwF2u_F_K{q8ib+xf%VRGH01Q zEG~&O@;@CKwzb+X?4$NO)AsEPp9sDDQ0)mH;>QW+2D0HXfj%AudLBLe z7TqZBzx%xR@mKaS&=*tW|MO; z4+~KH690qvMfBu0&jyR>OOd9>wHe|Vj7BUImw8s7Ke-ZZzPZ)pT4c2iB57+(2u81Ioig`v3h$|Ch9vzf~Ic)BB#V@119Pkly=C-|yFy zm-5q3Jnusn9}7S3*LoF{crLY zI9`8TA6nuGK34swyJFwV@X%`)&ijKAPz1myGrE+iU2@O%(0z7VitUg*)CD7&_5)Kp%pB zg6>Xp!;ojg7SMcoU}(8+{vDaQXa6^PmTlQ~*8Xoj_J5<_mCk)sm}59#lW*{j_Q#EZ zVGup(`Wl|JuF?;*f%(GIqgVMdsFMlW05i+))3ZfaVnYe+T z#N*!KTj^EUj{jBrpmCvcWQq31OUf10so(3x702Vv*Zydd3=T^pi8L~3M_e~BNA{@`@_(ftlzt5T|71Qs zc@n2^2GwsUL!NoirrSKm_)&4SJIr6e1$uIzd%-386{P7+c92sTHNOCmBVLaMev@tsMInm>ol-LJ7kv<89 zTiTuInWYXu^?d!0{$sLZQ!yR2YxIBX=P#ot_lyk9diZqHhW=3an3v}z#>%sRKE2N7t@zw8G1kaL|8%g{apWXO8@t}{^Pv<@6*z}F_v$A zY-pM>Hl(LmTmCm8taR>bti^h4L^-yi;J#3|G!uNKk_J`*?j$$`D4Qo zWYBs;|3BK$jynDM^*j8(dr8JM4ak&mQ>XvWjty1*V>@ec5B%eAjJsn=hQ?Q7pC(x~9;JSnYHID>PzfJ^AZALqC3v+^TN?lT|}Q<9zF6suSlX1DKsIC(S9=y;}P3G6Ud2} zgsJG*q5Urpb|JR2x_i9;b@JFIb{5%unf-sA{eOr3e}nyhUfx8T{Mnx6`~ReMCZyB% z8GBriTWW#}4`C+#b+Xw1@murYh38-%7NGy@W5XhHF_vQ6KmRtdj9h`fZ|D;sV>z~( 
zjO&W5CD&uEyk7A;^A+jki0j`>Cbyyr+Y#%~toWVuT)n#G5F44k7X@`l9z8Yc8g)x= ztMOmr`@UTg4w9SQ_ZGJ8>mz)Z!o~dmRpl%_i8M0kA6gRH$!h86$O4Yx1mYV0HR4aw zv-i^J9<_fqlO3KxXOCx;bszs) z{=caH^{kFKFXO&i5$6T8@hP^>F-O%*V->9@zhtA6Aa?7sNFQ z;~oYn^>bIG?UHzRL~Df8j~UKdW6Z!*@F0cZz?}|E(-Y z?UMhI#$>;lis>lBOw7g{%)=k9f32;(PTthEjeCR@_yGT{^{*F5V-Xgk`kuKkWX)C2 zV2r(1=qs=i4bzlQ%7usP1~+YRee(Kh$JSy!DvTW{`kw~-{-*K8iYrGIwj=+`pH}j^g+t&uOo7_v0X{aTrOYk^kJ#&~aS;XJb7Y z|9!)AMREN1KIOk}qGhi9PiE8&tz=d>uF=ad z4D!vT;T##;1{cUnIHnBjBd?(EFGILS#_aA|_!nreZq!zdkgSkuy;}cUYKB&cQq^K%Ytt{ig5Ne{K5wD}KS}=-jL^09~i~1yC$Ue-X=Neu3ru1Z@AF4c7nDpI?7n zAHTl*BpTV$DKsIC71CLW)mV%5*obm$MF%^yv)4014&9eMo2#CYzWoCGym`Ly2RMQ_ zPN4N3o7FjO*Ns0QtNAFZq_Z76u^Y85=Kr&8_tKNnXuy8@L8PB_ZPQ%i4EN6uP%SR5 z`*1j(i?%z;h!}_LQQu{~{I7gy7tW!$?!zAWKjk~Ek^e7SpMj4jwo&6ey`q0@hVOrh z{I$dw5x+Tx6F7+v`gczWcRXYLd*^n^n>lo!HNNDc@g-zIzG{A2{qOh@X=cb)=VwQ- z55^duMxK*K#gF*x$?7fI$z;tN@_!4TJ-zZ%CHyJI2lTqWd#>-Y{CVB=k#Qbj?B~nt zzuflby+rKyyW%$)@*3HXyt1g+|EMn_mP>Iia-5TV)3MvQi~Fc3F^=vj?TFX?KUwQK z2hoS16mjp;NBtu;{vYW*);t%8-9colia-|38O54-3%$J7fQi|6fGUD8m<%!{yzj2dbGy02vx|2n%hqpocwW4#;e;r21c|EqgD)wf+_Ziaf7@4aUPyZdqV zFsdK7&#eF1iQU+X+PUig7mN#_Cl9fWU;I!wNUuhE*Zckd^=xJR1|RJIKOE1G=OFGM zkw=E!j$;4AoB9ao1suZ()E#*L{?yS)_tNNIi}M@Ky1#d|L7a06XK)S|a0z|5f(~VR zXUu2FA?~T#BR@W}zvJ`S@xJ}NT=~DryCLHkjdn8g3?II9`f(Gt(KcTj;VJ76&~u(c zmwNs_eZW@Nen{G#{-Y(<;o1)3s{Vf_JeB#P?~%y3`Bj1vIUI1aL<#(O>Bz25Sk>DfKr&CC22c({&H z6FrSaq_DZuS_N$3*ZJdOUryy;{w@*y1^Lg*Bd(`zx%zjV`gfE3MK-RIzq9=NJ^Ja$ ziO!jX$(V}iD8o$5M#ud3`ycfs(Q?s z+DOL!|8la=`W0KrnD@6}Gh+SU%P(In1K!dH;P`gz#9mZx_Pl<5FR`C~5Y;%0er>~k z{YuqS_%wyn$e&d+WW#U3%`N`-!ge?eA2Z^cOrcf(oefKaj7Y;b=rH0$>K6s-%Cs-r=xAp zdxoLM~OZ?g_PwHQL2lM=-l7jTj)}P@oVjBRAzArX`hC8pFP+}DL>mXU>A4q_ z8*G)k^!q3%Q}XEX9J}XhZ*21)U+|q<>$C5ltr;Rb1f>{`kr<6}7{3+d1WZK#uk_=S zlTocqn@ZN4wci?9hFbbeayF9kd&3Rytwp{+ukDic9?|;?`Py~${bfn$|4xbhR*dJ5 z_R|dG5B)BVTbV~Lz#=S0@0XqXD{G#9t3;h|9};oJdECF$?xx4N^;!1m3VKfc-Zj`9 zKl*AE)T?>)B=`@+Kbl{-M?IwuZd6~Uj%!~aeapW0ezP7MQI4&s!glOL$2#`+CjUX^ 
z-qpT1tKL$d<;j9@v%0rM+D8tsv&iE5ck0bp$F`$-%pmB;%(xEHij=!a1pf4E28_Pjby{NK;IlQ>v&C-I|Q_Yw!@-bws$p|X0(y~Ga&-bn0I z7JR?vza{p*bR+TBs#}RYQ*I@`*L&4}{rALo7yc%(Ysu9_OUszhTr(zQ-WU~*Ja2AI z;?tq^)~Jw-|Ah=*ub0OT4JeK^`-y&s>Vba};+}MeddxvQE+0FmnjQBO^?Hpp7vH`( zEF3;PIMnKws!x2}9On;*Qekv7r=vMv?Z0My6u zuKuTs`V`32J$($f^s9`qZlH7ea0S=UkDIuSyXaV_pJtQ(r@{Jd(Cs%p3H?tf%+OD> zMn4UXAcNNV`f<>P_POF_m4y4!8Boc8h#_d(R1&h{OX<063Exgh7)~FF!k!ZT#}c-a zHWEgQi+k6NBgdn58XqA!5jEl)Zj^*1CJD#A44Y1ugftp^OF|00=KNI*)&Kt)-z@*d zROj`xXNu$hKRYZ;7w*%3C?k7+XH2iWIy0X05Bi44IhcpI_uv9DOD-Z8qb+NkCAkc_ z=arE!C>zP{7nO}&xj|QH8Ff>|=DJ2YD1wm(u^w7H(h*C)vi0WQv^} zZQL}r`|VEb#$LoemSXwTV@(|4j(7F{pJnGG$CmGA^Y`r0|G$f!kLDNH-#GFj`x~wF zEZUsceycQ8uPO}(r4!f2J4_}~tM9!=d^(1A@C~5Y{&+>(ncjpH8j*2MCff334srZ{ zfjow6FTX(6efK;TP6(ewUfmz(g!AD(po?ziz7_rK8nFLD2T1{HN7obtOf zIEM?kgg#tBhkC!0J>P}m`i@!k|2p-5{2#J;iu>m)I5OD%qgCA>_l9Ug`&spW{jhLN zI{mnb+bE9zuNfBZ(v#6QV0^%R`hY6$|4H?Ry6e&S|6$$-UvI1r2RS|jrHFgSSH3E* z{H7#~6dsLn7>@~di8@6~hesPUfZ`DeV>5#Fc$t{ z8=C0pS^7V$OT0iji?A3K9VKBYxeP1NagYDwdCwm?bmI@lU-N&k8IHWm|8boEE^k5j81lje;qI(909kjL z{e7K(flS_0|C5jW3rqC3`prq4!Wo>y1zbWO9mT#tOuJ7V0I1ewR}2~9F1|Po9p@VpES7l(*_eZQ=(S$O0jv)^ZS?k+`2T;#_J3R3 zzk?s()ek-JXH9v~{yri7lQ@O^FUN#)3>HTa_B}6@+hELUw4cDII=@uH`%JcJWI9-x09JsW5aOijKpY+L)${< z=%XG_&*`J?(&s&aJ`n}s{EPa(^=WsLlf+do85<^(Q&Ic4{#3FIaSTI)bdm#&BY0bT zhyST*jQbL9BvT{wf4k>;_uPQwJL1oaM-x4LbnG9j|1(pXmDaYX`j)j-=yULIuKzRZ zf7AbSN1k_m^Q5x?i?A4N?kns5meO-?__ns#2Y|i;1>wAKoa5KMi(gmVYE-YYj{{jV zYHV0fZbW<*%e=F4dh)0~?svR*GVU=D=N88~##px`dJk``>*_{&c*5=(uo<>}?zyPLO^7JT{ypPvH#C;Q}t94_DCt z74rq7#7*?Q&5kDH`agHc`xx-IzCjE@+*7EO9FCC~jW%)Fh1Lb6=bn7f?#=&LyIi>N zwC_M%&lK(8Y5M=B$^U5BA^&fa|J9!E!pT+Y2fpUw{<`$^a`V>wW-F?&9Utuf*(uy{`Tg~Q@2Ka|eO0}VI7Xnr z-fkYF{&mig>*`;!^`81yA3_`2kvZ?&|H3~ct-aWfHs{6t?+(&)PijLrr#Sxa+rF!( zoIlNfJnO&6YUfn@uft>#aqK{v%%HgconvtfeYQ0c_dIMu#pjhd@=$!&4aoo8`uE#CuRX@iklFL*zmmhzrv9tkY&`v+ z^Q|4%mc+w#wF)ETBaC$3X!L(o+Fz9C=;JW~6EO*sF%{Dh{oQ3`->3ceH`$QB%@mr{ z!D-)Y-5B|Cpl=pQaTRq#!%XMT#vJ6khlP3M0xUxJTzOeu&m+z^>bS}uh^{5BSzPNo 
z$^x{V=kG)FDD^Rp&@)&pjlN%*t3t;01DBD}_q7GFuY85@O033uY(&3jR!(k3^+oGr zl9j)X^A7AkPOsf({NFC)7U@a$cmsCR_agSG#(Rxp9viV=+(A_1Fp@|kgW~>Q)y*HCG0V^wo}-ul1Mb&s8Ui+#@)?perAo6k<0 z%l>B5w&0QdIf`9}wqfkN^ZKvZVExj&iQBk~+SB^K9lKADK7j@dcvG2=^hMW(6yjQa zL&RlVS1CChZCi}zCr2Z@@BQ`vmmAA3JRXnw|2MH8*XjRfXVy>gAJ4jfG%l3(EdPPH z*8T*)nTSc4jH#H8GR#Er{%`%!{qN%6KmpC}v*oh;_gvyy3IC%1zw;0GKbS4e+T-ru zz0IK~`32(o|MTd1_Qj+A|9Gv_^Z|>HV}7#gp9SX4msq>a$1u9DTg5I#^2mRstcdO}z&rAzz$%bw6<8|Nc9p5e# z%63&(t#{r=lw&KZupK+G8++0J8ST~27|%fO{gP*&mFHhJM^eR^u>|NF#%GbYAuiB8M*Ypc@6`(W0(vR=#AOlIM|qS{*Ddjy-5Yar}>RuHlyZ z8y5RTwUGvD8zIjB%lVH2PT(X?;SA2<0*dSZ%rv-u1E#P2EX{{Nq{`;W7-tP}r#A)qNCA|fIJnGq2p z8cRetpt0r}Yb+TVv1CSMW=7@`ONRU!W>P{zLPA8sX@>cIW;k=s{5W&Y$;ilvh*)xo zC7Wi=h_zWFWRZZ+^EwD$t=-RlzJI(P@ArM3`&{R~?{i<*`+8s3`?@ZD1BLzn^bK@6 zj`Ja2CZoS|C?4xCi2E~j(U0I5P9Xi9-#EbMq-PpkTb2KNrSgNq`ah1dn_Vlp%{jfK zSKq?^u@ALm-R*1znP4~i_^clWplr9b5XiVTT?Lytlzt7~I6ru} zQ%|ZlC(tKhGHyj>75j^7WBE0RbNcI0-_1C(KKLolnTk7bCuZU<%*H)v85PH^#W;v( z$h;oYkQHB%Lw=+9Oa1qavy5XqFFDM(b&+vvGL7c`(%j2^@|lYVF(0uHBkuqA5IwV= z{atJQLHaV}+11=}_V+OROO{G2na?cTrX-oIJiKl04{Q~gAT%|%CYp7&mU2_(^sbV@my zqu+?0S$(DtDNo;q{8;5mJ&|3a4~g6^E$%_NgDe*JQgPUt;PgF+dr(&E=c?H&&d2e7 z)H_ZXtEwgIo-6z|`xrtM;##DMtQcj~Ew>M#v}*lbHF(i?JMl6Op$kVKPCvBa>^^|` z1D+A$9K3cUTkZST7((M_&!Wyf&vNfbc|J`zCXW+1jqZ<$za7i&_tuw5UrNu_7KL8) zK3M*_qA-9Qgp&8&7)rmVUeVqk>iB1_@fvbC($Z2GNzaT>{)QQ+qmMy;u8YvaOH@{Xuq!5<)JqU3WEm9I(R zF8XZTgRXChohM^`F_(<7fe(^P4^Fb?(xkBX@T5?7;l!}2+dD!(>GSar3j5DXUqr9D zcVeiXGchcqmm)E1qV~(gPDEmrZ8xlTC;<3TkhgI(N8~d&gD+k{Y9>05RShI6%X!cnKPux8|tXu!S z@U6}73u_m89(Nao4Lz;%;InPmjvaVPUA2qcgBP$LFQUwQ*h#*OL#SLmQU8qp;FXD? 
zro%Hw-9i7s?uq)R%E2S|JHJ~^#RZ}-ueJ&A7Oof{?-TR zYkh#;6T@O{)g_4YXS@7f&RBg%Ls2+J@4U&n1@AL$2k@2%?Tj)qx_)z%DS(W`Van2d-w}%3tL_A|JSCrA1=HJ!v&E?aHmvIPPID%t1fflyB^=@@MGH6E@XV>{^*ysMS)qTc$ZE`&+ z&n1oKjnevx?UPUUZz@;lh4fhGcwT+znZfQK{pbUbpW~hvxc`OjAA_WoP1hD9uR&Zp zc{n)|v2P&8PS#+w<1wf&a{pKRpU1mTRI<%g-iPWD;>b`t-usVwdIIBoX96bSR!qTE zmzjjz#8S3SijF&J#n7i z;s@LhVttzMy^`Wz8lwn>>WB58y|4o zd5Lvo$wToo96|D^@h0*FPNVy3_pi+t+aDLuGt<1A^j`EnIOh-E>|K?9&i^fX-Tz&} z|6R%dUC#ea@h{}r&-Vsk5Z>7TH`H;t|F@j~&);w7`)6nI|8L{}-^~Bt$^YHU z|K-mX_P>*7D$D;Tn|-g0jAi&5c@4)%RP^TmUU+dBO|R+8|D_k^zlgy~EO2cPxW;b$ zU+H6JM7{&*{4#UufQr4YxCt$a+A0;Yof1{ zwgH<^hApT;Hp#T(W-_k-vqL_+um>;TZ2a7QdaP60*v%K|oha<< zxnB8Q?YGF6rN#As4v}4`(06czJcc+1R_I53u6sD)_%!O(1qsKsWL#(Pg2(0i6TVds zWAXa`7C+A)KY6CVKX-}wlU1F_QawtlEtsP{~f@|o*(P<9MNV4PC zeZpw6>oPG4aE;~D{l^<&t{Sij7b zmb^hfKUw?}bNa|dNRQCQ;Cn5j$2Cdf8W5%Q6)3F#{Qe3 zY};Y>56vihK%9Ym%CH4BsJK`B!QJA&;&HUiNYa~8*q2DYbxYVVG9xXIq%rwzWQyEQ z?m+sb3&I-z`7U}UYYxI@u>oTD+VwBw$s8)XvH!#T#?|a3YEd_e{g)QkIo{*DZ|vWA z!SQ~)h}`x3Z_lU|h5ZqqcmJM67CCwz4f1Nd!TpbQ4;Q-sf$kq^dNbnu$71VHm%MAJ zaclLvd@JhC%zr#YuQ}@44jK!fA3@@PYeOC4T#RGVlKqVVkf)I@@}I5ImO#pHw(Zj{ zK`+OBa4!C6p?6@O|NlAv|MUL;eg1#_JGI_{I@EiI;#g5X-x+{G7>aL1KbIVik!X2E z`&SunQ;%epX#di)^jub)iMpXd*=)pC=Owj2o75?BZ@@H~)irVL>Cy5TgK?OEiUt0E z@5Ll~%|>Mylj*nOT>O#uK8|0-b(E$^Up$B1z0jK9^xU__z>zai{0=d2Wa;O`BYoql zFq?i4?!#O>i1~+(mmrg9Mh1oc9eq5pJm($fK7Lgw{Ti`2Ke1>_Ex4jpzUAs!(>XXKkEym$W^20dekRto!!UYcBNsiqxOdk{g4Qh7}_HGRh^?L`0#Pfs09lo;-FXIrpa0JJ20xb*7e@EL+ zbJU+RCV#-#JefoO=#bFRIyf}iKVa)}F|M~8|DSGbdzSHkviY_lp=GD|fxY4f%;C?F z?dIfXPl_KnD1HD9<<_5XwEjGjE3MzZ*8KgW=I=YFnXEM4U4`mV#_x@#*A6p&Pp>Bv z@;vPtyMIf0Kri$`r*hJd9Dw9YL&G3)D6YY9q+jtH7)j5Jb#GUTGoX(_{sH%9Ts`YI z+x^DUd-XqT9vT+U@cvCR|GsKy80WkRD5qD?ab3Psn8)=Wp1u2y>l)PF?K(Y&${Soi zs&9543iI+kmwMkxV3K?$<5o<;RNR3(F%vB#j6b7oymCI)yE#g^-mNW-IM?gk`v3m# zB%0LuDWnIAKS9!ee3yJ?;~w0Hw6qlF(lfoi_jr&#A9;EXS+wIJX=VQ3MdUJ6#JwWO z6)302vF94Ba=Zrh^aRe1`xX4_zTSVV^Lf`#jERw(Q2b4OV%Pe;cj=S(SN)#SN-kk< z|AntYPat`>HU!y>4Dv{8*T*#mx5YH=jW`y#oxUS}-r?COZ`s3V=D(FG_q(L;!3)@r 
zidN;PQJJBit$#Y{FC(rA5Z4B*yFvLO4@v8ad}e4kLLS2joJRMx%6IHPi1*iDpQ7WA zUtbk^k)2;38v2lhJy*Y>&;RfACtTYj43IVmUEj6F|DT40j(~r#Kftxt>1Y@dhWhLp z497@}#uyYA_Xu%+!EyAI{>60vZeap_5+>tZ{Mb?Ve$q1_^RMW0WoxRkY)p#%d!9{U zyXsI+PdqEe*mtL3D(=9Yn2EbE8|U`l?JfRJpM5*B$RUr0N7z4Xy&h3x%2>XwwY04kcXhs}=*e0Lt*nwR*JN~eTo>|C-&(o(t z-;exSb_ZF{sU0s$i{lTSp`+v7K`EhM?2chlpL_=(O)vQW|9+-#q3scQ^9N$Pzu*Tr zjz0S!=MKd+7>+o1w;UtsHLk6iUox6L1_{UYj_b%;*W3B8Y-}z6{VC57~!d1utw=NM~oc0I?q{=iJ>oyKPFBIEiS_r&wQWUS^#`T@S%JKX1Z zE*`{uv}t=~&{Ezrw3_>n)Aq`vou1V;+bTU-XC6aeUf~zT_8-0C;KgBw^LC+}Uah|% zj-l^y`~vFfaUX-T^ZYCCcK`CJUg$isR{nKI-Tz7FktO2JO8t-fefLFl;$<8{7v8k~ zBfA>MoU+d;YbccemF|D7``_q3-S1!Rzw0-S$mbYN;55?n-2Y2r9@e@4Y3~2Fi^B!< zUdXSP_iDd!i~E<}M_O5m1jU3FU00?~KM6jKc&>LhhgRv6C$;m4Ci?8#3j}W>y)U$G$9N zU&w}o%0IR$>&e5)|2}0L=~m@GrR>-F-%{*j)>!0bYl+JC->=kqWztJdZ3H+K#SYlaOCk85kM zwKm|lM)V8o1`e> z6`}O-Wnr7owqxlHSA}K$uL@69^$0s0@4_Cufc|WK z`-NWUgUTZD2l@f3QKMg=_JDT9bK(z>7_8sjIdAm;2RI&tp~xAJZ(&1Q(KbR~gg%4z ztNH)PA&&-ia3i)Ni9-L!SoL@j|6kuga_|LVxV%c^k&{uO&2=j|1?7F+m-A~d)$tvu-^o76udt3o2fIff7A5;A%Q!6XD05#Y!u&U4QKK`%tcE%|G$opKxVRh0y673M;845 z8~Fd&${$Uli4T2tf12iv{0e!@$3sYt?->@6DQ)t!cF8h&DORBExct%156>dbq0P_X zSNN=A344E1%rd=f4jcOMuZ5C-{aPq}>TCM2+{@kW$#2Ej-nfQG-0ORTb2s6CTmN^- za(#j*bMDet_#k8r5=i1r^}l|$|8@OeZN4|Q|C;5IK_1)CalQ7)XUqwBm-fgHwAF!b>QU#MTD)=p@nqw}p_g+~ za~2tYUKIM!OKwsZ_Fx|lE(#g6A77+>xX2jrqLAOYDD-pA09@m@29ZOfj^P-I%KIM< zRkI!r)zcpiHOm);+9iuZ9qN%l!_A9ABeo)mrg@9BA`}=)9P}#{vVYJ^FgK?OEimM;Ce*eQ^61}F}Z(}n3RwS0V|JCk4^{}y^hr<+U zQ*j6G#7x|U*|-OV^_KKY#dgYlj_0D-SW&LQTuypro%kRBM>WpHpAFPbzmT0qq2Hi~ z*gEGvghg0}QmjA=AFg$|7?_pff5`TY{5vw|*nHm5ut59|wmL8Ql9(61Uy4kld9O6@ z&no$>!8&X}dcF5YS=mI7Wx5Sz^exDj@C|G+8GZAzfy$N75~w(!oRiHc->0k{ z_Zc#d^Qd3Q21naV*3I+%AN2^^q;E&=2R*|Mau@dC1-$Y3?RWelip3+pOddj4TnMhqXiKnFsiW_i97hN3i54JS^bfFA zS>)((eqQnx_Q>xf(Tohzx61?hczhfCceC;Px3G^opYx6JyX>FOK|MWjl%4PIK73x7A2;23sCmTwHA>IAzbfZpoA11U z{df_bco~P#g_hg+|A;ZQ8JwH{p^Y8u+6K?)-1AxK`FNHor0LDeJu{ylkf-%egh9%;Ru8 z9QE=_pmwbL9PIx0zCQl%eXT+5Smm-v8<|Xu(mvnGM}NKk8tMC^5$6Zx{z+^A{an2L 
zBiiT7)d?HV_~^MM+UMH$4b!#%XKDYFNq#`naqWMk>CGM5bke&AO%l(%a0s2hbg&51z#;?d}NY8eh_ggt+(Juewg<@=^--o$)5a|Q1 z0rTmV{^KeX*0*Z)TQMWv{(ru86(6;FIX{t)8rPI7jQ_Xte^bV%eP;)DVGmxwe!PfIv}BEci?46v z`)9~@zJHd?InI*}d-=Zy_`hVq|6j|u^*d>@+3zO#p)bql5V~*#ac)2g$LJa3;B9>` z4kzeike|W-=MxwFfBx%fX=VK2?oX-bQ9R8A;c1jeLejdK)bao4Xn)+V{Xw=b)czoIj`L*0^U6QARw;}62b!|V-sUs@ zee+uNOIE%8dj0Z{{1zeiuiA{{3KZ9}S7hn$)D>^-5!TSxVFNZHuG>{cZb1zai1UWy zJmMrhbG!Nh%~3~wnEGM->+Ki!otL%^+pz;(AMX=(k$dn0I=^{w*iUv?ck{(~jF)i; zU0AG-V~Kl?dvO$B(bFCq`b?$gF0-yZ+4-~X;WXKyAG-TS@6XS)Q$B8OdwMVQK|c(@ zAPmJd7>=%5@&EtP!+stYhmnrs{J+uU7>vUN#5GeVk&|&Nrl4Ym@;^=8Nsnt9RO1f% zok$E*{>Ll-e1h5{<$wL__Pvgs$g2O?@j7}v8T-GFxb`?UQ`jH-*$cuK-_GV|A7G=|`@BB?2K8wpwyIl`Z0kAuw?O|t;u!Tk^0^N&?ycl{cK#pz zcSYxixzY-8Zu7JSZejPZ%=J7dJtHmliVjp-pO6k$|RbvHx6{1i7 z#`S|Mr@Q|d&T)S1N3Ffr{iEJ(_@t0Z7CU0b$$sF=2 z?W2S4A6uQ1L=!!Afc<}-{YRV=nUqg6GRPwx&*7VIi)pUC4cqBE;^zb9?Kq2e#Brsv z&DuYA`;QP~2X;9x?mt{!?!NAIUntgx`GWL%dYlV*_C5>#?@I5;TK2!yJE1*Li2orI zJLCL6{ue4^WJWn}-=}OLN6$aY{%86B^543gU!WW| zO;K{;YDXBCM*T+P&#SHb z?>p^UrN`Z~@zcd8t$$r*{2y(%8NWyS4CD8=7{9;W_&wQh*!=(F=Knuyu0NVu zjsGKEXZ*ihd_Z60``C)4KFTJf^jW578Q-5aG_*V*K45|PfScuy_OC9a@)xBcm}_NTA2cY!?XkwDTtXPlqMHf%@QeWz|Q zSDl_oxi{>h??Ha0@&6^p|DDtB`d^S%ws1(;PritXy~b(Dmr>r$b9MhUIOMns_0M^( z&wI`tp0jf+{RdS`%yWVDd-ZRa`|^l58*14glrKPZio{7HQ0zNi9fqYikMYdI0p2mOwXYY{o z&YyW-@O?g8a~ zy*A)H^#CfbX8+ic>S63e5xYPZ;@{Ya82`V2Xh;Ha2r6{VwEZu)ot@_rK?uA0e&mW^EYqK1Ba}E?LY+D8%1;wsGB^ zxWCwZ>GiHBwm}MUhUB^R7n~F0^lRsN{^kCgI{y*5_@KHLOHV4#@6aE3z4gNXU95w= z7NO-i<$tI0Pi77%|MYY59UHa359t3J#{cj>C$HvzAcZuVQHX2P9$Y4`QskfGVX&FYps~k`S^7TE#($$|2%h2pNgOb+EL+QZ*_TRcZEMI?lcy#lnVFmI&+lI%~yQ^jl3Ex=X zFRbi4Bs~7Wm0`_GSBC9A>-yb0!V`zj59|DQ-;%c0|Ni6%*XDk9_#`RCRLc-Gh@+FSYV8~N~Sjqme~ zlSS75_nT8N6?Y)rC=Z|CNzWYQ|F1U=|8;(W_dd@*&mp@{UWe@q;GD7rz1ab6rN!nY z^V#o`HXG%;`M=Jssk4s1%H&W5ZnMJcy=l@6t~* zBFv{hghg0}Qmnu#tidz4#5J_+6G7j6d$&-b-)Bp=_l56v>k-Nq8dvLfW2m{MC{*4y zK2*IlUY&bGNG!Y|)Xp-Ot=sk5U-}C@F43$)8o=zn;Gzu`Nbco~Nf_pa$8 zkKhZd2l-d~5A#79u@y-)A%!%WaY7!a(ft|a3cV2HcKVQ=)-}j|u1Dxc 
zAAlJ9Gl(3DYcL$iwPGa5(HMsbh_MZ+M(rYc#=W(nu>Rh6^$F5*IJmzeiuMvN!w4CKdXLr?WroR~_nG*(r?io%whOi?9r(SbuMLI%|7(^1*#Cba``eBGOU6An%U1KlZdPZZA}i+F_i9jB|K~P#6$!^l)Zb1& zsvSVq-J<=`==!jAr|Wyx^&$1V_KDw*^AlQn>fi6J4M=7NYx9v=$2l^u4cIKt4D#59 z?bs1@@6;}`a+K#g*7HRihp*l3cTtZ7V*mdh=e>aacoFFvT*G*EK0VXl{o`f&A>{AZ zmeB6V&U60@%nOhf`~Q!S$50{t1bG^T{{Lxy7v0Oe->ARZ?~ZUCMXtl|{8j(|H0^5C zBY|GN(+9c#u+|Sb2t#oVhU0(j|Gz=`!`89NpZ5A+_Wv(dZkO;)mhw?z4B$w4ruHhI ze3#Mm82>YdJUefe02S-|BCGR z7w-w#`Br6y%pi|#i1YuplRK~rdl2^)ifg>SKrgKKGD7?s{Y9Kxe_)_@d06cKHNSl9 zMAtS^J9T2HbzHZ6VyH)=)qG>$>BP%8gg4Ir>vG)UoL02mHi`c~iT^(-WRbglQpn#t zDKylXpPn*5z0v&imFB0fH$NTeN6b%0na|?|#r=_)ljhIUvj@zd-)sK7XE;=z*I+nCVl?`>?lI&z zRL=5zuYSYx?eF=@r~attE3Y#7OmN;LOvbIK80`L?GlgC=-gj@97^c$iK%%GbqOPy| zcV3Jwk8RF7eHLR3W|DWIF#qooai?+qUl4~X?LN%KgP4zp@K^KywE62$Sf_)XSmc~# zD8&lI_Wvq!4c4J$F#F4n{*Uwj9?}1Yts|6wG_g%7r0LE21!Di-2Kj748MYujgMXoK zuZEtvo&7|Do<#nna)vD0(Hzf->DY#jw@wV(${Ir>(9z)!T*<9E&f+fTlTPP~jmD85#kgFJ#`i1YtWkf#yX|LR_@ZbdKj z!P)h{`q9s>|22R<2nlsSeXIu%``?F3y9UEC5~DE&0 zBR3$$uWR#J+}~!C<1*y<{CTuLr=3E!+^hcrZI9@`M>|`ZMU1g4%wO!QeK1)2fDKMM zuZhh~Ax&?-oBh0veV_h@_ET(Eu)*~YsJk)S^LwNH*y0*$kcijvCBDO2`OpjTe}AWq zL9a*|zgX_~={2sU+Bw_k+mSdaZ`7f7iFhz+asJ;fat~tP#S7$qym9{Di;g?-SM&d} z|M&TSFZ<3Rbm0hM`9DUUz-gSD|HtQl)BL|i@p<@vng7?lLis^2^g+dfGxPuY(a+Am z89*O|znuTK`{#Q!aF1(vdj8mhOxmZ219Bb&0!g&pB0fMoayyx|PC$;#i>t2WmsasltMBIjF695tk$0+3sLxt|pKSKs z4D#59?bv}`*n<{v&aLyrABa21knQ3QvVG-`JQ{ZP(ZAS-|Ka+`CdVn~r5}(t$`0C3 zQ9k?eB05pQM=pQGT5I&P>kk~FcOju~qMo0AcKv}P(%x9Uk2yYp(}?r$hTl_D- z)_2FwTtM%IK1eD*mG`^vUNq;F$?F^Qg8&i-OD{Z{1ZIb_%BAH?z-uM9E1Y>LmO zqQY3)9ps%Tr&mAAj$@|dyHHP0;OsivvCT4D`q_5nJ@or97xNL<|9gmBgk>nj3ar8! 
zbp6ggiDY5?;UDzr(>KNR#rht}4*$Uxvh(AfZ<894xhFcfiZ-<0%g=$S^gb+a{T=p#|sKX^48 zTh7*2v7dgc>S{I)=jPwj3w|m&+IPla9425ACgWC2LCZ_}AN1+Bks1B_?PS(*j?C+u zD6IcKmi=ZI<6OHY_f?pG@7|ib#rgNv|JR;clG3)qRC!ixw*J5LJLomq6LJ0j;_rxs zcbtf02ig>M@~<^MF;iOHSE#W5{}uk%l>LyT%|-d}$4WFhrZ`S_Ut$$xWtFQ*^P;r?3J>D~HpvN_Ds+HqS^fJ6Deyp4I@#8r$ z9Z3}4TmDEhJ-){oGLPIl%)uphU>8dMRa}Ag_a6ES*pE2>zVmJN+oE^;$l7>h*Y)Dq z$wN`c5gfw_oJQ<_>t5ykL@)Hg+5WeF^vpEA$!+>B=z|brzT>_(*_-(fWaX&;q5qN1 zPAE^g&kYVkeeW6!N9+SA#4A;qtKqoC7+~uR&;M@EAMJfTf8-F?2q>)oy;u1^!2Xj> zJJqcZc>ZWc$H!lf|7%bOi2oZc&v7WZ!t?p);4pzc36pUva{qNzm_l~_r@o(G$N8z^ zd>r3_J24Yy=P%zyuTkdX9PHWjdyqKFwm6P^pT{}amG_%}K5s~+|ow?cN1>R`+1AH z%Wvi5?;?G*JjPlxpPqTZ{ogAVfxZhdrtsYU2Y1__L0TF40=XX*bB2T$$xf8}Zed*n zdA;oT5DNPr?AFfkyK(=6j*ng$y5jl&cu_b)9>WQoM)zvv3ORe=^dkFU)eY|s1IVr) zoCt%+HFsYZ9v?h9tlszT@Xg!a9oFqLUN-O2u(r4HvS+Ug8wMLsyZQ34{3@pz2YZIc`VI)I@Qp^}kFzchj~5w#ym?S4{!!0R94-h?OfzPPZ%rE%){eh2 zJh{-j%N~!E_h>9pS1g|1Ej%?tzoPdc_ai=u^l_MgNtldV(e*Dw!W6RO9rlPIJ3lAB zh`bXsaThZ8YM;$ezUlWMKUkfG>_E2u0d>wR)^wIW7Y|}S(yk{Z{ULf;Z~Om{i%^PX zs9@jArLUmZJYo#1)!cPEJe9k16%pBTt*_uVpVK@Ac}q8S;y zX?{UZ`45)A-_9YA24fP9*t)=21Dfb>oS#tUxAOAYhV9saiiP~|{(NeBO`U7Q9{LN2 zdjixujxmz8*e~rxbmC>4vkz?FA;(=fhQj{C#+6RcPosN{vW3c(;(yj_|L@a2jpg#N z_CMkp6LJ0N*#7V3ygulM0T_g#xCSjdjsNd8{*TN{#{W+m|6j|-Z8TOwHr%HDkF7J< ze>BnK+Hh&-H6Qi8o;^b5!XBZ;|Ir%%7kVD;$YSeC&rMsWA>}#xf8v@-&DvK@hdoDB zYh%?+_dnid9wq7z_}`w3<17Bhz2-K$#*r9}F&Kvln1snl?lzB@oPxOj-BdDt*!y{$ zA3;C6{>Yv5nfS}~M+)(eY~Z>0$CPO9lL(2{9KiUxc+W%_)-)`-HY(>1+roNs((r8A9vHeo{bjH4l zoBD(m^f>;wDyDtU*ciDE8_;DO;6L=gY>N74#Qu|8P=f@Lt$u?{RcZe#foilOogn@cr}>eTJo%X@l{nGyMH_b#s;%#<(9C z#vyd!2#(3!%m{QYYE8U5%35Mu}8+JJRr+(T}Vw4t~L!_oDQ-eDv;8e`DO zPa8*eTw!l_a&dp{6h3e4tD5Ava<}roPx&9A{3EU@Q-^wb0y%a5ts~?(_`xKc*Vg4WcJ5xS)VK(kT+Iy60WH;!UjqD5N z(jUZM&i`BfUq{8BS%c1J4F`BCL>AHN!|eenwX<@~>vgX%A|-K+jW`vUdX z9M`X1mnRz<#V@vsUvw`?G#z*SNV}hA_fTofziOU1!G+=k$vCIKZnyYJB(TbFuE9DS zQSWXbH=zt$u=u3){x0-?3>1IqIDy>XDm!EbasEM`Ed4Kah`xYr^zGPzT}ZmeJ>(16 zj~5Zwc8P18b<#8Z{kD15b)z3bKK2bDs}0bOjxY5IUD7*S%u)HCafTm>`E+~?Ct~`I 
z-cPbad$xO>>-nb(!UbfP_o5fs2jvT_SGrT&AARxF-Y+aw5688^YM+0-KZ&oGzyRkA zLiIuKTL+sU@1c%!H(A4wEV*8J#c=vaRE}4#qq?_xo^7q|sa~holL_{;;X-x+TL-ce zec1))rjYjg&Cly!KhCb8Z72Kx?CbsR(hL3YjlNTEp1a=}gK?OEbe%jfiJp1IcQKiM zD`J}=hwMx8l0HRR8M`%=yaN>nF9>&%Gf_^jt~xWX|1QU~QNP}Gqp;7w<_p3-((c1t zJc#*t2#c@`XUEq{>G2&|L9RmX-@Ao1WXZ3^CGq>$(Klcd%CH4p4Lw2)8OLuEWamHj z2uZRT8RSv9{&oA`+yBv1`5)~6A`AOZjAj47ZjT1%ZO0Dm!XCVU{b+es`9T{pXh#+~ zskZmb(KUs4T=@Ssw_RXWcoR3dV_I@XwGs$O@ zkzOq>nVf=@b1K=wDpb#KZ78fuHqSLb!2aLwo<4Uni%yzmWgmpZy=q{*%e)+5dg&R;208=zJUhUmGCqZ`OIq`EQCJyhr}` zVJ_z5A;h(W7Lm)M{`cOGZ=4LJ^s@Emg%xD^BjOLpHK@p{v%dFdadUsxx9lFBQ#ai` zIw$V6RFBGqr$W_|Q$9NtYEaA1szZJ2DgWK6@SuFwVFNay3|mlx1X@z3LTmPv{*Tl8 zKTd~sWRXK24bPtnjo5nNl=crFPM#^GSDy;aNcM$%nvp>s=?k6H?Nrzn(|XGr+vz*v z=dL%0Y|m5LsHeg%X=T$-g+1g8sJQ)9*iXKQ@>|^3ZSD)j@3PjDwEFSx3$@OxyZMy) zt?GX^cqx*u>t*MpTxWXksc?v1@+o!~ZOUgxxolTHvt-V3+~2XwIY)2|CvX}a-#Zz) zC%miQ_;a{`?1et)YWj2NM-IRs3`OM#&mYxAu5+yGaxZl=Jb%}haQ)XfXE;V;G{#^Y zCSVfYH2;|0IoJO;(Dn8A{yoS3yT`3$@__PB7WNk-qS z_MQyIKkFVo={P3_C-wo>^z`m{m+Q!S#|bjV{KeR-vJ3gMv&5ed>>fVtJD){GUuF7x zJ-mNxb8j{pb=}zL!Td0#0Oz3;#|HaetAlaPhoxVB76Jv6kp(bZDhvx+R3cr z9GTCC@Oi)UMSK}wL3hvptK`F2ips;<2gm((m1{t4t7||#JyGsAZ&pT}_b492<2W0C z@C5yA{N9uFr;)!+o!~pM4c)H%JR_|F)u>0;H{1)EMjJZ6;9AL!e+c3GP4X(e)yNmPb7a`xlHu$Zx>mL^4|EKYBQL~dzyO(c!fR9Tioj(6fs>;GvJ6#VaQY%~h%|MBe`@jtHrx9|M$Zuwk?_u##VYXFv` z_$lukU%VQz|M>krOR(AXt=b^k4z=WB?_Ycem(I{Wp2X9524}~~qW@6gxaB4O=PUkWGILV-C$o-oWPXP7?>ROaN7;%bnh^J8 zj(ZF?qjER<#~#J9S))wFvROygBQe&y`x#~3?`%aHZOGyK_#s+`DW9V}UoxY-wv&bR zxseKVO0p>ww?;><{=OUPafBx`)@uQ#h~D|Ju|&oKME~_eErkwOTUI`?fHIiydEz%Mr)+ zay{5~a){%%BaZQv+|)aiT2C-@)PLmv$(H+%TKzZ5ZI1p<=e--(p;-UNd&u|V{pk8h zw=j|Hxc2<;0Wyw_MF0PT^bg^~_z23hEacIAR@;j@lEkAh#x zF4SsU){QX!Bo@3%?07Z#SL^?+y*OOyJ72_?@fCa(OE-E3Sd13q_^o{D!ub9S{`YPC zZxr^~<8wD07Jq(R{5fAa*=l}3)tT7-^hWdl7xMr00aP8}|6j=eN9{oVe^35DKR!V| z9Dffb$cfE-lza@O|LmPG4%*V+?+^C-SM&eTu21XS{1-lA10TQ9^=;+zC;9$O^we1X z{|)?qJT9*;Yau>CK8dIC3@T8KdTd1+ZOEasvUm7C+3^iy$j@FJen|f@eu|$X{hWK; 
zeR22&J@dTSe8<0}{|b5i2{~z5-)kr1-2LB3|1ExxKcJ$g{)fI7hd&ZNln&Ja%dPDzaayUe@yJpTag`#c}y=y~UzwaZl( zY74i@zqfLMI@BW0`8{uISbRP%!o|20mm_z*H7`ae`vcVhXz8YWDt|d-5$E^CIlpo3 z|F{NdtP7gQEBn$@H>(RAUn!r8)#BCHcMn78W#rq*cjDcMYY^6}3+mJfwWy)Ty?UyZ z&+B~t9=sRt$3%PpAH;|7VSEH1!zb`boa>)e7C-GcZ|vo>b@wd(kUm?GW zQf;4-9@Z2KSA>ThFGa^Um20wdaR`r+kKu8|I^YSiFus3(x9}wWX*`2M{d`>6S0=M4 ztPhzrU&LqCs7LMx1H)D_jW%@Y^U0BM{r~Tio!5KU$RFdUD89Z&!N)4Z1L{|ba{zxX zJQb*@ZXNVZ#+MIULL>1uTXrw z_6PY}{2qTm%S!QcYmIX|H$xWozfG~%S@COR1AEzcnEgQ#h4_EzX`eTfm5;o>{@!YS zItuIh>JzAE=i^xWAN|Iwcnzm;UeddWi*PXt>kqK)=hh#1fPH8GbL@W}4f1N#m#|e^ zA*s#K@8OQ1P-i6`q^v>@M4AHK?lis0!w(H;CRT!%(Q-6ME zV9&65r8Qk;D`+ z=(7*wBPg^#Je!ZvYYzHvI>Z;m{Q;ihPwzd`HmiF{Tfwt^`u+>UCinYPZ)-oka$$JV z+?);Lds`FoKf`);=o1}>!@7fq!?&J0YE15kvASbn&EccrnHlgd9sg=5zVNlM{P3T`qtE_lSYd4K(?0tw9_x5(Sk?dH@Qty53@f|IukPZo zX31N^=Y94?JaMo`SU1o7y9F18wTCYXPwuw&p}OYFKKlxm4DKElrsL98Vzm(!0Y}G}es{4KH06o|N`9Qr+I6FaF)3QvFpmR((?BTFBZ_>Y9`~ zC#(HKKI7a9RHGi}>W@}^l8#$$=l{>*|KG2F{X#y$z%yfSIeNaE_{W{ZS7w|Rb|7l40 zvGab4^c;B})n1~$3+VFe)0e{4+cnzme zS!aEKly%%uGtBsRk#Xa}#=ngnC*~Rd?rr>gl<{w$C5?qQtu=PN(fB^u?6@T*J|Jrh zo6JaWUvBJvy7d9f@6VIT!GptjP0BPb!o^5CKjr(E(lgKd?!h79a{86XFECzi3^_}- z?>7EFLcKR*uy=|bR0mcZ9}_fBT>QSES*umig^78?cIiiuSokU z9!8mcAeNHldk6FX2Zs{+qj(I}M_s41+MTYeh)*z9TVWLcb;6*eDDTvHDdm3OnQ?+2Xn&vY_+ng&mMY~R=lcIdYn@k<9|Lt{~+Ep|KJw=e|PggadsW?T6!JMt|NYt z?|cX!#z*imd;*`ur_sWux1tRh6xP3KWaE%W13T5oPHiQVC)s~8$@UvwU;nR8zv(UB!&%CIxisgMpj>+O9Deov z>P@~?sj)?cCND*dwaX*M+GP>TY6h44>`Dwl#UtAPOSJpxW!}|ylJCZKNUZdqp$@eu`0;h! 
z!;{MKdwl+0ydTS(io!(l1Nb05gbyRe&=$t`)K4FAoNMSFK1P0xpyO7JL>)4GMn$j9*noRn&Lb^hM}`w5@_7Qe@j z@dx}7ucC6J`TwzhV7&f;8;p7LOAGV=#p)--t~U(i-(#!Kl4#n^_uVJ{e<%N*-`jH1 z{B*}{RpzIoJ{C`xV2DPYL$^Tz|CjS35*Ki8wHESc_B3z71aXDHx z^8f4j|KfkX_s{D8@GdlZ z7q*hg+qFNYdpBnp|0A0nw|Qn6{UR-7>n*SM+jutZ^z1y(t(#|rh8sLbr0LDQJ;%PD zBdXWCZs%6M+^7Vxd?)e)N*VXV!nrS;Kd$Jkls-14_&* zYI$D$i^4u@yOn8ma2AF5nzia*WxTPs=T9aFd;Vm~ahhy)UdLzpgpQ}o0k|yA|Mwot zyTh1&j_mw*{WP^b!uRPvMAutG_%Zp@sDH}*fBO~uoc;?G|80+uy!*WHOL~e-<5%?G z;J0X7seQWK^=)*0WX^G1OR}s^e4o#Mk3Zm#sE}5USLt!?eD%%ehu7$*kZ@e@xQ?uy zc78aoMY+U9xESaBqwyhJ>iBY8i9-C_+w}3!-;Q_U-Kb=Pt5W(WvaiP%T&QpV2L3-1 zqr}};b`RG%?>%@g-j9j+06vHo{amek|1a@x`UV<@@&DzOEaLx@ss8-`!TkTDA$&+4 z9pbJ(Om_aMXZQ&DF?<4_#HaCDd>&uKm+=*R62d9V)U(p*8MePo z8=-`r<3GeXk>~6`+h3MCKRo8M!u-F#?-m}XSKMIyp_{&cdd=OQe_!j^(4R(Pn&*G7 z=a0Bn!!y#7S@k_xjkJ2bp4^IPgW8ny%q;Jl<2J;xpZrVeiCeq}C&kY^;QdEo{tNk+ z``Z@($BiN9d*8EPI-vh?K$>ja~-v!zU&QH^u$@hDw zekq?{A+}|ILzdqlo&~?9{~mup^R?qc&H-=L0-TCy3a4{~$<+u|6+x)*F&UriDiFe~Vya(^a`|;-bSNYoKxjv_S zA=>)Cn*a9~^>4>_^$p3V^B=qyi*F3YS9#uxCxs74`yf7q591^F7(RhdBF6rInq0C_ zKl}qBe3l-^guY0AIez~4A$*05eU)D&djjp@@Ts(&YjTIbcF(s7l!{MC0)3^k}l9qN%lEdOo3m&5n*L;M&&#m~`l%Ow3T z=J}h;pSj<>e)Ija=KJT!yyJ#vCxu3AMG{R&A&q7%ZdJb{KD%GYuVeAV@GG+OT}9zH zWQjiB(jLb7-%=EQ>-hKh1OA9tk*t~&eo4NDQ#h~9eQx#}($A-7ZkZU`aMAxE?f!#m z%dW%DZ<_+GkeqDY%xIBK{i%3L9WJJbfLdIl7MmLC*a6}M^NJK@n=Ckk9 zdudrtTs8B@x7OPGp0m%{=bpXSUi;UfKZR|225I@;!bIou;b+9Z32(vEm~4MOB*gzY z`s{#bgI}coc`VT5-VDhR&kBD*+_L}Vm&jknYWZ{FSIFm(vTwi!+qHR}7ygp)uOqkb zr21lutx99+pe{^f#(z0`R;PXMe>trS^XGM8p_qSvZGVn^WBa@OWn-{$J>FlD&dYP! 
z&s(>`8hgB6#YR`?|2otEb=%SNX43{;{H5r|Q_J8vbu3Keeka{8j1w zHB9!^g}*_5)cZJL&!oRePYx;bSje);=XC9%za>8R9gigc`TNGUgipT5#((8j_}lcq zgTITvhrf>(@DlFgqxe0_@n2-#n+z;|CIjEuz*l{2f2G|G)5@@rNYeGUrbg@;P1e zRm|`$XZH)jW1H#`>kKCGIQ^&a6n+L1;+C~T-b8;3p2nZUDw~;Vcs%?feY4iJ+0)E# zhw$fxbNlDAr|DAucRBkzXzhYJyC1jp-Tei}?&{oq0eX(I!9A zHaXBnA?LDBgkN^d@|C`R@;R&y7^@|J9VyQxw=ZH>Uiizxe+8Kic0qWnNBJYC_~p}8 zp0E3yb)T_Km8^fz##-okJp5JX{5AXy{7w8V{LlE?_&Zo*Pu8WmfjHN6i;U@J%3P;} z{V9t5FWR4{+Mn#v3YijKCGR_Kf}j6)rStcYd5`jKU%JU1?Z2af@b|^dRw^5N1>pty zOIQ$|FK2_vxoYp7xR2uZ@JYOOe!^#j@2Dq!pUlng`}&jNOY}+1U>;M({-#Iuzwhzw z)iJs6a__M3SkR|^&oQgmKnDK^{{;UO{|pblKmMcnkLLSsY2T!g8P(3g;*kFLh6iJi zf9m(g>y;Pj{7Y=#dVTm;+d_kgwvgE%nkHv^~h|U)MkS3q|3!{_1|jKAopxJiDKL#rpnN^zr}Z6Jg-fPllfn zfAOB54{wUUzwNQ`7V_kSZwXJ6e-6Khizzng+uD_{*bnkaYxM^o3x8gGLizp+Ag181^#=R$7{cp|cHsSbPEpTMz$}KmX|acPV}xKRxdM|5MI=4u1*1j=zi+ zw4x1b#?{xm_$?j$NPgi~3BR$7A4zT-&tKFJuym+@;8_0vxgwnE=11bb^Omok3hmPA zKqtDeTH_f0WH)`YnE%Ze@1gf%n{S#CP8&DhB6I%t(rf&0zH^`Ba{l+YeDT3&Tb>F7 z;_jF~F)zGu$v-8N4NrwXXn0-t{i4D!hA)o(bofF~P569qO&A^hi`J`EhtF12htFW7 zzb<^bs4jeJ@cHn`l6QyUy4vswe7vK={B8T=gkMHS7vIhcBV^Vb*HJRo z^J7t(%Q7D0Usn{y$MqA?C)9mk(k4#q-wIzYwufL@X_&^J--st};4olZ1`dCZ!qjvJu-yc$WAJE3`4Aas~VhJg%BaJvu?@Ohwds7r%!Aljt z7=EYVSHh0pc5!!Fd$~rttL#_8K;8coK05fE@xNaRdw$!;?=8J6e7yRX!^i4=DGXKp zQut)WFNaSZye{1NHFXF3^O=&L4xjbBKiy{huf_PE_v;ITPlW^L9^wec_+r2HedHS3 zw4PSR$Uk-btfwd}^%aGCjpkZlh0Tg{lvm07L*C!C{waM3GcEQR?6fAuu}N!J=DX}O zSX>nPUE?M7ouQK47}2Tl89uLkX7v9rk3DIOr68P(yTUbA6HjXYJ{fN4o4$#+ap~I| zZ@2#M7wq3xDxbx1e#4Vt3wdeJ^ytY@Ko(-s_-GMXjD+)Mq%)g-GL#6HV!lPa#%G!v zeKI6bCaxS6s6-X2QG<)EyiiMKKk`(lBTs%aFVvF_Xhaj1J%?tp1+8eqs^=NU7&_>= z{Xg2()5cj|)|c>O{U_!2NNBMK>!CG#yXMlN(>arU>i^z?&_$oU);};_5W49-SYQ|C z57=ySE?W?K#Vxl!5&Fn}M8A4~97Oc9HwxK(3<(b-!=7$q3u#<>#_i%qFp4pZBY|m5 z8N1K*52W-DoU^&kNn)P9aB6&intzXbjr@D8wDRwI!X#I^2=Dg95L zOQJzsBQm#)X@{pn6TKNNXvJ-|ykFj4lAk;L_`8dq=cn^So8Q{eflhRx8z&!lEcB3B zb#X6w@y5qOAK8xq3}Ohw7(w>C?C-nSUjFjQn;#FO;>IwJ1Rjkal!h=ZoWz~2vT*S^ 
zdvF+=i~DbBc7&Jz$av`o-WpPVf4KkFI(>8B^Fx}xgKc^yhOvcv{{Q6;?daG}Yte4* z({3K~oggyKA=wx*$4Wam_kG|2^N@DcLE2+m+Q(_}yVBglJ`Qk*BOD`(+;_dx#+mYz zBXVBbePPJEJL(-C(f;i&3#X1d!#OT7*;y76?PcMLK09RWAJ_C7EYRnX#2oUrmH)%1 zLjhTcRpaJGWHD04&o{=6`=dm-6q&@+VSC!xy|MN*>ZQ4aGQZuqr9UVB--!F0m5ZAy zH$IP__&!0N@p&v_3HPv!l>%emh2|d^+rM99{y~xX2PN*K%zcns!s%1<56;a$ASZ^O z2^Fq$_wCyE@5u|5^vS_zj7L2as_3(hnL#zZ1`E!c&-#YIm2U~4R@`#c)1i*6N1XT2 zKsF-IgV>PAO*9ENBh#Z@-lv^D?mjSeAb&^lmo@(YbIs--U>*x-aZW4R(2fptq6^(v zJ68VLx7_@)A?c24|Knl*?qap)gL_!U%9Zkeqy10bcix2mx<@*_=tDmy5B&c}Wnq9m zYurDM{SVTIko%sdcRqRQ|3`l(yV|P0(f)bK7}SVkMlptQ#QnDtE^mB<{a;C47w=6nw=yP@?|qc`OvtAMU?(=WWXFH`(7qj*9!{=JwGgbNldW(FxzyI1X|G%64?fKLF|K+2{L$&MFpcZvlb$%Q_ucvS3sRJF;KySphaHf=P zF6O_JP2wiBMViSL#D0QSvJHt#ZI_gOgmgh@7w*7(vvF(0y$h0v-_DJF$GPttW%EL( zVJCrN7xJO(sQseGj zd5?a1pKw3o+?#FW{B&~a!1I;A+&l&K{alylJI4NxxR&yK=R0{}zNEV?OB`8H1%29zzRG}Id zzrlZK(f_9m&uAmh;!lo$Wb6}G-GO*V_*5(_5UCH|H<6=kFvKYy`>@fACT8Z|9`9hAFK5HNV$(= z>1455`&2k3&#+wgR5&N&SnZbl#j)D>-M>q5*;p<-hYinQqT{J>CGHwG$oohBfANW) zr$PZew|+pnh4dmUxW9QM5$AFgiz~s!AHF`6l4U4I1u9X6%=`004Oxrqd-d9#P`|*-?z@3jN)8FN1+V7#kZ;fa|Gg{D!HngJyo#;Y0deDnL^kV>nm@+qL8Z-I` zXE8@lVqX0a*C#Eq;Y-MUU$l>XFHv`tvG3&lB5M*H_h|j#xjKVSl}nEYYbxq z%XRu`$W)d71^xeH^o()-1i7)Ue*#-cb>0A5aHAjn>M@y={}%bjonZaGXOMZbXYe8O z1AcHToILCPou9S-FG(&Tg>|H{gI(-l9|t(Z5iWcWAft?DKja+|j_d!j?6zJtGI zE`Yc=_i@1Q@!NlKZs*CV+5lZ!#EPS^NM$v z+0^j&N#tB?B6jX_Np zcn6iMIJO_h_~YE-xG(ksn>F92oV~v<>^W~A2ROtLj*-Rsr80h|?N7#U1mxzq3vV~M zj%&rSx5Y|hZ+Q>K;8F+bh*Eur|2t-m&6+yy^B=qrX7XMLvzS8?^H{*8d^9+( z5lv{ukFEc!dLgW5UkDr6#1_)XU>l3|FNCG07s9>P7s7JO3t^?^g^;RvLH+tdxL@)@ zm`c8A{Q1Q&gW2;J`P46lWXX$RzW7D`w=aek*K0)^+Hv>0_9s~jp@ZIuHOH)%z8E&j zU$p-3#Rv8KjW*(y`dxj$*!^Ny!aXcEyckyM{YOnNhSj=2_xnfLs9P>M# z>%Qmg*(47=A8p7beOvd;)mz32J+*J#_tO40SmRT#V*{Ig>{kc-)g^!Aws$ol&1odj z|Bu-hGKF=dF{Q0BjTz5k_SE+UJ)0yskA=e!b{zBC`Ng}!d)PrWH~BOiHGZd ztLU4`*#>)`d7C<3_|f{`E51H*<##9lzm{KmjpJ&OP{wEWm0$e|b;1MR)OSsf^M>Y@ zlwV}OudU*@lizwKG?EwJ%MVTDUHk05Y`x{lM_vfc!r9w-p@nQk8!o=}v!R__?#>Gx 
zWGA}Njkvc;s+}K5-&Drq-eA4-K5UmN_kK^4TjjW=JKe=4Q{+F=rW$ZtBztGyZ@ehP$zI3kt&HjR`<4=TL*V#jANx#9V zegl2|`@#pv(BmFJTUYw=>3?wj;qd#XAGV)lZ}?*QN5U5_KN3FQ-e>%-JACfwgW)`HPzLkHZuSkEnLKVFl3$4nKxFqJf3qp;! zTHLwyWT+$Sk^KNamOS~+{~8*}CN$$>>haJ*wxSK~nAX2N)vu2x={lp*9hdI9KAME< zOxshF|J~uZPIRFgJ?KT|dryY-M(?0!yy?C+$h7blx$WLEeFb4j8jI3gmgYS&6CU$ntwnpcJlx6X#P{Jwh>l~wLx(A70>-Qio%GrMse}w$HN#o9`9tptn z6_2QM2ROtL zj*-PFrmFeJ{KuI*{xKfSPv>7gn*U$Hzvag+lPl$XcdX(*)^_>h{OGs_ev?nVMW%%_ z_6pd>^7K>T%yrIji7TugNaN_Ka7~Zj4BWVRD%{XB?_l$spNVNWX3H_r@0>X2|3^Qx zS^I6|sgU=-DFaB1x$kxNJ;0B*uiDFejQM`|jXA$19b4$Q<>9A75m}7Y#8aV!EJZ5m z+FAFFGU0M$&iLYou6^Ym=o5*b3KiljQH5$u?%O{RHT2moW5B4T*I}W~9+XJ7JgG0$ z_a_QHU(A%b&l2}l?!GXO1=KsI0gY%vGg{D!HmqsGtsk;6{P|6COE}%k7GN8T(pbX1 z3jdG)ai!9;b)8k$yRUzuT^b$e#Dw~$i%cYn_z=GH_YHIE^xyZ0n>kY#Va}X^q;CMu zW8v6*mIlwgNqyq^Pk7&Y)_vaX(N!EeGk_BrG+!X+sE2}2|hql{{XqHy|hA4jj=N$ z`Ul#?6+RZmrIWxkl2|QLMzKWC?XSyEPSMw~?S3=La~g4f-L$wJ>|zgb+;N|b-~K%y zqfZg{e2o19(T_e9cZ6fa`3G@d$n2-h|9>~k|BrhQociqy=eWcbu5p9BuX(p|pzjo-=}?cOZ_&&)_-07*2Aapdl6RY#bgQM-qRbc-i<45$VTr58QaJ?|F?yYh`877 zT#Efqv;X9RaMm?TonMA>RG<=7sK#2S@pU{L=ie%2mys!Ar}Oy#hwQ&L!98->aVx#X z=Mnekx!-R5-ETG0sYM;?F^(Sc5M zp&M)6${#kciAVdt9(u;gVlMwn@{i>q?H|`kHS7Pwee_6ULLPg`K1^y$b$jOh^aOpT z#C>)1<%I{4YnKgc!$lvy?eQ=qZWtpN#Tdqsz%-IrLJI46t^G<1-?{C5zRjn4Qs3R2 zZ_VqE-W7lHxhKLNxsLn@{P4h z^#e9)T_0QY-2OyoyF@jOVxt*yibIs_RhJFy~l;8ao6{WUbdEUZO8aJ zHn52;q>;fk7Tb(pO6Q*R;$HVF<;E{vC$3SykBM>PmC{>63hS6mJRK4#>Coet_{`|j zVTZnpg>(0I>YPjWdi`|R6SqA2OxPz6u$q1*9Fj*!rJe~Je%r*ca2A_k}z1=5Ur7$oGckUs188SE%WMZ$6Kf_cQf3*vtj_s)ua zPnjq4LW$p3>7`^DVm%Z4oHi@0BYcnl%lxqneY?tkU+o=0x#KDj=U!y^`jzyHDRUpl zYHYu~nvefXsG(=y{dA}$>rjsdG@=O;`{tpM32n<3vK5o!+Q@dqZ|lTw0VTDI=dhqW z#c%9PYlBXigD{)t_d>subESUQrk4H| z87!4+gEx9c724oPRhok@?tY0jIHug=^nq(*wn)8N!bdLP8|U$n(c^l(=)(Zww*v>s zAq->9@#|%tbEW5uKly!!-2Uq==3lj$Yela3KT@a+m3rBCyok2zxX}u;{cNd>~GoQ;gCMd_RjFzkLbsU-?W?OwCV-)P_LgwQUHxaZl+AJXSUFZL||o9iEk z`AYb+_W!>y=G-Z*E_5TVi|HYI(T9_7nj1>S{;z=;HXbz?!`|5;GR}V+CP(lY^Wa9w 
zhx6aY=nv<=jnfnOlk?xE#U-(Xi5umUOq^T)$5&mar?G<>bwR8n<`Ctd~AQz#^a0? z)RDRGADBDPKyO47nlZumJdme)dJ9@HS!DbeZS?4q#&6TN(>t)x&NuDjqhc=asn97d z`tV(3H&*FAWG_(I?!G41F69_X(V8SoDLK<-$ z`zogCx3vXx`|}$YS}u`Zne?QS!M1afj#pyw(B=ym#^tJ*zVNc z6_@Ve{~~YSJ7f-i0a=L2dh?0NVkDZ`)h6YZ-7OI=MQ&YPm2#}!NTUCoe~gPiv<6fA zr_3=GxchE??QiCXN_rKlQG>YecP)8h-FzKcj|Mbid05{)nOfKW@5>L(^cJ)t?s>I| zhx?FZ)Tz-2o@!A3TihpRJKR^F`&v@JAkJ@!gGAh-Whm-64?`4@Tq zrJg@B^z8!A|A2kQqwjz8c>aB!KN;6wWk0OUzRTKw`~Syz{=L%dLq7&Eh#`z%6l1vf zmbL%p28`1u#`rD9hH~>To7DkdG@T~#T*2Rz&K~x0fYoxx)M$UuH#^zicI^-PF}7RSU!*(O-!APB zaT9(&CC@M^{+zr-qFcVCANSS0628WKv3wyn&fz*!)BJzTxWCyY@%!>$t^8pDxA(L) z5!VObI6w1YZBpd@Gd2K!=KR}ccf}_~OgfjYRpY>H%^9gBZdvMsVlZXTvBthH)ft zVmxx1jN{+S&H4lAOGsfItG%%gh%Z2oKL19wK5_ak9`<{s^*wII{>M6L^(+6Q%Kxx3 zp*@iM{--j#fIGe;p241T_i=zj9N`#i&FXw?Aoj6sb^C9S!8U$k{lB;r9)ADtcFOZg zdtT(J^zJTNm;cRY!x{bJz53|hVO{Whz0<-czZ=3Od4+4-V4~T(_cy=D5jSn_x(TbNU}s;@rkOph2es?hffy0J$&lu?cvh{Rbiy&9pN*( z?+BkwRfo^zy)%rKyfb`$q$Yf!@?GJJUGEBGqc!38>)svypyu77+dZrvK4;9)ebSc? zo(rX}Q-*SEA3YZ`eoy;-3!C(fv*$vE-#*^V`sT>vc5Nl)S+=P)cLI*FO^q>->Iw!4fICb-K`8S_rD|DDXk0x_-I~L_y^Zj zp~-J8_`SU9@bSTSg^vxtGYpkghfg-tgin>$guCB1UaU{{Gp)7Zv+eH=pT2rm7@>cD z{Mqn@hPu$|+&1+0v3K}l{j(v?8JjwLK1|;{uZ{J*XYhQOD|?>*^F06OdH&Dyq1`zh z=tLK~(Su&}Va;*tCH6nSW`+F^>g<1T9h2i^=HEfeH06W&uW`IYwYCNFo!!|)9-d(7smWH zj@7fekRYd#qHiSY!e+{z0>VqkjMatha&3o1T}Y4E|DnkK17-FfD7XJWo&5(Y99w1o z0dk?W?#J5qDd(>vjop~m8sA@eUDqDjC^i0uEp3%FGOiJ?vnZ{l8v9RRxu7nrxK?VM z?~l0u-o*5?VNY7Q^?mA z{3TQBZuyIC!+D&1SGk*eK4gFOIdc`B4<(K*MH$MG^N+M?<9eD3;ftx~LM2%h^lydK#^i8&P!@YLVyAk&^%(Tj5v%G3+^oX0#9_c0f5WgMR zPYxjA+!<|;xUPRtcnI@l+8@+#-oY(fRj0lfn z44Dt+g|&Y3gNF2f4CwzD)&DW5|ARfTCn0}@|KFzlq29cQWvn1|W`5Cu`9;X}A0~XC z>)rbQ-~OOnB&0o!_zkf5?XS&p|NjTr*?3L*wm$T@w|5#7S?$|+EY^`m^xfmPzjo-e z%1iv-&n|rr3th^OxMYv9K1{pksbO=I_CuH%H#h0n93?F1&zTpOc}G#$cisUGafD-J zaf-P2>RPY7lN;J;xqhu9Wo(xX#*$~Vh~;|Epi$n*l~#EtSB1}{agIw|;Tjj#;op#X z2l8u-?Zh{?3h3F7+)}vM>q6zpH4DWRVYyTLjJ)$z&#z4XE4>V<68BW7U4Xma_8kCZ zjvw4xZl3s|_M77>P>CvB{EHB($(pGDhVreCJ-ecvtN-Q1y3$(lb*M)J8qtJ_Q+*X* 
zG44E?7n+4zFgctTTFExd4tU3qY*l|Wd$$_BV`Q=4+R=edbfFtP=*3hgznGsqgIPZ9 zT#65ze!cp^K5x_fYd-bT761AsuI1-npYw-D_|s$j>p}a<@!{78_~%2>KlgrmZfRub z+vp!-57FmZv5nnN4qy;NxEsf&v=@izBN)XP#v}ftHok1eRh~{o_;DE(vPrk$$uZ@zxVUi$zyR@ zoZ<|x9bY>azM~)Jl6?8waWB7H;fj8Z8|3|S&(}LL?VXt+XDj?Sbw6JI3mj93A{3(p zr6@x=*7lTtY#b>A%JWa`e<95!b;&()S=uY~_-&X~*SmksCP@N=-qU~ICY;fpxbx=x&_VA+7rN1dUi6_KcjFv;V`~HSK@4FSBN)ZSqWSw|>?b&``2$*dnfCZ_cH#>_xR}_cq;72W8UFCiu(uD z@)4di*ZOFr{`mG96 zqwFtcrr9aXtv~3Oo~JLM+A*)SKQ+R&s6*yM+FwcbKgCAsH`pY%gwy)kGuX!BIR78_ zu#6SNb#bc+_TP2e7F6Wvn26`(qXNvD~O_Ae}wz;{dDTQaGe<4oDkE^kZz(Gf0oP z7P8_dip-fI&oD_pCohqp$8Y}3;!5}$xqXF%=MJ<%5N&E!KSbPT{{*q${Kj!7e;7jE zzfk5-h>LeyKO5!u*i5n{>fbm2ge{9>x3P@e31y*Fd>JxtC<_(j-LH6Fb7i5DUWIDZ zpcZx5+%<2}yu@{L64%T}+}<~D5$U}LYy6kXygOLJBJMkORr@@J6~sNcW)Ro*Php|Y zyNe{|Q12QIXhaj5(SlaAVd~ue{>J8Ks*Qgmw=e#fF=k`e3qwzba>ulz1D)tXH+s;E zwQ1wu*s!1frth9_bsJYlrpGvY=QG}akMr`6pAttK@xcvE_4N>8vA-9jwMU?9w+I<-guM2l_s? zg)_owa;y32a3F4C`sr{;9$}JxOlFaw&lJd4k$DWlXP6gW5S}ZMPjaeU8yqv3#T=5D z#{$lsbBQZt-e(+uyg|XCa)Gr$&hlF%EdMoPmRH=Kkkva@B7hH>WS4u z9XIaia`n9=WilYJvMs-#~b{W|HB8ElvEKhDf!_WRxU zv%lJkW%S9cbrIsr>9d{k)^1)iy%Gzp@+K~cx$gW>C9WDZs6`#J&ps3C$&+8Tmjl^| zCdBdhX0ipXXhS<*s*s0bZ3T5ehw#a_@$bFe&gvdDtz(=b9%^eBruKGr<^30kit50>krtTG(C<Wq}S<3JstRW#sD(pb{YFWsP4i&zWGng|8vX)J9i|VW5juAS#q-Z!Ti5d`osNqa`yit z-pTZ9=Ks;p9CMDi_s=C6_x`yeud%BAaYN=|v)(=O`_m2X5!;A+08RC(JMnOxV9x$~ z77y16=KV|WIto#QVw9j1Wq9=cqY?H$pKTYv9}t;dSzn!>~HA5G+0 z+a;X}RH6!#rSk8&RMTe*-CLb8U3x7RirgELm_wbodNiOBuk}BggztC<;S` zzw!FeBCZu}XvfqT`>!23v;QOePfyZw`}8Pl9ggWl7rK#o_Q}vg_M#7Kd&)mHj+9UK zbn9CAL}pa^#A3s1=I6Ds|H7#z<(s_kxa9_I4CxGD5JOmPV*fEr-yD>7e|{LDk78Rm zBb+9;hV#RixQRS-c*z7N3+0DQ#xQ+mSG~6H9)wev7hVvaQ~xB%-2UFtek;Fsg1$t@ zWWTKJkSE4ocga2M_7r`hU{qx(Kh; z%0D*t69z^-%lu#W?LsaT&mUUA>!{MxlaWd^x^!Vx|#-#*== zjP@!2Qx& z*?@`)LX)^HsGD?==5>_*lcw zgrUUGh2O)+*MCkW{9p2}{Ql9}pY<(x>Dn`L#C!Ca&ew;}9=|?Z`|ZMh{-50Y>F}w( zpAKJC&z*eO`W5ln&)F~VkNnrhk3X;O`hxl}dy75(-GXrPjX%yW@LM5@aG~zG_?S5X 
zAH)yzJE(hNI{oF^8MS^}InRGdT@cqVlsG=u{&`11D5XDKzfeYxds1wmoL+%PA7qS111)<(`8qkO)Og346hGzQg0Q-j)dMg&#!g<8K%jVD~ zt{oldL>IczgI>gE)sI+T_6f%^qdOR&4`OPU{XcXK#Bcr1-AKDk-)M<`lOkjPjv2yT zYp-8^tNwWU2u3l6N8jHW)c(iTsB(o2eH)9)*^;{U9=Uv`|4qFd>*iJZef=`aZT47@ zP6E@ov*ouj$Mm=l;mvjb-;y-wDQwqC8)Q`xWYAVkawgEe9KrE8T;@G z$&+`OA3+u)x1aD^e~`b~q=6xrBspTU7UeiVXa`(T_ua;g7YEg%J zG@ubp$nF38u3P#Q_3wZC@z5-;1+8eq?HccIo%fgQ5bi`5qHl2V75!i8)o$Ut^7!&c zZt{ERlj_(whq9MG+vUGSAH5&BdDX4z&t`dRH~(ko)%@v^SMz7aUd^8+=ayd0Po`hZ zpPzm;e*x{z8NeWhFpLq5Vhn4Z-Fnpz^Eb%Nx*z6mk!j%!xxN3x{6%Rj;a>l%`O8>A zYVg(k)y^O0-!J=N{}y_B_b=_Mf5_!y^$(d6j&sEC_nH5ly2-DWP7PuoY%Mvtcaxt$ z9etL4iF0oo=o#bV3&L^kZIX<8c)oW3o4Ee3(QyfK5>52n`oA~w!7H`HE7ULQm`D8! z%2~5xTF{C%w4(!^=)zQ=`boVqgIUZWIjw#gRzG2}U;Q$qei=~K)mJNn%63})qK>)W zr+?agtg9b3$jvr&0+|-hklWYh)2M5fiOr)fy<{Ka8o&)@WK;R+7al-d`?uYu zoztV8Lmw13gkg+e6w!YeBjZ}2aWa8vT(CR2`MN)9zbD1z+I{9yzx-x>1Mj#MQsUN; zj>k{&+sW*QUd`VnPrmD2C--rHLmc54Su79dg;Vn3`m!_nIWDo`zuxpdY;}1*Zq(0L z%EB4{zmolHVJDF5{~lp?#@JXg?i0G?+$&t;26_LtcN>K$LT>(F0pFS}-z2yA{An^b z{$9ubzmf)E`~0)Ti^uXWoqN(-CUfH- z)7nR)55_;@o=6?i=|mU0v09-GRc&qoJ&t{BIHs50hwUzX`5h0&KDK(zH4rx;uLI;D zCLKRS#yyC0^AG$M$3QaP$r17MNQMc+Sjscb4|u@4~by&u=;#r5g258ysxAHeNKYw+-r zvKjjVGT*TGHo1y*>F2)Ns6Q?40TTNHwxpYmY13n0z=V1z?YJH6Vh^zoAo~9M^x0i? 
z2M*|mSfIx~fVjtK>;pIwcZ@9Rz26tc?@sAwILC>8lS?xDvAl3aUgHJ@-%ti{yNiA9 zVUPOsADG8@_q+N7K3o)v{8o$-l%fpfs6ZvEP>mYYq7LMS`IeQPqePac`)oR^I2_n*GrLwSAZ7Vg8{&l!)iW;D0|Ujsk8iJwhwAx+Pa z+r8!owDEth`M>U; z4seJgytaSsSa|J#|Bnr9y6#q%|Bno|u~_W>a1YB^xs-pb;y#w$+o^PN^MBXON1?~x zVjsylee+P-xTIfUyGGhb@46PQ#Z8RyFUh=Pc~9B*k1RyOvyOcyaSlU~a53h)ALJ|c zoy0i|kNkga-Ps{!e^ecS`9XC6;`irDoL7pt|8^N!jtW%bk^k?U&0%(NU3o-ogKuN; zn*Wb`>Xl{n%E~eSpIqJH|2u9uWsR+Ls!@Yltd=@P+oF!X>Ap5lPj5i{rgp~fX}`yB zZ8wUGd*C&ZEr{Q~Z6(`~Xi=y2$`|gwhmDn&xTn~Hx@C?``u)%P{#UznI?#zO#BTxK z9_0tB175m{bC1;jc(nfKSlQCHdbIze{4J&Bzh7SQ==+P>ZE^p{ZfW+Q7Z?9Rd1nv% z;&oz}|DXAe{{QdzeuePKTlGhg(H|O$VS6yf{fCF?BN)XP;=0CEk3LcQ!~KU7^l5A# zxwixN=$I|UZ@xB*PhtrvOq#2gF#m6zp4A3C^KkwB9zD1I@4f2pbL|7RcMEafp3DLNf8V}7jyb>~j&O`DE`Ie^IF0%{_3@J@ zpS~5&$xB?}8aIf(Z{ELFAEOXOn7pxnFN*22#vNv~HA?8ESTGJS-^TunoAcY0dzf}F zGl=U1e(d|ZW9HR6ryLci#B0lcmGIi2{te{D|Ft2u*4bucn)&}&JZ1mS+5a2%UmGab zKeEgI@38-loAO^w`%h*H`RSM|<(FfgzJNRL^M8Fr-@}yl4jWYCnzg7yeZ0P7*0JH7 z&Ef}jdb-v*+P1NMyQuzOs@MK+h~NaQ2yc^%+Pe0w zQvY~AjWw>Un**3IM`zp|K-^C2<0)y-WW1}-F(P5TF?JQ-(UZL_4n%5g!t+B z+tyn{l3YRx%h%c|WEwlzMcmsV?(v%WhB0x^exuiXBJumk+21F9{6R?gxL~%-WS>X z-uV7J7O+_SjQM5HgnRl=m-n7gzdsXFJI{E(pV9yKOjz4}CajC!IP@&0%@0^NKL8o} z_PBGb(JynIa#Wxas|V6ZSc6XAjOSk2f1h51ZP&@vN~2C%j;j?{hk7)i5!v@W6Pn1A zmA8gwvIVWU2xXy-Y)1z=F;Vk$=pwt(gI-J;S5H)!mq4F2&OXzlpLmK7D7=s`H(=e^ zdDY+%NtyqKoX0|qdR`jC7{MsUFpdPKu~wn{S4qEC`q-+JJ~GGN|3mM8lk)GFdyZKq zSL(c*MwM%Nn!bZw%qZ)#%I@41yLt1#H&_tf^V>cS5Zk|p zPS#EVmomCo55fYCK&3Q$vqF{|0L5b=c0T8`KF8$G;cX*=O7<^n9>V>iHCSKDh5$ zt&zF!t95z49iDHy=ZlByD+hk-iBK=y1~j5td1@k?(SoUJ{TH?By*8H=Oxi)g&|9MuQeb&2mT*UyFdyl3rGsqZqT+x3+1t3|)?r^1)*ulNrKemYE!{*?dSy!>DD9kjxKm;XDs zQ~k&J10`<^cWZwxyuAN&;m#{Xq0#vtt$0KD2Sq;{J~sBVVW{Y>;rB|P2_I(*FJqk* z^1~;)-W)#F`19e@eLo*Y3Va8x^@;G=#wS9PbDQz`hR4Gfs;v8JHXbO?E#k7?M~gH= ze_mW)>Ob=iwu)~H+s;E`0dX=vL6E&#Oj##|L9|3h`u?< ze@gQa=p)GOU)HQ{Z`GC?*8Z<73Zvr3FpdN!+pHsK)h|GwZ8SFA7x#SgK4W3j-fzRg z~w3!l-?u`s0n07=Z@Qrs1;af7^nqr9RJMJUE= z=TDaim!b@r74>hD{rA4l;K%HL5&Q3*xO-dMU%yX<<10~xYSf?>YXh%Y|1+ljE1VvZ 
zKXN<$qxL_WckfjDgxy;?*Zw`${?#_P@Av3G)=8%x4QRybfcIyFPfd?=?&99}&GZ)J zzJFJ#4-Rn+Q0D8m!h|x`>bEvb?(hf54kQlPo*VTB8{H|~h50W1_Uv(7`;(+kNoN`} zwc7ufqbD&xru{z>Lbr2z(2G9we$Xcd6u6o{Xt~jcq682YF#;4^6uT}zsY*{^baMke$=nlp`S?~&r(9alK!sy!YgE| zN8eFf?3XGEYvg)M?3XIi?_Xr@Ur|V36@~cy*J0N=z#)z>X)bcK0mt;Xc5SB8ylDC< z;x_^2OWCMmwu;?46L*eFT;a9-_t(NV$U9LFeCVXOwrX!Qn>$V49B0Fuj04acu|2K*AudgBrR@77t{E5KEeI`SE85VG4&1KO z9*O>CSqPoN*{@kkNS@e_p_`nLMh}_m|NB+@MAIjaj9(ns>y19^8PDwN=cf-K_T9%m z{3IFsDh9 z3+udTB(a1P){(|qr|~sx6zZSHR++k?K)q0;|Bqbc|3CWvjpH8e&qu$nUf7YwF7`0N zx7a5W)%*_}&<}Bh_-%mrjet4rj<|34!~JjJK2Fw-2EfZ=X=)7kJW{&JT{B#LN_K4t>I49 zg&z9swYAxob)lEuhlNt@s^aHD(lK-B7q>kATo@n+v6_4?43Wb~Em`Z|Xk9u+gh!E~ zZ(|E-Otm~0raPW9N8!0JOV0I4yVEt+t$)XubH+;`l@yDsjFwhK7G^L`HdxO$?1pKZgp?XuHl$1Of}0t za`|tUE|Pfk{jXQ@!jbcik;N&_aE?o?v4!i1Hf9r#+W+)zEZ)4P{m)*mtg}T}rQg@~ zTt2M}SJJu04f0Oif7Tj#YxfK2@teFGwZ3IXFT!@8zCWaUwI7S^D%*Qq6y7tLEQhRm3+AWPaC}*9r*vc|4-b1u+#Zn=td8E5!e3pk^jH< z|M}JD!-=-U$?L4+|G;yhUz!6L#1Mutf>DeiqyCEff?Ry@xsVXf?f=v18Mdnj$Z2s8 z_y0-KAMOA1wOjV5xWzBx|6lU|8~g{DO7K6~vfTbR>ud@=iFr0?0prqGLJI3hV+Xs~ z!&o*}ny`2W&cV&m@NzwUp_{x-9}i2L8{OXmQGIKpZr`^&~1 z(>J@>-!APB`YE>g;u*F%-OB#9Yk!D~``?_CmzZ?y6?u(B$Ah-ZEIWB4ocC|NeY2}ntPC04ilv7SQ#gtQ6 zmgwekxF7Df!}S~vhx_4vfCC)JDW@!`F(qS^l87lL37KNb3jMt|HvPWO_nGI<=a2X6 zvu=Csz1Lp%^;v6^sYyH94=w+>;qMNf5OtRMCSqw!${p4m)79NhiCIyP=#h|Ihjt70k&{Hf>OYxfV42{XK-+3;Q4h z_CqKfgw_zsKjVZp#*ppMf!XOv-5*H#hq^w>KQs`oaWCbcw#trdh8EhOaHlIDm9=-) zanWZEqWBYkM8_%>7qT4_`VV;V2w~ z=ZegrxXF=x<(&ix_}nLPiWdTjFi|6$VRB54Gjn7g2k zG^~dP(9TM1^5T8qKj;6C6W>Lc0GH3{XMj6+!ssc^fI@2H|9Qy463#~Om?z$teZUvA zwNF2!KLkJ^{OA8)L71!Tyc?{=91Q)e^>%ZBlzhF0UYI_6hjBz>{leGsBH$*+07!<*elY4eRCT>P305c4FFn59+Sit-Va(=w7shq) z^Zykv+n@%ty$)(WY4cP5J5Kl>h+0qi=3K{K%&k78Kk3uvNBR#_{x53%di*hWLETo; ze>>?9jnEXtyAf|QvW4p`ah}pgJkhL;Yybb~$6U%kNvyaJV6Mt!{>Q)>ftZJ(_9F8? 
z>=Uu+ng4+i+!_lg`}a_;HtM8#hpUX?HV%{LhwNuufF8g_>=V!#$hZJHwo?Y7E)^Xi zBf3Fm>L6%3LD_|7(EicYI?njwB4xOTc#d&hKi45Uv3DU|zM#y2J9vWrkhgdt=Rr7m zx7?TVO}W#~!1{Uqa*Xi<)PgtRe83m{VDtgbn?eRaAPh!{1R*E;AIM4lJ?er8`r5(R zEj;@cX z5b*|+Kadq}tU>c{lq%ZAO4>u4HU|ybzkjs7;+wcuYa6vaN7~rlioF7CP!EnxQ$;412MGf)RB0JAB z_s6x-tWm^VOWwpB0D%w$tzo1S->}V-Z-DF|jh*gXx0dqF~2X37UtpCE7clSg~6tWT}w9>K5)x^8AaEOIMsgYD2td3Wrld@`PI z-%I&qOz(tF=z_`hH3RLRp7xLVzNS#VaTw(r*@ArsaqNOd%IR*Tow5@ZOI?q-neuT$ zt21s~ME;_U4W(_Z=N`1pUC2%c`hWWr*-O}c5CZ$5)Q9rt&6$IkC-s+u(doq;2DQ{B zTG_WDHT~ss+Cc3L@=7P~kF2)w{*;Rv%7?c8+IjOLZG58YAr1_X2{F&0AF#+rS}9Ww z1N|>#J7uc_^Uv$M2bq7`PCo{IKL2ry{ufOCe|-oY{*S0jX!pXIzf{3U+y!8U(KpFQ z$N}aCDv*O~*du^`fK78FFV!RMFsbiH+C*(!L?4eeT-=;+6uS2E4&dzN+2B9>fsWyK z94!1pJQdjkC!iNfSrfNfeWVX_)jZa}p&#=A)Y=zI4cOdR|ArylhG7K8;D4^ajbpzE z6JV#le0m{qG?B(1(Fel*EcSBN|10MG5B-8b^b1(quR*7v)^`c>BmX!2^Z)Dm7qPdV zVE(s{`CsO^+o2O=|cgM=CwjnL;NKLvM+i` z)|i)M>ThQ6S^&BL%zg|AsLY#0KX9`|``jWi2XB#T?wi1K#P3)k3I12{ z-!t~ZdC!rgvAL4$bM@qVCF7sAW4@%PE|84<*GT5YYb5L9BFPTpe1>zbl0Sp>uN^!q z-zlGbn(0B`2U_E%OSOxRz6AAM_!Q~poX~E@3~%F?PMqhEgYXYw1)q!6d_H5#4qQ15FP~Sld_3q?(EJM_X1pANkj!%XG0|Uc|Ci88UImLkuM@(tUw7M7m;3_p5Pp3Fkds55HeG&$EGL~FMaKEhkl!JTxJSoI+WI%C z2WU(7;MUDPj@Dz-rEN3)Lw8Z1!tI?YqJ9GZf)}uVk9-k1eJbq@EP#u!0#@?Q>yWo0 ze}%MD&;1eEvi~N9rdF+IF5+cmqo==;f}}1I3zF#omF>`~!+W{%S7iIHxyp6Z=>PBx z8<5S!_dDcTszTP}wP{WD9prN4^@IsVK90N-xd(XN2W>|KblDA1v3bfxqS*{%tf*pcjug-A4a>I zOZ%Hg{hPm!|GOFIDusk80#oQ*#q5|O#ar2nqYqF@e3^F)U>*|pGCS?F*-NR|K3AFi zKhbr(M@Nv-vpz`S@zqL(?$uBWE%f89yHC6lF_@=nE zxlu>;k{^cA_nhGDnd7vrL-UmmPuf}++U)_(xPm&y_4VkAHnab;g|JQIoIx|fzR+O$ zjz{^A9?KI+JkGn}8S=RP7swq@ z*zYDqfjZX7IlrsdP5z8q9F*|PH(f948Q#H!IgM{O2IG*{kW-~^BZrv)Sr*5UveJ%1FayC2zJ69?_iJCc3>6sg-bf41XZA$kG z*D5`KTc&ic#Jm)DkM&B=GaHod1sjx})tU@edS1U(>E65^^ID~Q&8R0@eQ*Ekt6sz<$dDd*U+jJ_hrQ0fP9{OLwAJ#Ju{xh zT1P(DOH&$s?FB)fS?(thgcy&iH1d%~7$l_z=r zxS$ovACP~9Be=haJOE5}sozru!nv=y2ZZv^KB5TG$tB#QW>fy>a^Bcy(XOtqUz4jyk*vcjg?g)50yH+|e(%MKd$+-Ae!CR?N3SFgyh1iPNOwv)ANN%vFbUG8vBR 
z_hRp+-QI_+&v;E*>4UT~kJ9}k{T7}*MDwRlvLC74qwNd+kr}0v4ml*wZ;nXkcVW`? zy&WMbxMTGIwjI=of74}PSN|UQ=+}rDKYcbDY5?Rl(>L(O1$qnCBbi0*KYSLDZs6~Vx3Z9Tc>FKJTvyzAjY6t*asND+j*9oU8Q?2 z|GC7i5&e!P^gFtR@r673AE5m|yp_tbHI)0_!`Q)pmh=#|6WtLlj+mjD()|VTQo5wS;o1UZ?aNqReGb&UOAu&w0wsQ-ot~rS&LnLhnq8h3tC%amn|n(4LKVvuP50 zJ<<+E`|eOqAbT};EgjzEok3_M2z8}4)-suw4gZ%;WKYfdM&f8A#oF<80)5O4e-AVRk zO7hTjN!d4zzU4IjsXI;5L#VHox=E(bG|AF)?IJh&Gw4hNPm^5wdU;%*&-Ddd-%Yvd zzLs`j?{qQKS1Lw!chD}>>v&(*f03=sCzOZlWX@9BIwNO<@20=VbJ!|$G9S0vplRqA zPLujEo&3#}?{AnUP5w73cHimfw@#N9X!V^T4!>!_l(5=PpQyvrM{)L(F9K&s7pS~z z1#Ns~y^AQ+OAA5o?Jm(qcZtb#m)HS!iSzSQZoz#MJOod}t6+c>+jL3Y>W2Q8o22h? zlMKkjoVC|Yvc23SCrEpLH_1ijdAdpdJapk``wEdo?!146yO<|vC&y_gU3h=mMl0b; zk!4(0j;z35iLCObU5#_+{k8L%`4{knyVSY3OMMvUD~HlfbA8i3cXTIdrwQM}^{q(9 zfS=NaTRU`|@Kc<@?$UYCoqeqC61mb%%&$z9uX&e#+SU@x*88T)9Mo$TK>~FSQ}XIk zh`mQA3_I1Efi9)G8W?h^|9}P$)^cR3e2Mu2G}F0i;om{yn7@ZM%gUI)!cLKyh93<)8C5&qm2Pg*ycS&KF7~{>o0J@+@!(WR;o5&g{=^JniG%Y8sN;ZT9<>fo2~%640~kuE`TVE8ao!Or z_i~pvvA+%7@HdF~=&+nZegtRX9DD{}09}3c0(=AC!H>XjLDj)}+QGZ8=PVb}$Ta?t zV(xoLDIR`Ev5bdG$w(+?-yV?C%mY&9Jb-@i0jU@{B$Y=GNtJIHIzM5on}tcuyfCTt z43oOhF!Em*`7ez9n}?-o_hGSbJ1otZTMizk3?CN9j>FVBhov3afuA$zFnx~0(iL!6 z=uWHEJjdUWwaBKBBdm|0pG8|X!83N4cPh@nS7ho^##y(Ddix#p@0L@R;Vn1~pTL*! zHT(cmZx;3U@G8VZE>uDr{2e}qPvPnSQPmwsBz*4?iQvAGp+^}19ua-P5s8jFA~E4d zBvyY!;tWS59+@z5L=wl3h+*Q0Bn=;tWJoa{kyJ>lIKqFxk4T2&h-5+*=4{Bp&$#`t z@xc zxObiim9D-}3dI2l4?Z9fTMtO&mID&C>450B9gygYFG)<;0meVj3{je^n9* zXDE18lIl6P$LCc^N&T&o>VH7e!VgG#&jHEkJ0O|E2Pg*zB>TiJCC7X~jC&tZa>oux z-nK`S{1XSHV9%>k82($OsP_QlpD-~S9#M*e9#$;w2c*RJfLH^_e{qi}WyVL8a>pY| zMd$%^*k6`k@V$NwzlBHO55U~38V!X|43*FZzrBSr4wcXee}#A8?=S)WE0{k8^l!JDa!xd^iw5*IrK<~ zilhGv(N4y{jCo@j&&CZ+mG}|HzsN+!xCUer_GDyA!Bk1LG5)nqm2|GjKxXdXKN!0i z{~~ijwDGSC<6jraL*`>Iz`YRvBBY72wb{-bn^QZB!bM83TYDM*_Pax9?znb@!F^?aB4=Q08aDnEOOdo_~OUC;nZ? 
z>{X-(dpTe zS|qXryR~;F-+w0GAHCNR)$nhIm!hn*k2|OyatiLnHp*Wv_Lz&0jJ>%d<&PsbuD=r?uJLtQ+E4fn$R@H9LNFG36?Kr&=Q9u$EE%Af__hm-IL z48rH|C43Fv!VfT&{=zJn4-4UXSO&{s6{yq~;dbW#s6Qf+QP}mp=-v0DcaMx6VXQaK z{NIEN`QL^7kFG85N%$utQwX1Gcab!%Pe*28XUSadAnrSs(gp>H`X{*FkGA9$&eLEl zUXObO>kFMZbjHvx3g;|^h|i`-WUNl2PAwFD#x^nBS$}Tl^#t-p=YDXk2n)dlXR^6kb`ft zpXDridnTO>e&H&2kVgi~(Ie2pl<4HN@EpW7poJBkj1vFodhJX+%$r?h2=`$afiW1} zsgrR{K1O`%6)s{Q%pv?sI+?)iLRz|kJIsaACkRK{Ex_y)QU3xfFfW2lu={!W*dC_M1kS89akFyc8Ki~X0+KeYy3t#-XRw2di0J?|K8ULE0)oewZ) zi@Uam!>DJjf-#;0a~o^0o4Kz28{Oo46&}M6^W^(7F&DwR8TYbo`~vqO&lK+G%=rTB zT6$Q2@~`;GUXOhd_nyl&$8o!lYao}kmcl;fwIQMdf9;vZF`s2#ITnAy7P972bON_l zuF=vYkaO3+A^-k}|62U~2&3VP89Embh5=sgJj@=Ld9Kl=_xzY5 z`NyY9;W6|r&l6TlAIw_%jJ`?yTo*B$=O(Vs=kT-Q_Ym&Rsgq{yI}UnaPsL8ZO0#R_ zakQK8q(wvuGKaZ$ov>bq4kdZS=+7F~W0c8nn0wdUNsq=>%FB)Dki9}1&3m=1Cmio) z|Iw8)&YsPCcy{@z{F7d_lw&QNZ6ofRxj*l$eb3H(@?0SKO{H&D&T|kZfh z{EU2kf_NA_8ixpvzg8wjU+3MZOSE@Jp5I60D%`aD*YXZp{;QWoQb#^E`m@LBzC}{Fjr;Bud7gT2kTic0 z`Ez@4fqb=o(PVz;>RH0`ERmQ0hrd>aw7l{fZd$re=AR&*nRv8zD@F_Zf+C+&hZd6l z+PBcsf7F{c7Y6HmWd1_hUd#)TOW+60*CUs~au{t@WCe1SW>$HA((W>08vA&D%y!bO z=_vD>7nuVdV?b<~IDCgy|q`7irRo+4UOc1c>WSn2HT1Qp{x&=%*qpdaj&dRn>>i%5ij8g3w_JX8s#lM;!G#(4X6l{v5IiyFHZo z?=bXhkgeDqxNCdh+mRjEok8SxuGjSN+DU^hcmv*ocj0~LroBIjJPl{y6V0uT`W5*( zdRA2?lVLjqfPg^j&?LEqe~N%KVhFn*Kc%bwi4rZN@ilF z=&9EL)qr{f+zht@)sXrN*bKh{stEPBu$p2J z&QwNk-k{v2g?Zo(Wpwo&%HaCz6uM&S=#5L1QID&X-(X(I^DTj$n12V?V_pUaFh@Wv zBmq^7nhS^EDF}hbc#h?ST>(^wD%FI#3iIvIf%#R)f!{+K{0U0n6#NU$gSwr)Z4d#G z5Vaqjj4;;ULZ?Vf$P|g)!5%mIC-FAMGiK%=zyL`=6{+6BbNmIF#=kw&`Ojwt?RO@N z!dVmSzeeU@H}+1I+<>{v!%mfa>;<&Rg$CMs|G8rHV~zsyAQuWCALjE8g~-&SQzT_< zilp~Wk+gpP$rDT;AF}bwf;jp!v5)}qV1PtOh9n3FZGt)y83B5Tf*6Pfhm|r4?a&7Q z(M#{b+zIti2aV7G+P^WHpaq(tf<9t7bx;+ul76EtggFxIwLZ*|fElxidd-4=G3y-G zRQj2?l_5)wF7h5}_Ye3x=t#>t3XO$ z$=es04|_j|G2B%8-fpb31It3{D)=Sb3lG6=i19oje?X=|9+be~J%4g7Lk`^cC)Y|O z|J-r4A)ma1dTJYO&2On6_EGNtOj&%5vYJGlV4}QM@XQ^o(;P#0V{ZTp_M^P>d-(khf-wie*Ic(4|5Oy3EX4ULejkuNb-YU>G{eBD4Xy(_uW=nb 
zyTX;&#*?mt8~+J!xHO)~I6wd>%bBaDgelCKCmriEYg8hz$X@(t#1HG6lw>*%#HuBv^3 zgdbnP`rizR44A>X2Ya9)8e%3ENNhiP@WyK-et4lI1o%kev4vt7TPR6kvn6@p8cD%D zbqnX6Z<{6Q$PCtPGm%-?vynMrv&4wZ#hynz`MX%xg~F|~qzFuoSz?CbcKL0n&BJ5S<@y*l7 zbFcyP?SAM_ff@b^Ez78%;IHrwyaxgF;huvG$N?wt+hKJ8zJ_n%2be>D>lTQ((O=KmlX zVj$Lw`9Fw<1W4S^{2wGiGNgDh{|9OGm(u4k|L4iSI;gX=sI#+?In?1l>pz7~Vg3*O zry#~{!HnCGrXKc~^`ieo-D*LWjM4vrQYeG+6V!1Rf7-{z_^6sOkqv5k=>Hs}ZggHb zW3+*OjdsRp6a5_RjL~N7+8Lv*Vf25HZJVfL!_bf3i@qSwpq-)GwVk-t9sefWTd+4HZzJ4YunX#-4U(Y<-h=QVPbvUU+7Hes z0X;-P%mDk30v1rVJSB%X^N_hvu+>xYkwyDG88>)JYPhGQ;Gd36i(^d^nPK$gp9r3k zh5cjl#$xW10PnzAm_i<14jbVs?BBt3%=2Ld+y}pfm*5_F3Z8?*5D#f!hO>|hbzp^d z_yEqqWthb{#~Zdm3Jhsx^7*6iI6Mho5$Zzpza`8{%(LKG z?7i4i;R&u;h1*r|JnkRimH|)VUWfbxa&Y5S%>T?`&ILY#X`al1;8qP!UBy`6lW{Jr z#=H{a<+DIvj<8X7EVVZoA)%)p+BdlvrL$Q;5OgMuZO>+_KL*b8tk#J>n>8VMHjc(4>t z1d9b(g58RHDgI^1a>7>xv5%bVtB}>$ZMfIqUyG~@;s5TT{NFu{|GOibu-kEO_6n93 zXblV&2ed&uba?UKdFX^LQ2D=mIPV_8yGMF%kf?bZMDMdfqLDG!V}mwGT<`{oMn5&=~KO59wu7$dToNoXPm>Z!9KRYyIZh_XdoR0u)nA@QP zKPPlz?gI59?++1MgC!E8b_I(bqA|xn>_NUi#A8l?MEnepggF^f*7E&fvi;vSh5zg@ zM%qpLkIcrN6H5C}yPJ#5!=8_O0se)^BJ3tR?Z1=uA8F~K{qJSZOaBzn_E42!FCS;` z%LMH|vI=`O?l$~ukhR$B?Cfvi`pNy}+t}Z zoyaci)qFdfmGrccp2%|S6?;kV5Yii2!nd|=;d^Z3dmv5N&2D@zzO@C}Q9ycHNl&it zLbhUe=t*w_>5Xja=X>DZjDHKV4txDEzE=<53z@iQofseqk|B1}I*EgLNPwt4>qHOH z5Cf{qItd5#H6;@AcdwCxA@b7*`3aeW-MD29{p~gM3D%&GvPLrZu92*eHIj`?;hI!P zgLHUgz!bE)0`7t*;Z;b5 z)3k3)tE=vWy$*ul9(WY~1mTbY-verJ>H=5>x4{$zc_UW`CGnh-l?LlZ-`x}`}*Xd!*Pa|8ILv+9q#$o@aEi6Mm&KiI2${=}z zabPE7v$0!<8;Tgnjvxz``b$1zgaRlWXI{C-Mb56|-5A$B%^G_V>*;4;7I9tA`0zE_ z*QvzW2={7X@Vj4g=d*pzJ<_;GGGR{RpNHSOxEpDsk6=&0zJz#Y^K5Sr_vhF>xYu;T zFTwwM;#)ynw=f3yE8I$4uMjo{|C9KCi+?)TRKfei^DSX*49u&z$6K(9aqNq*oVY8z zT;yKDMG~ft`Qv&W|0k6N(&*tWO|ut@T|SfM1w+!Z=yPe6bF3c@NZYlaO1qqwj@2XL zT>TaEI^&ubC9qYbA0X`}*0dZ@?`Pk|7D=hR~aa1c(Q_ z6P+Dsp=>onz4KbBV;-x~hE5MSC(zk}F8n&dfw>jhN6_g3E4tVvYsqWK(&Nl``OXv* zb_=o?{bw8Q)fbr$L)PG5F~EEo_9|rMu#5bHyz>T(=ZBO;aA1=Uvu3T zU-1mra4&^tuO<8s_)Q~@S71Hyv=IL%T)&)n<`7n8-){KWjS{h!@jqkbD5O4w@jo)A 
zfbqYT@xP7nfA3AqzcT&@10+E*qzo|rhcrls49J8m$c7v<>mQH{d5{kUPzXg}+Qa$> z@fUMX3->JH8LY@s>}AOE-Hg$#e5*|!QpLRG|2`k(=krbYmjAE+tKrt#N1lN;u5E`7 z2YCuQF?TUv^gGhYMp{RaK6jHQPr{3&YdSnjdVUE%!0mjWTS>1N((YmGCn29}M#(q7 z#eExbP(`Tur186?`$^(CLfS7O+{fB?MP9{sxry|@4dxT(et3{@wXlMC=Wx$Exz7dS zZ72L)#G6W-w-fGjzVk!4m%uN${tf*9jsM5Q8Ah15xW_fPKMg;?{lxhOJWiaKwfw>L zpK#6Z3I8DReTsY^na$uWi@EenWCru%>2c^5g`;CMvQAR8Ir1Uq%Ap{WxpT-n%0CwI zHxe#~@G$}C7J;T?6lGs3kqjgw+|e;|p|gNY9$~H?43G#3p6DilrlS-KEffCIY(#${ zm3jc#7*4rI)?=?jb`|igzzN#f1==3#HgG^Il=)4SQm`^8Edh($R4E2CW)n=FckGS6 zxEK23$SR(z5?SF`D&_dUNE#)RK5vmWjimh!5C;{6hppLKNsB8e$;!_;QKcLisjOPzzJ} zE`zX)@3aA8Nw3xC`QBXHLY?s!zEyJ@R>+sh^81z@CpR z;+*|LNL{;3QgBOWPktI?vM)aavctU@<1gWVB#R}smp#lQi_sBYEQak%BysmLNk%5^ zS|Z`Q7qif_SR(z{3+}&I^q!ohk2z+1u|%^+-of7g)?Gf*j%))bd-OYod{~=a!WkCq z-S6Wc11H$K&oeY2?YK4J*MhwnSz%rx;Po7V~KpOy&rkUY+WKIus|_b zp#;iy`$#ELwJnx#JJ06i8Ie)g^}J6s?-hfL<(=aC7D+tMn!xiV4)Kn-B_WdulVV*g zsey|mZJ6~(%o)U!slQdSLKm<`cnx(VXI(??5NVvbSn>x*b*$H;%NUIw0}^ExeBU{*coDvYg_MD|{!8*8@7b&^5q`o)B6>L= zVT6BZq4%MO=wb9tj`}eFLLNII5`WxB5(tyn>mvsAK$5mFUf#j}$^iCX1_^mngnkiu zQ`mRI+AL&4&KUYk@a~t4o8UY6gt^+Ol))Kr;l5e&>UrKDmW%-7#TD=?#xpzM=l{pf zt=0V*U>bcow#psYP)j=aHYnrrEqN%%XOSBM4N}6SsAibkM0<8 zv?1FI*k6TwoqKiCM!$@#JI?vH(9p;JENH@Phi2mT!+$sIg>-0z8fXL)l)*PV`$f0} zGq5iJrWw^6;a0dE*27(JFFXiOz%vj6e}-4#2*g1WWI-OZLjcT%oxoJ5>Ix_4Gsgg* zz!3Dor|==TFJN2@Ut<0OzJf96d+9;td*rt;0pEZt;TYDcvtS-9gaxn|eBmZgcccHi z*F~0Mj@*a-&p{Wt3UhQQ`oAIQ|L$OopD^*r1RMIlc633V=>H;H}IfRKlGsT3(T8gC2WFU!Skf$X}-%cFLVT8 z0n7m(D0VPzbux}+KaknZxYfqEwSsZ1`9Z~MeNd^~N4*t7J(kHhHWmH0exAFJ_t;N8 z7RtC`ALHf_#?g?9Ig9aZ_Cdzc`x%>}hn2gXaq|ww(OWcqJI2k(B4qwn#?jl*SMyXP z(j5f^WH>UKv26_F+9=F=+~R}L*<}nI8^}JUAja+U7`J;dj`wC9&sf+(nii9`W-q?Q zyenz0rM;E3DG4UcwY1m10hD8|CH=L3v)Bk*%{7e&N&7JRt5Aozg}O>RTU|S2v5E9= zAG&fjjbj8oHqidb)H#73+ZcLm?{2u-U zv0#8q$b}*(feNUBqwrVw6n=!OucnU&>tF-i2O*FGmCy|B@CJMamq0yE{~sdy=>J0$ zW<5k-r2h}GnByQGzXV9cY=HmYzdwiNluhvc{WwQ)EoTt+@cobR{oOcwu#fNW;z?O# zZM~lF4-GuiT-`ak`eZWP~tH{YMWk%MyBHbMV?oc>Q3I#zq>|3lYqSMj87y9rjn 
z?XVv1hlk)*h=w#ULnpiepYi?2;m?@E*=vvhnNS23&0#|amKf>ETVWQ2lMI5PSe|4Hm|lc+v7(IY4I?&xR4ni;d> zmlSkmpHbpk#`G1;74$P_5W+sdQ05GJnM>G%-d*1e$+*az!hZDb#@YYe!(0J66Gr+S zh4>X6ogw+i0w~^!ZUvb97_%cw!`UYl$3A$Y8~Z>Rt7EQ$a;ShB!%yeSAZ>bW-#h&p z+$Ybc!QPCy1)9JPZG>xQ@t_s61OHC!U7_rQM>?^qdYyzDbku)3iOkeVltU+ah_28{ z48$6B5;vfecu0UmFc>u5J{|pk9sPfuq}p`s|ItahUB~_(-4*>8bo;Wmv;MW8^)Ksx zS^r|qs%Ut+n9yl42Xg*_AM0N{M+rLoR-UPpYs>oBPZq-Z7i(9QtYKAov)_yb7296+ zn>kqja$)^zSo=o~>t9~%6@aEd*1w<`TAazEsP8CKzT7yJoA;Sdx+132JgI1fYcOY;7U@G7`a zUS`2G_#FE}KWoe?R>{NQM+I z+CMYjAJQQMG9e4HA&0uh2)XnJ^1^8UkOjn3h%DMo`$s)xw$c6}El%1$q!oK9vaFx_ zZ@+GG&bpE|Tbr}42AhHUPfzqeh}hdJ3J1VkPkay6LrcxFpT*Eybm9OH*Qx!DLlou%=Ev+nC6mX zue~H$$W+fuk^<@2(_AjGHhGCTpG)*RE-{aINsMlnBnS6AWG+~}FR|uviM{TZq}1z@ znDH~=W+9GZ_e;$6Tq4h3LZ9^#YyOwe*Sf^K-6g3aY$ei898J)|b&)GCDuePNjtKQd#b~iB^)9yUY1CR8oDg{k;@W2aGCX=%dG!g zW{%?$^WT>wA%O2szAyyy{gKJo%SZzgr0u5vvxEN6Hu^uI^nb$W{~-Ts|A#PzTloHr z6HV-4_^;#taOS_E94ep^s-PNdPy@A42ldbZjnEW$S?s}=sRtSVL#u=K4}SiCb}Hqc zv41DB3r_LuJuro`cp0fjwpGlOhmeOMfHL=2h=O&Lsd38AE#!j)zW))hK`nO9P*=Z2 zzD}4n%x`11z*m?X;Zfo$gg@Z+YwSC@*J|SI$L`KGPvf=*w-un`b{fC)gfGEw5BE5S z{d4#|Zr?*GYs)j1Ql`Pgdh=Vfxprg)b{jNa$8*6yn1(*=PCEx1aJRxLxE&UPN*NA^ z$@Aa#QT}@;L?1UH(R(TX1un9LFxSH}SPm=TW8(Z2=<2KNZ&ZtU7Pj@M=WsiTo$kL% z^{yU7PVoE;;nf&O1%>y_L(W86k=Gz=kvAY6$mPgmwDtFr4oQ#-zP#^!kOhB%k6{3Y zp^ki1Pk#EZ|92%1w&L%=fAT+cE>HMVgXH;$-o$ zvTdC4Pnh?4?>=ZG4L>6dRle2u^~^iK5$0-E`O)4$0u;bs;HW=&2U`7U-=GaeyLRZn z?1WDGA6*AqIsda>>Vs(?gJ>T;sUK+r?OPhq4{eYZZ(iKtkZ{`E2#ACz7$x55WqdQ@ zi6u1=?6iML)(AQ zD7pQOk{8DsijlPxD8yW}3H_I&wEs}-+bEV}=)a^=|3m4f26>wJ%13Db$7%m3X#bJR zc=zQHMH678@kF$Rkg1$w(hiO;ks3HZm>Ta-AXMM+{joor(t6#fRr)M@k+(;?-A z8~s1_siSP3JXAtvM(M= zq0Eo|Uy#=S2)O~hk| zwlUVd$5{s_9>)ah-b1W=53mkC%sM!v`SeP& z4=LgtzCvU{hzGjf9{dy0gS`PBlFIsR3g+zH9+I_7J0rqFGN9_X2V*<_<9O6V%IiI( z)=oR$$C>%P9#Rb!qlXk1cyMNdhnO&z8PF|>^PrFHAtlgGyluEUGWiz+va2sv8Rk(- zNv96()#&ixYzPm=h#pdRoPL7UgR!B9*eg8PN5gv^pCLaGJ~D_i(9nn0Z<#Jp5R;02 zHRp}RWp*(K(j^JuT@ue(F@}TF$@?=T+3qPx;T@97&$3hMJE+?Vl(f_WCDYcy7@$M4 
zd%GkHjK{hp2lDERl-vr&JQbaiABsK&`x^=mqF?UPB__@wvt-swvEC`xa3^CQCvCq| zN-OFa^Ejm(_bS{fG26zRQf+Wb?XXj7#+_2{+a+}cPHEhFoI1cMcC!>7w7TmtN~1yF7FxA8A2jWbV_({p%SsTQzDObN>pm6=nFa}+S(~G zhE9pKr7Celof7Y#rX<)qCDAKQF=Td1Qa}Sb9feBDa3|+~*d=XKnvy$%pelwoH>^n>%YX?otvkO{_tj z|Kau_I`*9Z;m-LVP>C){6;xxkL5&R^eJlGvn6IkOJShzYC#6w;igDjb#(gKHx$i@1 zf!5&<#Q|*>Ka_Up!0d!h$A{7d>ekZ|4iUR3#}I{CANqkrhkYP1$k=TkNZgJOBtG;s z^S`H=|2@t8?`h_LPow|%0s4<0utxTQq#gf2(jlYo1IdIe%-N80@dM_6KVbg%1Iat~ zf#eUImV)6cx-><{dH)lA)cSBWJ!DDE|;c8A)Br*bj4TIOjbV za26aGhP3iOU6Mzb3!yBfZeb3ffHu%hA18=8kGOWp*iPS{epVJ3AqVmx7YZ0RwK`~hOHb}`hbD$L`uFQc}A+TGa|n3vQ_SnFWL#r%kqg2+c_8at zivR`A1k&gh!nS*fTv)*T`Fh&_FBDmYn}M_I)0#-fS4jUT=I}|=tZ(=p<%+DqKNv2s zq3~|xFX28&CESH-z6qNOf2;ZP{kCiFPbu;!@(In%X3`QJ@gUP4BYyVJ@72Pdb(KGA zl6%G?vu;KQ=>pHg_m7}+oiq(I&lvF%^OI}v`y1;Eg!_)O6%Hcf>RH1>8sGxwkQ_#~ z?&bYM7!&Im$A7_CsRUiQRLpE{%{tHj2u+1bcX^Hy$G59>az5x;>brcNk9VlsMEWkJ zZ^8ZT$l4*kDY6ckwT3*$^B2Y8=CVk-&lM^?pA{-m&k?VdA9X&G^%-W;+rqy6sE1e| zX~KOo`E>Ilv8S-6!gFb7WSMzRP1?Ue_ME5i2A$Q5!$)3N*_&*Lh zpczlmCwzvp5uc;H2<3%;*Q9gKWE%e@RCoqIm<1~3^H$`C)9K?=H+@R^AA$?9KJq2< zD;R@s;Cq+=nMu6?Q-Eec^?(I%6)Xl5`T}Mb*8ahQzCwxH5p<$a|X$;jXbs;6pW#|=e!YqMX;7;nw zJCVPGR_`)#K&fvT-^)s$!7A2Fs}!eN#Zq7u)38;{LsqHTcS359RY*1Nwk;LRBUq(s zPlc2pJRuc?SHnvr+_RMVuS$t@DU~SD`&LRcGA6iGV%S2)<3uInfKqAiDV2`CQgPzm zdAwA*j+M$ENlUV-x(@D#M_~i$%x|Ste*djbVa@Gt$TnBnAW-W9CA@m2yn}f}$2_SU zWeTogD7g_HhL_+Jbxs0ujSK6il&kefjuKKg1IG=i_rU|uvnE#QE`inr&IeCW9>Q&> z<{pb%eyl=stUjUHb#Y4fn{mohn4bZTBT`?0m*B!V^iY_;zOZqzw60twjp!XzxyQu3ze)6&CW%fpNsQSfvGpd2 zt4LPjhfU0XB`S#(CNa!QRFaG)N%pU1Z=tHBj+mJLu44ahqLMKZu4JAtNfzO=hfK_W zCo0B0iApXq&uK!RzF25-)WW^hQncU1d2!4kKr#2rN8Y)Vc@gee%6(3de(y3LP(l2a z7fp;eVk^A9;uX=c?Ko6Kvm+*W{D?U zLZ(>~eGQCz4N4L+dBiL!M=g@N%`9m@YL)e`Yaa&fR5Z9otMj zVwQqnvlRN9(Xld%X^(|+YG$spS1ibq0kc?LY*LCW3$jVMf3H;NZBltKT&X&0mg<9M zu?3l>hIndS%=}-*%=%}N(%^2E#vX&x#Qp5!2Bn#BEyr!r>K(2)c)m8DO4gzcO2;-; zaqc%uC*iub@H`hIlmc6^6i!q~kz10I6IaZBtt9&E#q3wAk&OCcd7CuJ8ZMUXfFvd5 zVliW$VoAe2J-nFpzz;bOrACsB5lXUevBW~$(Heqan+A&c&p@#>bKe&1b;e?;Pc4=P 
zp0iP3%>Rmt857m8AG}y<30KZHuIMk8%40QBg4F=qk+%4$;Tgc0N ze@}~~d0QlXo`v)>%K-T{tM9aA2U#SCu*R`k$wlTJt0lcpi>Dv?zamm8!p*dm?@v6% z#A6AxND0#Fov4)VvPfA$vQoa*!uL;BD!E^kqgJZ7Mk+R*uV!00W94L}E-XT+-(-=7 zgBEEFwQzP$vSL4xtTZEAc&^qxW^s&IZc>V5Byk%ShoY{z3j_K?^}Evc&^^GJr9;ZB&`Z=E)|?|#(3sycP* zRMoHQELH#douh^)%S?@=YdXaBJW*q6`EW{F-8@$xE;F^iRe8$WBZ(RReTWt@yx&OJPgKM-Czw4k|x(~Ve{*RmQAGoFO zemC|%w+x(gqknKS2XM>q3tky{pX>jI6&b@HKjW5(;sJ5tPhQ3KzjsC4+=o0zoF*@E z(^tF2M?U`jZV9~LmLPuUki`_{UKV-FY>FOt!{2O*Q(oeO0ZCr%G^MCtnrCX}eY2^( z!Go6D%y+6i(sixH)LrRej$$$O#yrw@HTUX?^WrbUKJ`H@-?`>~@PS8KLLO=LdC-7* zWchY%>35(nUOZ3xc(^}$#Cc*+YVqs1Uds-6#LRJ)Lmsg{%>AI)Bfr0u@zpIJd9|2v z%n6UgPI@H1;*rETk0j}r6!(qv4dlmhl zD~-p!(say=7Ki>PzV&J^@BetE-Aw=Ecjhjz=Ee1Yg;#pG_ImOA&T;)SUmqCZ`ae`> z8oK0_;Y0NQ^Qyf2WmzVZ+OX@?e|LhRozwok<;6nd4 zj_vE8#fo2kcwB7eVX@;oLc>xq@y}9u)iC?tf2~o4QX{ntI+F=Ut+4>6;tp zd#L560du)&5P#^gamMG^_OAU?xtlR_WGD9TyRZ!s=Q=#e`HxNTMr?!l9@2P6(x&O- z*!F<`YHWQVcmuXQ5GEYCZc?Iqu?>EhHpSmhn-YYR`~%z6Eq}`SkFAh=%5E89{ELl~ zIiHn(#8tl6BQ{{aE@pCEhfVZ~JEihm(o}VFg7HtrRI>%!=t`xjwg_A4&Db>e^Zsir zX=*$WGd10xF*P5=ws}1^&WCvac_L|Qe~jykV>*xX{xj*jkH<_swxp@|c*fLMnKbpA zW2S-QNz>r<*UC`Qwe-IW{ol3d|Ioj%Jx=^YDr0h8cP(q4{Ljr7(7yl=@Pg^<_{PN5 zF4q6~#>BadDR?qt3ZaV$w?s`5{3yrAZsgk(Cq{Vw!=E*-)D1~fnrkBink#=HEsWb+ zSE@{Hhh0(+8qAF6@tZ=7*L^N*mNrvw&Lw?Cc2oNYF6js{o+rM`?2_^$F0ql;ew^{r zF&7KRoKJ_DPvBdB>XI7H5$9DdzL)+Bo|i73r!L;7c41F+aldd$u)!rEALDiWh}#99 zaq;!Q$2KYf;*ytKl42~DrrZqkiD-vQV#LQQ8PC{UGQ79SG;-7>ql~G>=Io~a11{bt zx0?p3&k$q$srT$A_a(c@gYTu>@q^W-i5FbrBHiTu97n!mVEA0MY2@8%(_oh4E>)ZQ zV-qp}JReq@ymQs2skdC>Ua2;@8YW~CjQb{J0)%Och_Bid%uPrL_*W*-*-psxfJ=NJ zePTi~AbDv*QoyJ;b&rlzn_@lHCc8Psb2e=%Hz!QCpQcR~^W9?AlSiIq<`v$Vd)S4R zp7Hyq&8CwzWu~eR3-8WV#1f{;{b^I{;R$I2&HE>$1vK75zk&Lzs!a`dRhxQ`Fs=jL z*U^ulv#8qCb$zv|{aX6*>S|LZMg1}N_b&Wz?7(h@-+LQt4EUHS%PxXp?$hh|zb5`2 zd_I>__AmIK*v`O|1g|ZY&`;4o-p@Du*p9M4mPG@3b+IHslK9k( z#ge|hm^Jxgd7pfmcz*mDdHy$WGq@G}Be)BU0{`vUE~w+v+zNtL|pijHd6WUZKqBdu~wr1ArLni-7$Q0-and~?3V4lIc^DR5Xb_KSm+zzpVvUhignfO}b 
z>aN>S&>h^kL#lw*Ayk0M>vu5E!ls8^uo=IlVTUx@c1V+Rht#i>NW&bqC*u0BBXz%p z?U8+*FJOyo*}-}own_G1ppLQYw#j38#)r3Tlh>jt)5&jTOxx(|&w}Zzx3R9ajrVJ| z$vtcbPj2D;f-SsXutg%yE%HU;CT`ru{A8OyJd^ieaN8q-ECt2=@#xsTd?nM zk?&InTWp)yIo9#e7XB9z>v{d)d7J6vU*V5iOed!-{EsdCKV$npm6=ZdtJU<{zY=#7 z+y4*O;SbErr}1B7y!cmFi|xK^#Qw7q`5}2Xf^WmE4vzmo_!+_;{4n?!sCb?=r66`R z!VkQQ-5rf?16XIfjbmP$EjRrK;s3#TYvO!*?HS4}E9d`@0qtL;o@>PEx>BFATd-jR z%U<-Gpq#J`*jejz^q_klG0A@+pDyzDeoN%@gue*B5B>p^UAIfjxoMN-#$CL#yGzQi zyHad9o5_Ad8UO#uZmPIzmsCEvi}@V-w-uYI=Gr#aEq6)nE$H8l?P5)j`G3g9^UrQ- zirGxf50#l(cwV&5l}g)(rP4mpBpoM9SqCnauD4318^7nM-PC&mn<41mUn&D&5DbA~ zFmk9#M)AjNcGGwY`_Bud;>t4r#h<#rRNMzk#naHu{AZU;AG$+)x7bX6uB`wFV#f%r z>|(yiI&hKA6uqjR`;Xle|6rFSu9!9@v%7en?BaQHCGUUHepi_J|5px^<@!>wl5aU{ z%(hGAT$iolI8@5>>ME%`S_*%&srs#QQ%$kM@tI9tU#U!Ax{~X&+~hB6VE*qg1y7Vp zXs^!{K3OUe+B$lPb?+O>P4S%Bl*l?v$#+X76)I)EjSlkaa#O?m4paTLXfqD(k;dW{ zX*skw&cWg4a#I(6XA!o72JF2PyQL4m_x%Pjb6nX$i^&?J4bg$x z9x6AL!^;u!o9yhXJibRNUf6@}#9^vJZ{R#m8y>?}uy>Et9^NCq$9Bu~qSX|5VK;Nw z-P|kpuzuOg+|6MMU-enuZE2AxNQ~@}_=v@n+7AbG6`5GMDSfDsHSgWhe`L1|onZ{g z@gqlf%P<(bakq?uiK5*y4kmAI;NHDk+&S8iZ7=Z9&lUtjwOj|Spi^HDT(6GUP1414 z#KU$NRA0AMY{XX{+sc~HO;UILR(Y9t=M`I}@#t1rBK&jE@Yq)QpZIOro1}TI5e@WQY zuvKoyANOrV2eMT>2W}E?%T{z?Tg46h?{1Y6_D^5hD!#Y2O8D4KjQekr;LSJj9KMNl zzpZ>bcdLAkG_eo1O8QKxe4B7GTPokdFS}(Q_m6#IDc&bmQ2x+9`f#7vfdf>$uum#M z)v{O9T=aH1RgN4JD7)wBND&AY|j5?$<;7~%LzH}lJGN&323 zx9FDiM7Lx{y3qn`mq=*41mE8-A@_FXQAHA%*e=t>MZ8y9B;ThDJFtLG5HBi{#C1gy zHE&0ERK#}*x64>=yHs(0j9gJ9gCA^X{J34hq#c;sE}o9<;=R9!d1;ZjL7e!>{Y5f< zxJV{$E~1^bV<*}!&BwOWZrib=7BPO?F7>`5X>bub25>FZncz9p4dZWd5g^|24Aq`8o7Y|0*>rpA+ZY=eYm#{UiFI zU#^sa`_S=x0sS&BTMa+>=Q6@KMMl5zmohfayP1D?om}-9)@H!9;0ka(=%b25;6}nX zgAVGoAOC*v5a?z55dLG}PKHA+AK!m6^Zh3<#&ItY?*x@#oU|?YZu|lK9xwtzAO#vcC>+<3X{hLe@p8DW-;_mr3 zv>(62{(Fr~udMR@k88w#?KKj>58ikUn%HY3{MPRza^t2%$tOmBar^}NC*bgR)cbkx z16}`vACrcM`F`hPe7_UFneCPw|G%a3 z8u0@#wmdZ#-VVBvV)OjmM%B;<*a_$OqSOBJqFU zev0;oHGQj%wSU(7ZI%50L&6T*%Tj?~d5N|E6KDWdSo=ToGv0ss&%8&*_rKAi)nD;5 
zX?TJE{|A~tGrEA5V^^a8MgNW8e(*}Zjen(d7GEh{XtcY}T#0>c5AXk7iT&qF>1W>n z=?35B+XY#^Velc}GQb~YdklXZjo}3EM!0wvV)7Q=58ccAp}<4f`|ztW{V?x$VwCpd z2YCJlkMVxz%C96$IPxU#hra(S-v8tMP|_zrl4DaKO*jL}R<2~8c_s6XE5!=R85h}r zov;H`u&)wSp~2MB`5s00_wZHH z@fbQl{H`8!e>QY~PIQ3yeaD$+RkFSf2AOjW9iKtdg$BZj_5pv4_4e^2CuD-Ki@3>$ zW@L(VZs6eH z?YounLkYTF@R#5>9DjkdpCx=dcocj{+~fH7fTzK8;6>0B;=O**z?y$OXgi0l0JIQp z23_y+T>#K=;+N9yJSTmGdqEHA2G+Yifj*G;{6QITy!8pO0~;s@)gOF9s@~%rZv2Xe zumRom3#sKje&-7B((^8SU^PWHP!+alLGRwIGCa&jM1hhuO1KjVUqX93` z23BC=o?eZ=O1rh;{|Vb&_#14G;RnDB_*3GCXv^#HSK0o*@&5(?w_pSO5#z>>gTDZu z0=McgX+DO3FL(gt$@5YCPZGZm|0QrU;qBljgzJC<{0r%t@P|Mjm;_Hzb^`x1q@Bm# zi+=(C-@)eye+=8l$HBdXzW_c%_-^nU^12uQxA@=0|6A}hD0`Us<8gFAFRWmnXa0DQ z`QtI>kGv=6z^`DtlK3j#m8)KPOKJ}A{vW92oq{@0zlcr;G_LUdAJ9DU7Vn;-1DfOe zA0P7l5Bv_cJMp{T;`<-?J#6>l_oev$&;5M==dqWW|MC3;Fbqb(C>R6dU;?Fj2L5Chnl$iV!N=JDaqy4C z->TyXmlLi8wV;c*zaVZ5e-bni*A8X~e}ecP!fz110JgF{$o6jue~R#*aIOEI@W=3{ zi2oG+t;Fxt_W`Q>1gnx$rIPr7%&*A?BED=5h+6cGsoM;AZ$I-0!tne&D z(_Z`vYnc~V(?F{}f@U4`@%~@$1X}i6E=c=+z5{eW@Ad)PRebjcIIiG3K=|e-d8QnE zg|Q^x0Rm2*H8sU(<;`c%74r_E^A)K$@`_aR?5Scq#QTE5Lp-bS!$)|Qk@qy)0et@f zo>e^Ck|5)z&p?d$DEku!d8RqhN^;yV+hh2n96!MRezu432RYvOKe23kw-lb46U2|R zJ&EsP|3A?VeYC?`o^>qRD?&H{?k1eUzd?@|N%ISkBh9bCD)Il0&GKH-e2nSuHQ?jm zFN^4J{J#L70G|S@9Cs`J-QZsE8Snu3CfEo58hi~r3N}gmYw#|3fV^KK?kB(s{`g() z3HZNd-(^TnVb!SAF%3jNiEaujTs3ulp(2KYjz-jrdJBa{c4Cu-%H^b{E(G zpqpUUV;9Xx`wH91VrHw198F$_pBtZ=lv)0NaJTXCUuT)!h_5UbCNzb zCo#fNkN|NIYT=vmAOga`2c|&)_<{Zw{S@#5510VsU=p~%$n}g#!5A0?1E3!afkDu- z@-fDQjHS=bNiTi}+wJ&Wi;TJPTRIq%H!v2@@lAdFhTNRgUz(GqO2+J=--|QLJNcjv z)Pl-6-q{D$pbFTsywiX6uf>6H2WH+2(BDe1e!x5bjPrlPIdBc<&>wL={|We8&X14d zH*v4~GvcoYj}!M4xSRMtXS)r*3j{z0+(g_D^PFdd=fPR<6SjYee+&C=2ew2{zlr}f{C_0was21NOL`kviTgYDm+`DH151weKT!Sw?|%Y2VF##SUnQtw ztX=&u-=8@stbd|4Mf+HXUw@MC&zwUC`xcr;{ARXWdft=P5BR=|?LD4<=xvGX#P1?a zH~V@Rbb6^n0ywBgHOLTtlKRvU{vQ5w;0K_I?ccNSkIxXun3mGtKc;N z9a#pvwfOo;Kz`ub154;C{4Soau6}%6${rtB;Yk%nq;eU|Jzk}a_55a$c z--9w>1{Poi<-i8)zyT^iC8z?t=PxzD32H$ds0R(85j25j&;nXP8)yd|pc8a~ZqNgI 
zK_BP`17HvgfnhKLM!^^u2NS>rCczYN0}t?mY2X8X5CB0C0$~sVQ4j-hkN`=L0%`C~ z&Mo<*eDXCju!3@619sp56&kOR23M!4IBCJH0@a{N!`Rzo>}@jkHWS-y#5NnT%|`4P zvEv{hpVZqYDP@H`R%$d>8gQK&MyQrhtr4mtRA+?h3Dp}Rowi|R8n;nCxoYXqPN!^< zE&T45|5vukO>(nrlTXQ~<}_DXD^OUR-%v6 zNp(t{P^Z($bSj-_Kez|n3+@B=gU^Ep0RL3r(+9v8!I!|7!9nm9@Ymo$@DSi14}AJ- z;OpRF@CbMmd;=T;e*+GKZ-d9d6W~ek9q<(RE;s_d2c8DcfTQ49@Emv^ya2urUIae? z$G{K4OW;S~IQTKRR9^AjpEZABf4%I zBf8v8hfcrg2x!+opxuCOft*9vP)%!Ic}u%?tB1AQqwA9&)8*!;bv@s)Olh~JT)XYV z@AO{|yxV02dUad9Go+7tXIj_rotUoMyH=g^yEVFf-fbcOTV%W3Dz{0I+^*LFZU>*? zroZDh*>Stn2?XJKz5|BdgFE(}*8#{`P9VJrC z3PN#-v7^M;QDW@aDaAY4vD4VG(}*gS;!>hYji^#1YL^u6B5IcrwabXwEyb+87w*VkF^HDjAteUAlD$%5kSd{KZ|5G_d57%WD?7=CNFy`t5@{sYSsFDh zmD1aU{=#^TW}>u2N_R>rk-MaHx0LRY(mSMduap`Y7>r8wp$4m6vgE1&)BU5s4rWYlt({@umU7ch?)$OaH2y!d_}(UMfuA2Zry~KKa@~`5J*A%Gc%V_kUTwPM%*s zC=bgc^2h;x25=1SkIJL>%ATN_?|>YVzmadsx8yPT@A9|ucXC(`-+uIV zIsBQ&!HaTuDJ|cYZy)}?JT8xa<~eXo9zXnyJkI75^7v7HC19sKAy3>RPaKvfo{=Yx z@+$$v{#c%%F`twtACV`&|CD@3zN3Ffz%!yf+@tc8JVln@mG8psyNBh79MM1UJvs8M z9QmOf`H_52o|dQg%hT|Cnq*HuB2PamPwT4Ce^28c+Po;-V0o_$fCljk0h=jD0baL@0T=RYscv;C+%|F}H=qP!sA zm+v2bLc_@GvR@)E%8TEZ7k?l>kYn;g`QZ_M29C%}@+0}t5q?MHxcpdtB7ZMGm6r$Q z3hNF1x_<%GGmq*i_BPMrd(!hGGmt+hs;#SOr^|J$xOA()X0oe zW@=@oPG%Zprcq{^WTshWT4bhGW;$f1Q)aqkrdwuuWTszc24rSXW`<;DSY}3KW>jXz zWM*7uCS=AXGm|niB{OcB@yLuI7$gEdpr)AbBvq6~+$!u6=BQhJ4*|^LmWHu?YDVa^noK@z^WzHsZcA0a? 
zT!qY4%Uq4jIc2U^=IUgwUgjEPu1V&aWv)f$T4kne)k%!Oqx zB6Crhi^*JE<`OcO6mC^qT(8v1EA{e9zq~RauT07-UU_9&=F4Q>Eb|tbcglRN%-6|$ zz05bre51@a$$YcSx5#{}%(uyWyUcgUe5cHJ$$YoW_sV>q%=gRufXold{E*C#%KVtj zkIVdo%)4ZMQs$>*-YxSUnfJ;(Wd&tEB=b?3kI8&o<`XiXl=+m*r)54P3uUrkmIaF} zSY@GH7HqO$mj#C`RLDZ5EL6!twJg-gf>Rc1WuZSdup78+%tNfw%Ap+y#2WuZ+L z+GU|b7CL32OBQ-%p-&cg=VVkC#$;h!7A9oDB@2_XFeM9aS@6h$R~Dva!6yrTSqR8N zL>B91u|XCaWwA*Xn`N;@7F%VpO%~f_u|pQS1QEbuuPpY-V!td7$l{lBG#mipf&)S$VZyUTqL&Sxm8v zxfzqOWxFg_$a1ADSIKgXEZ53%oh;YOa)T^4$}$O>Ww}L`TV=UTmfK~yLzX*bxl5M2 zWw}R|du6##miuLSK$eGOd03W5WO-DU$7Fe2xa%&vWO-7Sr)1eJ%LxCyvOF!zK3Vq5 zazK`YvK*4-uq;PpIV#IBS&qwcLY9;K_JhxZN5G@tDew$93SN}ulq{!ZIU}da%*ePL<0ko1C)CDTkb@kW-a%s!C2(%c&YU<&;yka;jTS^~mWmIc=8HRypmE(=~G1 zDW_}YbcdXtmeW2t?U&O5IUSVKAvqnE(-AoxmD4df9hcJyIh~Z#DLI{%(-}EaCTGlY z#v*5|a;99)*yM~|&N$>ug`BC9Gu3jYM$S0pOs$-$mop7=rb*7U$eC6-(hls&JN1iAvrrNXGi4hsGJ>>vlDXGC1)q)?3A2!%UO?{^~%|4IZNh# zIUA6(K{*?evr#!4lXD}lTjX5u+=zB2wd2-KSk9$!-l*sIdPF->?IdyD=$1Dc`00&? z*E8})BR{>-#83Op{IuWldQ3ZnTD8-zosQRo+9A}boi2Wcuk-2Vr#E`!jb82dYiB?^ zgYw3Z_J<1&X-9QvOgrQJv_Em#dBbdk->~2rVI6Yu)BF8!d2>MC9F#YQw<9#VU8Qu<&W#~$94JRI*V~#^tetkA#b|) z>0EGhD($gXd+c}VtZ@nn+=T>`;GzU0v8RyOD9@$q>C)x7jGZes<1jp(37$T1#bz9V zr?b~pa_K6$3bDjZ^3%sn>f9!EZj(B26YR70Y z(i^mJjFg4q*(#*eN9hom7#wkoOPV>WF zq-*bcGq6&zQcl2#xKN|3B$`rypUwkE=YprR4-~>iO#-?coI<>jc|hkG&{e}RjxLa5 zs}KpOT|`i48Z=@HJw&*$MQ%ZzTTmY!;-_=Kx$GIS zI){+XAw+w}^l=3AL5BB9qYF4|KgquGl_~-&6;!eOa>5IZI-zlpQD&%cB*lgK>B@(7 zVK{n+_Vg}YCXPNXtcwmCAtQy}&*d4>M@4ir&gJmsEhB|a8_}UCKfNQyPZx({Y!)iZ zc45n?9ASN&_Bb+D2os9))5pejTAYtO(&{apLZ^)rS}Eh&#i=!%l?LtA8BQTwufy8x zh-WyBvSQ{(_Bd|scnpU!R?L?jov)b^a0prW>6Dh0a#|OUlsJVbT{pdDB-DpkbjZr@ z%`xpzj8&(!>SC;%syG~h^O2{M>jSO&K=zk&AH*5b&WLu%r(EZQbJ^2}vSl2gGcPyJ zLBk^>y`{6#CE^%~FFQu!LS^-h&p5Ai$B5o<(^d!uE*4K8qCHY*k0b4cJw|zi3wsQn zc3qnNO`mrBIL5JLtdG5%KleGp@U<6W$&U?(F4Lit<6I6Gwm8(G3w3ah;kojM3yeEY zP;?fRdIt%r^kF!KFmJe7l?2axp%6G2T(|2IOMGc`;68l4DY37x`Yw#gye@I)YbI z@EQtUYr*R(c)p7%4ocxc9xqK*()GsS=|&?-y1U>F6ui-bH(Bufe>lcpI5hBwh=7i` 
zR5mJ?tlff@ADDp!SV1|k0XuMj3Q!5EKsBfVPEZT#Ks{&xji4E{fKH(E?g71^AMhG{ z`5+hqV_*VIf+^qz9^eIj5SB|e>Sd#R8|B&BKs)FF3hn+m^QeKpPj8bNlevDFXlzxoTk5Qcub%;{$DBPp;Wt4KGlpCe}qwt8*x6v*@ zJ)+bjO8Z3N6{TOIv{RIRiPA4o`Xx#oqx4IZeu<6)`Xx#`M(LL*Z5gF4qx4IZeu>hi zQQC&<=Tek5jnbx3+BC|!5RHH+h=DkOcQgrTr)U~ve+>RH_{ZQMgMSSEG5E*eAA^4k{xSH+;2(p34E{0r$KW4>e+>RH_{ZQM zgMSSEG5E*eAA^4k{xSH+;2(p34E{0r$KW4>e+>RH_{ZQMgMSSEG5E*eAA^4k{xSH+ z;2(p34E}tY^imA|G5E*eAA>)i(Yq9be;odC_{ZTNhkqRYarnpKABTS&{&D!n;U9;8 z9R6|m$KfA`e;odC_{ZTNhkqRYarnpKABTS&{@m0q#o-x;XB?h!c*fxwhi4p~ad^hz z8HZ;ao^g1_;TeZ#9G-D_#^D)fPVu13HT@ApMZY?{t5Ue;Gckh0{%&OCgGWcXA+)CcqZYQgl7_-Nq8pV znS>{gmP<)^CgGWcXA+)CcqZYQgl7_-Nq8pVnS@^weo6Qx;g^J85`IZ|CE=BXR}x-H zcqQSLgjW(?Nq8mUm4sIkUP*W*;gy6}5?)DoCE=BXR}x-HcqQSLgjW(?Nq8kSFZwk} zzb5I|B>kGCUz7A}3ce}$rr?``ZwkIC_@>~Qf@cbzDR`#fnSy5ueku5+;Fp453Vtd0 zrRc{L{g{Gd3VtcLrRcX5{8I2s!7l~B6#P=~OTjM%zZCpZ@JqpuPZwQE!7l~B6#P=~ zOTmvP@+BU}mr`&_!6yZm6g>3vG)>*p)ICkz)6_jp-P80{n!ZZIAq|H#9MW(|!yyfa zG<}t(uhMWy!zB%uG+fefNy8-#mo!|`^jR7{Y5FV;r!;+*hF6+%G7Yyh+|qDM!z~TB zG~Ci~OT#S;Ff_`1}+)6WZ;sa{u%0@q5c`_pP~L4>YAah8S0v$o*C+y zp`ID)nW3H;>Y1UQ8S0p!J{jthp*|Vvlc7Et>XV@^nWVg3Ru7s$Gw}1f?EN31!ba%r zvJiW+1NWl7x2j24kxGub@FzF1(X8^s01~D<0?3=Vi=5q zNq(2T|07h`@Vs3?rWM*VLX~EDyOJy`$+D6xEBnD9a060SdcidCfgqsrl~E9rx2wtk z$5(NDl?_w?@~D~u9ACxpRUBVUp49_@e5;3m3sA>u(p8hL8CtERX{(mEN6BZ@E^klv zyhbTLN(~LjYMB{WKn17-RiGNw04Jygb)X(JfJV@)@wL^m7VRVQ_$U;wxg5$?_k0xc ze6)AHvgvZ%dQ00!q3)^6Ar|XD3UyuHt8;G!ZL(^nHfCyKrWU-YwrZvZX7-ueK_}=2 zy`Ucqf)OwVCcrfCfdB}CFo=K{h=atc8N%iyNXe?DV%0($SSSIZk=7ZlTFBW#TUp52 zGPr7OShXfstu&b4m|V5CT;98C4Xj!_i7o6Q%_N~AFs!j^t=1_l)JJcSmbSN0Q_HBv zs%3&mLe$$buCYp+SiDAI^bFe+YoR=9%DKI2acLM?LPdutkp{5nQYVcvbP5h2V{4(p zY`9mgIw^amG>iycEFr3-sk${*ts{R($j0S3H`O=t)8**sRqKFJZbgA?A&NR*F3&?U zPoX5ws^+iDB}6^S3L9P`NKbtsN3-aH*d`;`IN8w6^}ox!9D+@uvFKQfPp3ufv1;)H z&4pGjct$Tli}I?Bc%5qxp&$rp=t7MO7#l{f7)5J>oGBL06idWtiOXFVB|?|QsbfiK z811IJvJk2uq*>~7DGf3LSB;Nqsk_~30aj2BY`_j28b*n_+Lswx>3VCW(Yl&~wQAK` zi&qV504-!}*REb}a%&wibw*R_yz4a#LiFz>8(hPM6Zn5u_wos?lXGoZn=}fEn+Z0r 
za+-4{>iXym*w+132+_(`u2bU@_~<;-73Pu{GD_7%T8uq9X{*L6HKj={V})GWh~qN0 zw(EQ?8RICOdpo-e?Zahg?J!zTwK4}IN_|^WVoL6g=u(yI0yd!*O-;ti**PLYZ%0IEej!? z5qA_TcN8mk6z)&D3S(d#OaNUpZUC`bmlatl#dG~2>&pr z9dndb<{hhyhgTU_tns{CD{BWaSu@*Z&C)7swCWo6i8TfTYZYU%R@E6h{{@s@|oYSF~3JU2`WJqpe*L_Ys|~nm}jps$6jMjy~Z4Rjd}ALbLKVX%xlb( z*O()(F+W~o4vXee)|e-*F+W^meu(-KGy^y>pIc+Dw#NKxjk(qu^QSfDI3U|dkSWO; z(~32Q(`yWS*BIiiF{E8%D7ltSfQ)1rGGxmpCCdXe%VRRjHIU`X%yOz_%Uyspn0K-^ z8*qYVFbqZjX)xhrG2tNl1a1(Ktey0B^0kwm@mkj20$M>EpbR_5+ewd!CW|R1Yv&k8 zC8z=2pcnK5_EnHqg%vme$5-?K%B!Hh6$!~!+5zQM`T_YcxXM;_fgun8LCIEgY&G%K z)TNrdt0}*lysKk?d}`oULq0XsvxYKi;9nC3IMve^ z_3*8yF7?#4f&3cS*Fc+dxn&y%0cAB&MsvMnTS(v11mMv^{;kxx74EIHc`Ny}QdTQv zw35D!c5NfSHu7kv%nmqpkZ%X=)kPb0(S}`=*-gK7v)xUdyW!YdE!nUqi2OLKeazG^L*yBvybzQ_><=X+8-{W?F4+k6j0^zsh;V#_`b0QBLN`XJ zCwGM`cYrL`r!3Z`Y%H)|K~@-;vKW)H7?QFWkg^zXvKVKw7-g~;W3m`gvKUda7)r7j zO0pP5vKTY67%Z|0vc({g#qf~DxRAwYkYz%iWg?zslAUFeont)@tZVt)1#SEHdozc%aJ0a_J z)U(b8s7pPZ>IY@L0e%haYlr~yY3u})*F@T8>f20Rxa!thTEP^E$~srvdTSf#0OZv= z4QQ)2@@}InuDJEKAuuBA?X~NEs?kn$+u7Cb*7xo84p?vPPV)3fZV!t zn3B3_9i+DF-K6IlUGJuax@q-p*!9q|J!IQM&3b6j9*XIu2EC-~3jkWbpKyO4aA{*L4_Dp#G~wx>tn=(&_l?QA zAEN$@tOsbI0BHj>Ux2c)wXX*`CJ4zO>4VfiMBPJl8=v=E4^eK2#=`c#9-$nTfz~6$ zN2m+-@pbIk>)3VIvB|E-X=t9M>)0mOu}7|BdtAq^xQ>l(9h=-bm(Du&wsq`G>)2D) zGm{`K8)aRf7sO=4Tm>eCwdZ17;+U^2GBxN5^nnDvzn9;h3(O5l+Vd$(+0z}Rrq z%Lb?V2B-Q)#W-*Q@~Q}dFi3!mY+#Als3ct_c~z1YYXomZ$OhJi4K9TZtP2}h6*j8b z<`USbAs-%08_u|F)RqH|uchqTYA^!)vQbCc#25(cqJfM(W$h zzQ%UhXo6!C`!@Ze2n*)G0Xr|uHv{MW9<+Jr0E$nZl?AAuv z;Bwe#hf8}8Ade0Upv(^VcF=a6v`;6TyC{=~67PI~F+e;Q!bZ0lP_WRzTfG zC}V`Wk5HFU${ck9@*J%L)MYdZIChMDc`9v;QPwzZG(nvvh?^kY1ofSu92dvCXwymR zImxk;^c~mDhKDp>(tBxBFXc~@A5WYOANBW9Pe1MDCoX^=pk4vmA^=aWgAK->8$pf@ zQs*F?gYau>Wpgb8m1ZIeOY zCYH8M&i&0Q@~&zCJ-`RZqbenv)gxe3HfxBlp`4nqY&xl{vk9-m47@%!3 zK-;YA24jFS7?^F=v%i7;4Q;a7MA=QmHIaW4b!={x&6Z)=Y;ywIgu&EiJNdVhZ#%p@ zi0`Ne-YMwjvJv~BeXHr!p%|IXOy~*QMWOUA16N-dAVqF z7xm;^+?-_JBxxq8`y_cya_kiOOi?E{?a#<*(?c6gM`Y7So-Dg>`dR>K{j{&2I{4w_ 
zhev?p0@RTu`AwGOHv_bH5TASXW{7%)I5)UAZ-!~dFzv{_c{4(LM~G)FYLns2ChI+$ ztZ8hrjCid@K*${|Ij{9#8Yhljp2T93U>Occ% zmK+9_99DoF^V*y>B{|l@bLCx<<35~Ya+6~nI>!Vh#~N|YVFxvU;~k`9*paIs|4PcL zB;V>b&@Q=}a>+Rcs;-#{)jsL)spA_fZD-yfKM;{)FThu;+Nb=Y}|jfqHI;e1^P$x(yT0=sU-GlN+U7M#sRE zEdZOUSxjO;dKHA0CCOMwnxd81SBEK;0 z8i`Aewdovds5yqWIo2?9aq?oVG8d;CP0%Io8>7tfS?4 zM&}sb=91Kv^|2i1N{+Ru9BWZI)}3;!;pAAu$*~@jV;v^PI*ek{qdF@^b&rYa@rvrL z6*UzRpGNR}+<%7Qu{yzBo;O~P!*T3??pL>Av!QTgeAN+mr_rafW zoASZm2Y(;@een0e-v@v08OjHL&IRRzzYqRC`1|1RgTD{{KKT3K&+||D;Lof}`QXpY zOEL3OKKL^(R6h9o;O~RK5B@&*`{2*aO!?sNgFiDg<%2)ZQRRcb5B@&*`{2*LOZnjM zgFnw$#lHhoKKT3K?}NV|{(kuT;qQmPAO3#$`{D10KjTv6hrb{Ge)#+0&umcn;qQmP zAO3#$^XyiB`1|4Shrb{Ge)#+0?}xu1{(kuT;m^HP`Qh(}zaReGSCt?Be)#+0?}xu1 z{(kuT;mi<~vs%TyTyZZ~0r&^tAAo-V{@mLY_jVP4Kawkj^r zeH3FK6@Y&L{sH(0;Eyd%1>hfme*pdg_y^!0fPVn~0r&^tAAmpOCKZ5x0R92^2jI`W zNd@2^fInj@6@)*!9u%Ute-QrYiBu5&LHGyZ zAB2Ap{z3Q$;U9#55dMq-RS^C`_y^&S4OKBmQbG6!;U9#55dJ~<2jL%te-Qpb_%n}E zLHLK@&wNTTmQ+|aR0#ed_=n&hf`179A^0P;Q%LO;Qacrbe+d2|_+trCA^7t-N)>{C z2>v1XV<%N1_=n&hf`179j8#<#{vr5>;2(m22>z^Zsu27c$127SDg^%!{CR#T{+)qh zuBVvmDJ(831pg5HvAn1d{26Dc5d1^%55YeK{}B8`@Mrv?m>a4P{6p|ZqouIcs1W=^ z@DIU14F53v!|)HoKMa57k}3@UF#N;t55qqU|1kW+@DIa34F53v!|+G*rNZzJ!#@oF zF#NG)t1$e-@DIa34F53v!|)HoKMemc{28~aF#N;t55qqU|1kW+@DIa34F53v!|)Ho zKMemc{KN1M!#@oFF#NGVDaKlg=dB9EKMemc{KN1M!yoIE3d5hVmz&`^22>c`PXD*>4@Q=Vh0)NJEDgyrq{3Gy>z&`^22>c`PkH9|y{|Nje@Q=Vh z0{;m7nU^c9V+x6;LZYdVXsQVOBk;#!rm&c)2>c`P$8x5yoT&)>Bk+&FpEc`PXFRDGPbw^IDgyrq{F(cx2>c`PkH9|ye{2DYF^G!5KLUTOaSCgkiohRRzQUHT zm@g^p`3kL`LaV3H>M68(3ay@E9;LAHD{TAkf)_2ZdHo zq197p^%NFJg;r0o4x!NMDdvX?t)4=wr_kyt=86ifoZus~ z(dsF*dJ3(cLaV3H>M68(3ay?(tEbTFDYSYDt)60?L!s4EX!R6YJ;j=bLaV3H>M68( z3ay?(tEbTFDYSYDt)4=wr_kytw0bHAf3$iEt)4=wr_kytY&r_9o+;c>thNH zpJH7|q2W`Qi4__?g@#XIE>>vx6dFE-*;t|BQ>>*aG<*uXmtw4~(C{fVd1UtD@@Z0 z)3jnvr_k^zG<*sTpF+c@Fjp%yd7>jsRaDd@F~`}6dFE- zhEJj4QwjK^;Ztb%6dFE-*;}FEQ)u`U8a{=FPod#cX!sNwK7~nKq2W_#_!JsGg@#Wt zpHK<-C*YreKXVJk+(ISbkG4;-kfxIGN86_`r7Pwf3T>Z4+o#a>sU-Z-_9;y43T>Z4 
z+ozb5DCQ&zZJ%NhPND5n%uf_{M1{6bF;7uR_@nVtX#5l!KgF7+LgS~<_^BlPlkjIQ zqtN;(Y>x`tqeAPa(E2In#R{#TO2QwlpJI-z(E2H~ehRIhLhGl{`YE)23ay_)>!;BA zDYSkHt)IfyqtN;(=G6+VpF-=Wm@_G~ehRIh!VaX+`YE)23ay_)>!+AcDYSkHt)EK4 zKL!63{L%a==2;5OpF;De(EO2 z1r>HQg4ph7RG&ThgBFSRA>ej znnA^)utGPeu@Y#!#U#R2leZ;Gcm%dPBuhl0t8&&>JfBhAIR94E!_j&%i$ee>8{+4WdGW zs50qsL&xQbchOjyh4kpGVss9KLh^^{4?-JpQtkMN1v!zzEWru z6&gi_Mp3bprLbqJ4E)g)DE>#T!mh6}8OfKG0W+`wD<}sxUrCczYN0}t?m zY2X8X5CB0C0$~sVQ4j-hkN`=L0%-t$GyKi)H^biyf4=CGH^U!0Z{7@lGyKi)$L^L# zewRn#mp8*7m0#Wrf0TZCGyGBe<<0O%@s~HlAJt#p41bh=c{BV`|K-i_M*)~O!ygr3 z-VA?i$$2yUQ3K}9@HfLBRbbu>f0TiFGyJjl<<0PC-6d~^zZw2!_+x|4V}s748q8x4 z%%dF4qa4h$T$x8bm}h+^Z-ze#!aM?qycz!34D+nn+0)GqqE%3L%9|>X}tzq5*e+&FA@JB?LM?{!MFqpT%-vWON z{4MZDc9*xnADep~EnObLTpqz(9!*>xOS2I;K4K-aI;{JUXVl75-NETj7tM zDUY5hkDe)yo+*!>DUY5hk8L=QrYUcQKkr=SdFLw6J6CzuEb_c_mH%HH-B)g0TeM}{ zy5D=`NXi?^8_Lj2Ko%=U-*w3L-bWOm0|W@fnr*~?`kAXSSM$U@=4$`xXRgLvjky|g zHRfu})qKx9=4$`x=dQ+G?LYnh>HkmvfBHGB`7U}a)>y2uSo=>uk2UXw`%gcQwg2@0 zr~g0wY}WqM|DS%xI%BlPXpPYtqct~B`%nLW`dO|0r~g0w|LNzp_MiU$^#7;-KmGsd z|4;va`n}D}@y{Ip?1BCV`XA``oIh`ud!V2Bny3Bw{(KMgKhW=)f4)nfr~Wg~5A;9K&x(x|8!NU4`XA_jp#OpX2l^lAcP=o7Yz)~LvN=AQp*(3do-5%+Gr2mnA-frIi z#@y|Z{zv*9fQ`SKGmtTOa|pIa`XA}{PPj+wm2O zvHr*Uy<^Qi!Q2z2{ZI6}ao7|6PxL?0|3v>2{ZI5i(f>sM6a7#0lNs|yJa5E%qMyu|x8gm~PiTzL zn0LH67@YUxc|YD0{ZI6J+uIZUPxL?0@1|k|$4>O0=qEWwa_mI^iGHGEjt+OC-y8H! 
z^q=TI(SM@|H|B3z+{cb--mW(W!8<3so_a43z{ocgqAaW=APxPPYKhb}p|3v?Z z{uBM)&3B^zME|M&Q~jss{d5KH`JZ#Kh=M#|5X2}{!{%9HFv6?j2Rg-GG=6q#ZDjn-t>2>|5X2}eqv_C z%!rv0GdtCPs{d5~ss2;_r}|IzJO13M{!{&@`W=GKA?Qx^yO-IierIDl)qken?eNa@ zpXvABI|rk4Fgj;tJJWxr|4jdx{xkh&`p@*A=|9tdrvFU;nf^2V4o!Ea-?8b=^q=W> zbh`ece{xkh&`p@*cU)Y)cGyP}!&-9<^Khy8VXJ`7)^q=WJ(|@M_O#hkw zGyP}!-9Yb5zdMMX>38V5GyP}!9lY*Lzf-xL>396Pr}`bh?y3H#`W?c~A?%*&f2#kf zen+wM2EM2IpXz_A|Ed0``kma(!R*{z%+19d&dv$mp6Y+9|EYdA*>g<0r}`b#?y3H# z`k(55s{g6}r~2J%?5X~z`k(55s^2N!9N_M${-^q%>VK-=&rR>Ceuuexs{g5e2fBNz z-!0Uh>VK;Lss5+>-A2tx;GXJtz`LjV9r5m&{%86f^Uk^8+)wS9{%88#QSF(2w^Vzk z-#yiy>3^pGnf_<`pXq<5|C#=0`k(24rvI7#XZoM%f2RML{%881>3^pGnf_<`ohR;@ z{%881>3^o*&DNgjf2RMLe&>sOrvI7#XZqc=?3w;&`k(24rr(X%p6P$4|CxS=$$O^% znf_<`pXq<5|CxUGFngx|nf~YcpX-0F-y!pyNzR$%p6h?E-(mCI&dlx1p6ho%v*-Gs z>wm8Qx&G(+pX-0F|G9qm_j|7Yx&G(+-Qw@L{^$DL-t4)4_h@q{z32L$>wm7_(e#{W z&f)Z)>vyBG=lY-Pf3Dv#^`7g0uK&4y_d9#8-wn^6>vv$i=lX5G@45cx`k(7}yRql` zpX-0F|GEC>`k(86q5p;c7y4i5f1&?{ez$Uaq2ICg+|2ETex6`2^gG_(3;i$jztI0e z{|o&u^uN&WPGm3iztI0e{|o&u^uN&W^mLBC_d@>*{V(*t(C;R1FZ93A|3d!@{V(*t z(EmdJ3;i$jztI0e{|o&u^uN&mLjMc>FZ93A|3d!@{V()8E}!G_z0m(c{|o)@o%TZi z3;i$kztsOy|4aQZ^}p2rQokF)z0~hEYA^M_)c;cdOZ_kPztsOy|4aQ&Y4=k9OZ_kP zztsOy|4aQZ^}p2rQoozSz106w|4aQZ^}p2rQvXZ+FZI9F|5E=;{f^=1MsY9ovmAS= z|E2zy`d{jQso(wLUh03T|E2zy`d{jI*1MPbU+RCU|E2zy`d{jI?z@-zU+I6P|CRn% z`d{gH3%OVNU+I6P-(BQh>3^l)0q3^mFmHt=yU+I6P|CN6ClzXM$ zY4Kj^f2IGGeh2=0rT>-wSNa|M@0I>n`d{gPrT>-wSNdP+f2IGG{#W{6>3^mFmHt=y zU+I6P|CRn%`d{gPrT>-wSNge~z0%L_?3I3~V6XJQ(*IijYyGeFzt->cd9U@q*8f`n zYyGeFy9wQE{jc@E*8f`nYyGeFzt;a+|7-oP^}p8tTK{YPul2vy|62cR{q9QlTK{YP zul2vy|62cR{jc@E*8f_++ta<)|62cR{jc@E*8f`nYyIx@_FDgI{qFVlTECmUz1II) zKQyt|`d{mRt^c+D*ZSS`?X~{b`d{mRqyLS50(WopztR6jzq{AH(f>yO8~tzeztR6j z{~P^p^uN*nM*kcAZ}h*>|3*I>HFtx1qu=f9-spd$|Be1P`rqh(qu)*K-spd$|Be1P z`rX*>js7?K-{^O5yEppZ=;xUBM*kcAZ}h*>?-qA&^t;EMd)zr$-y8jJ^t;X78~tze zztQhbcW?B+(eGY&Z}h*_|5pE7{crWZ)&ExiTm5hKzt#U%|6BcU^}p5sR{vZ5Z}q>` 
z|5pE7{crWZ)eor5$^72xf2-eJ@ZRcwtN*Qj7HePF`rqn*tKS*^-s*p=|E>PF`rqn*tN*S3xBB1ef2-fE-rnketN*S3xBB1ef2;qk z{{qOX@ z)BjFCCa`z3^sHo&If3E*r|GEBi{pb46^`GlM*MF}6T>rWLbN%P~&-I_{Ki7Y*|6KpM{&W53 z`p@;B>p$0juK!%WJMm!?JJ)}%|6KpM{&W53`p@;B>p$0juK!&Bx&Cwg=laj}pX)!@ zf3E*r|GEBi{pb46^`GlM*MF}6T>rWL_xj)Kf3N?&{`dOd>v!k7_xj)Kf3N?&{`dOd z>wmBRz5e(5-L~(&emBA6JoaAyd;RbAzt{g>|9k!K^}pBeR&;LR_g=qy_;De7um8RN z_xj)Kf3N?&{`dOd>wmBRz5e(5-|K&`-@WPH>wmBRz5e(5-|K&`pKIKE{qOa^*Z*Gs zd;RbAzt{g>|9k!Jm-j*c2mS8y_d)*${U7vy(EmaI2mQdxKInJfx)1u@xbB1g5Bl9c z&zMSqgZ>ZtKj{CU|AYPy`akIZpx>SJKIs3T|AYPy`akIZp#OvZ5Bfjo|DfMJ z^`N!6son?u?yC1e{|Ega^ncL*LH`H+AM}6F57z91e!gHI^t;2|2mK%Pf6(tHcih1~ z=>MSqqyCTjKkEOe|D*no`rYmBqkebXb3?k1`akObsQ;t>kNQ9A|EM40*+=~!^?%g= zQU6E%AN7CK|55)({U7ziKKrQuqyCTjKkEOe|D*no`akObsQ;t>kNQ9AcSpXD`akOb zsQ;sWZekzxf7Jg`|402F^@BwFsNcQu+#Aom@!T8lqyCTjKkEOepL-p5u}}Ix>Hnnv zlm1WoKk0{&_DTOI{h#!I(*H^SC;gxFf71U+|0n&Q^ncR-N&hGPpY(sy|4IKR{h#!I z(*H^SC;gxFf71U+|0n&Q^t*+gTj+h#|4IKR{T#^LM(>mUPx?RU|D^wuem-QM^aE4- zr2mutPx_gWebVoqdY|-zRQsg=lm1WoKk5IZ|C9bt`XQ?^Ci|@av;NQeKkNUj|FeDo zYXH_h>;J6(v;NQe-SqFX{?Gb9>;J6(v;NQeKkEl82W{=M{?Gb9>j${@S^sDKOv{*- zebx_k?X!Nj-}|ipv;NQe-7SwB*k}Ep^?%m?S^sDKpY?y%4{h$VemCR$tpBtA&-y>> z|E&MB{?Gb9>;J6(v;NQeKkNUj|BL=F`oHM^qW_EjFZ#de|DxX=`o8G@qW_EjFZ#de z2hjFK{}=u4)b~X{khU-Szvy?jzAyT}=>MYsi~cYAzv%y>-%b0z=>MYsi+=a*`=bAg z{xAB!=>MYsi+*?S`=Xzp*cbg@^ncOsrh8xXf6@O%{}=sV^ncO+MgJH5U-UB<`=TF` z^II@J`oHM^qW`OYetlo{f7Sn0KS(!5V_)@u)&EuhSN+`4zUu#~-!1vR>i??$tNyS0 zzv}<0|EvD5`eD3%)&EuhSN&i0f7Sn0|5yEA^?%j>RsUE0U-f_0|5g82{fyJT>i??$ ztNyS0zv}<0|EvD5`oHS`s{gD0ulm30|EmA1{;&GK>i?>rH`!PHU-f_0|5g82{onL| z)BjEXH~ruAf7Aa>|2O^L^ncU;O+S0EZ~DLKhlKY{|2O^L^ncU;P5(Fj-}Hae|4sil z{onL|)6XsJoBnV5zv=&`pJNyYGmdND^ncU;P5(Fj-}Hae|4sil{onL|)BjEXH~ruA zf7Aa>|2O?C#K7f!)BjEXH~ruAf7Aa>|2O^L^ncU;P5(Fj-}SQ-`>y}H{_pxZihbAr zUH^Ce-}Qgj&sOZaelU99^?%p@UH^Ce-}Qgj|6Tug{onNim}4>aUH^Ce-}Qgj|6Tug z{oK#K>;JC*yZ-O`zw7_5|GWP0`oHUEIrd%ucm3b>f7kzAzs-ex*AHnA(%yId-}Qgj z|6Tug{onO}*Z*BV53=w2zw7_5|GR$fXg~D-(EmgK5B)#%|Iq(K{}25?^#9QRLqDUm 
zANqN_{m}nI{}264>3-<{q5p^eANqgj|Dpefet3L8^#9QRLqC_YANqNx{m}nIKlijB z`hV#Eq5p^eANqgj|Dpef{vY~(=>MVrhyEY>f9U_A|A+n``hV#Eq5p^eANqgj|Dpef z{vY~(=>MVrhyDxwEbK1yU+BNk&&BRS|Aqbw{TKQ#^k3-b3wNQPo85)}3;h@RFZ5sN zztDf7|3d$T{tNvV`Y-ff=)cf^q5nevh5ifu7y2*sU+BNkf1&?E|Al^bXBYY}^k3+| z(0`%-LjQ&S3;h@RFZ2WXyU>53|3d$T{tNvV`Y-ff=)cf^q5nevh5n!Vf9mJW_EY~) z{Xg~p)c;fePyIjj^OXCk|EK<+`hV*GssE?`pZb65|Ed3{{-64P>Sx#XQ~yu>KlL+6 z`>Fq@{-64Jr2W*-wv9{LPyIjj|J46e|4;ot_5alWQ~yu>KlT6A|5HE5G=l^Cso&_p ze(L|J|EK<+`i&5bncGkO+}v2uaZmfH|EK<+`hV&FrQcY=e(C?E|CfHl1^cD{m;PV+ zf9e0F|Cjz>`hV&FrJu#yFa5vt|I*J<&)C6!>Hnqwm;PV+jUVinegg>mrT>@yU;2OP z|E1q3!hY%hrT>@yU;2$CjL{mSHHLM+^#9WTOaCwZzx4mo|4aWb{lE18(*H~UFa5vt z|I+_UKmWR4`We{$(*H~UFa5vuvxH;6_gnvO{lE49*8f}oZ~edZ|JMIo|8M=j_5arY zTR%fPs}1|D|F?eY4g0O1tKDz?zxDst|6BiW{lE3|w)?IBxBlPyf9tpSu;2Qb#&Nj& zt^c?F-}-;+|E>SG{@?n4>;J9)xBlPyf9wCP-(tjm>;J9)xBlPyf9wCP|F{0%`hV;H zt)H75>$^YtdEfof|408H{TA59RPK-dKl=aZ|D&I=+#mga^s~WZgZD@OAN_yy|Iz

!k;pY9(&ob|i{!9Ir`kB97 z>c7-~ssB>{rT$C(m-@NqUFyHof2sdc|E2y*{g?W=(_QMn)PJe}Qvap?OZ}JnFZEyQ zztn%J|5E>@{!9Ir`Y-i!tGm>HssB>{rT$C(m-;XDU+TZqf2sdc|E2y*{jB0H^H(ztVrD|4RRr z{ww`g`mgj~>A%u{rQcq{uJm8&ztVrD|4RRr{ww`g`kC=v>A%u{rTA%u{rTA%u{rT%Z22t^Zp8wf<}U*ZQyZU+cftf35#o|F!;W{nz@h^yuk~N+zt(@P|62dG{%if$`mgn0>%Z1-%Z22t)Ic)wf<}U*ZQyZU+d@ccdh?g|F!;W{nz?$^xx>e(SM`=M*ofe z8~r!>Z}i{jztMlA|3?3f{u})_`fv2#=)ci_qyI+#js6?`H~Me%-{`;5f203K|Be0| z{Wtn=^xx>e(SM`=M*ofe8~r!>Z}i{jztMlA|3?3f{u})_`fv2#=)ci_qyI+#js6?` zH~Me%-{`;5f203K|Be0|{Wtn=_225h)qku1R{yR3Tm85CZ}s2mztw-M|5pF4{#*UG z`fv5$>c7>0tN&L2t^Qm6xB74O-|D~Bf2;pi|E>O8{kQsW_225h)qku1R{yR3Tm85C zZ}s2mztw-M|5pF4{#*UG`fv5$>c7>0tN&L2t^Qm6xB5Bv*A%x&;b?dI@ATj4ztexG|4u)H zy*vGP`dRGRE8Cs^JN%Z53um4`ZdH$K_-@X2O{rCFs_227fs&}v7F3j%r-|N5EZ&zUV`Wf-v>%Z53um4{E zz5aXs_xf$g>|X!9{(Jp=_W11G>%Z53um4{Ez5aXs_xhRh-Rr;Cf3N>u|GoZu{rCE9 z((GQpU7FqNzt?}S|6c#S{(Jp45B698U;Tge|J83jYJc_r)&E!jU;TCw_E-O3{eSiU z)o;&ZfA#;>|5yKC{eHS&fA#;>Z-Hum_5aoXSN~uAfA!nF+h6^*a`spMU;Tge|JDCj z|6l!o_5aoXSN~uAfA#;>|5yKC{Wf;?SN~uAfA#;>Z*ymV_5aoXSN~uAfA#;>Z;NNP zc=lKSU;Tge|J84gWdHR4({C+n|MdUUZ=+=Y^#9ZUPyavtc6|0v|3Cfz^#9ZUPyavt z|MdUUZ{uhG^#9ZUPrvo8{nP(XzwMv>)BjKZKmGso|I`0Z|3Cfz^#9ZUPyavt|MdUU z|4;uv{r~j;)BjJuHLv~C|4;uv{r~h^_u4=G|MdUU|4+ZQul>{iPyavt|MdUU|4;uv z{rvy<|MCCh|HuE2{~!N9{(t=c`2X?$s$|KtDtzOave{(t=c z`2X?$s$|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW z|MCCh|HuE2{~!N9{(t=c`2X?$s$|KtD1|BwG4|3ChJ{Qvm> z@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$s$ z|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$ zs$|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh z|HuE2{~!N9{(t=c`2X?$s$|KtD1|BwG4|3ChJ{Qvm>@&Duh z$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$s$|KtD1 z|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$s$|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2 z{~!N9{(t=c`2X?$s$|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2 zAOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$s$|KtD1|BwG4 z|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$s$|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9 
z{(t=c`2X?$s$|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8 zfBgUW|MCCh|HuE2{~!N9{(t=c`2X?$s$|KtD1|BwG4|3ChJ z{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$s$|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c z`2X?$s$|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW z|MCCh|HuE2{~!N9{(t=c`2X?$s$|KtD1|BwG4|3ChJ{Qvm> z@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$s$ z|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$ zs$|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh z|HuE2{~!N9{(t=c`2X?$s$|KtD1|BwG4|3ChJ{Qvm>@&Duh z$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$s$|KtD1 z|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$s$|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2 z{~!N9{(t=c`2X?$s$|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2 zAOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$s$|KtD1|BwG4 z|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$s$|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9 z{(t=c`2X?$s$|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8 zfBgUW|MCCh|HuE2{~!N9{(t=c`2X?$s$|KtD1|BwG4|3ChJ z{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$s$|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c z`2X?$s$|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW z|MCCh|HuE2{~!N9{(t=c`2X?$s$|KtD1|BwG4|3ChJ{Qvm> z@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$s$ z|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$ zs$|KtD1|BwG4|3ChJ{Qvm>@&Duh$N!K2AOAo8fBgUW|MCCh z|HuE2{~!N9{(t=c`2X?$s$|KtD1|BwG4|3ChJ{Qvm>@&Duh z$N!K2AOAo8fBgUW|MCCh|HuE2{~!N9{(t=c`2X?$|H|B3z+ z{U`cQ^q=TI(SM@|H|B3z+{U`cQ^q=TI(SM@Oa+gs{d5~ss2;_r}|IzpXxu=f2#jf z|Ec~{{ipg*^`GiL)qkr0RR5{|Q~js^*`1BRR2@`PxU|5|5X1|{rrFYe^2#4 z)&ErgQ~msZ{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ 
z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ 
z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^ z{D1s^{D1s^{D1s^{D1s^{D1s^{D1s^{C^kvFZA>O@&EDv@&EDv@&EDv@&EDv@&EDv z@&EDv@&EDv@&EDv@&EDv@&EDv@&EDv@&EDv@&EDv@&EDv@&EnQ|5HEzAO9c!AO9c! zAO9c!AO9c!AO9c!AO9c!AO9c!AO9c!AO9c!AO9c!AO9c!AO9c!AO9c!AO9c!AO9c! zAO9c!AO9c!AO9c!AO9c!AO9c!AO9c!AO9c!AO9c!AO9c!AO9c!@ALovTQUQX8Gy_H zWCkEJ0GR>E3_xZ8G6RqqfXo171|Ty4nE}WQKxP0k1CSYj%m8EtATt1&0muwMW&kn+ zkQso?0AvOrGXR+Z$P7Sc05SuR8Gy_HWCkEJ0GR>E3_xZ8G6RqqfXo171|Ty4nE}WQ zKxP0k1CSYj%m8EtATt1&0muwMW&kn+kQso?0AvOrGXR+Z$P7Sc05SuR8Gy_HWCkEJ z0GR>E3_xZ8G6RqqfXo171|Ty4nE}WQKxP0k1CSYj%m8EtATt1&0muwMW&kn+kQso? 
z0AvOrGXR+Z$P7Sc05SuR8Gy_HWCkEJ0GR>E3_xZ8G6RqqfXo171|Ty4nE}WQKxP0k z1CSYj%m8EtATt1&0muwMW&kn+kQso?0AvOrGXR+Z$P7Sc05SuR8Gy_HWCkEJ0GR>E z3_xZ8G6RqqfXo171|Ty4nE}WQKxP0k1CSYj%m8EtATt1&0muwMW&kn+kQso?0AvOr zGXR+Z$P7Sc05SuR8Gy_HWCkEJ0GR>E3_xZ8G6RqqfXo171|Ty4nE}WQKxP0k1CSYj z%m8EtATt1&0muwMW&kn+kQso?0AvOrGXR+Z$P7Sc05SuR8Gy_HWCkEJ0GR>E3_xZ8 zG6RqqfXo171|Ty4nE}WQKxP0k1CSYj%m8EtATt1&0muwMW&kn+kQso?0AvOrGXR+Z z$P7Sc05SuR8Gy_HWCkEJ0GR>E3_xZ8G6RqqfXo171|Ty4nE}WQKxP0k1CSYj%m8Et zATt1&0muwMW&kn+kQso?0AvOrGXR+Z$P7Sc05SuR8Gy_HWCkEJ0GR>E3_xZ8G6Rqq zfXo171|Ty4nE}WQKxP0k1CSYj%m8EtATt1&0muwMW&kn+kQso?0AvOrGXR+Z$P7Sc z05SuR8Gy_HWCkEJ0GR>E3_xZ8G6RqqfXo171|Ty4nE}WQKxP0k1CSYj%m8EtATt1& z0muwMW&kn+kQso?0AvOrGXR+Z$P7Sc05SuR8Gy_HWCkEJ0GR>E3_xZ8G6RqqfXo17 z1|Ty4nE}WQKxP0k1CSYj%m8EtATt1&0muwMW&kn+kQso?0AvOrGXR+Z$P7Sc05SuR z8Gy_HWCkEJ0GR>E3_xZ8G6RqqfXo171|Ty4nE}WQKxP0k1CSYj%m8EtATt1&0muwM zW&kn+kQso?0AvOrGXR+Z$P7Sc05SuR8Gy_HWCkEJ0GR>E3_xZ8G6RqqfXo1NqyI+# zjs6?`W&kn+(2f2Z{Wtn=^xx>e(SM`=M*ofe8~r!>Z}i{jztMlA|3?3f{u}*f05SuR z8Gy_HWCkEJ0GR>E3_xZ8G6RqqfXo171|Ty4nE}WQKxP0k1CSYj%m8EtATt1&0muwM zW&kn+kQso?0AvOrGXR+Z$P7Sc05SuR8Gy_HWCkEJ0GR>E3_xZ8G6RqqfXo171|Ty4 znE}WQKxP0k1CSYj%m8EtATt1&0muwMW&kn+kQso?0AvOrGXR+Z$P7Sc05SuR8Gy_H zWCkEJ0GR>E3_!Q~Z}s2mztwLBATt1&0muwMxB74O-|D~Bf2;pi|E>O8{kQsW_225h z)qku1R{yR3Tm5(X@ATj4ztexG|4#p%{yY75`tS7L>A%x|r~gj>o&G!hclz)2-|4^8 zf2aRW|DFCj{dfBB^xx^f(|@P`PXC?$JNE3_xZ8G6RqqfXo171|Ty4nE}WQKxP0k1CSYj%m8$+|6c#S z{(JrR`tSAM>%Z53um4`Z8Gy_Hbg%zj|GoZu{bm3%1CSYj%m8EtATt1&0muwMW&kn+ zkQso?0AvOrGXR+Z$P7UD`tSAM>%Z53um4{Ez5aXs_xkVk-|N5Ef3N>u|GoZu{rCFs z_228i*MG17UjM!Rd;RzN@Acp7zt?}S|6c#S{(Jpq05SuR8Gy_HWCkEJ0GR>E3_xZ8 zG6RqqfXo171|Ty4nE}WQKxP0k1CSYj%m8EtATt1&0muwMW&kn+kQso?0AvOrGXR+Z z$P7Sc05SuR8Gy_HWCkEJ0GR>E3_xZ8G6RqqfXo171|Ty4nE}WQKxP0k1CSYj%m8Et zATt1&0muwMW&kn+kQso?0AvOrGXR+Z$P7Sc05SuR8Gy_HWCkEJ0GR>E3_xZ8G6Rqq zfXo171|Ty4nE}WQKxP0k1CSYj%m8EtATt1&0muwMW&kn+kQso?0AvOrGXR+Z$P7Sc z05SuR8Gy_HWCkEJ0GR>E3_xZ8G6RqqfXo171|Ty4nE}WQKxP0k1CSYj%m8EtATt1& 
z0muwMW&kn+kQso?0AvOrGXR+Z$P7Sc05SuR8Gy_HWCkEJ0GR>E3_xZ8G6RqqfXo17 z1|Ty4nE}WQKxP0k1CSYj%m8EtATt1&0muwMW&kn+kQso?0AvOrGXR+Z$P7Sc05SuR z8Gy_HWCkEJ0GR>E3_xZ8G6RqqfXo171|Ty4nE}WQKxP0k1CSYj%m8EtATt1&0muwM zW&nKtFKwMQfUE&z4Ipa(Sp&!#K-K`V29Py?tN~;VAZq|w1IQXc)&Q~wkTrm;0b~sz zYXDgT$QnS_0I~*01j&ahc$r18o*%<;IIa8SOYk$0UXu<4r>61HGsn!z+nyGum*5g130V!9M%91 zYXFBefWsQVVGZE025?vdIIIC2)&LG`0Eabz!y3S04dAc_a99I4tN|R>01j&ahc$r1 z8o*%<;IIa8SOYk$0UXu<4r>61HGsn!z+nyGum*5g130V!9M%91YXFBefWsQVVGZE0 z25?vdIIIC2)&LG`0Eabz!y3S04dAc_a99I4tN|R>01j&ahc$r18o*%<;IIa8SOYk$ z0UXu<4r>61HGsn!z+nyGum*5g130V!9M%91YXFBefWsQVVGZE025?vdIIIC2)&LG` z0Eabz!y3S04dAc_a99I4tN|R>01j&ahc$r18o*%<;IIa8SOYk$0UXu<4r>61HGsn! zz+nyGum*5g130V!9M%91YXFBefWsQVVGZE025?vdIIIC2)&LG`0Eabz!y3S04dAc_ za99I4tN|R>01j&ahc$r18o*%<;IIa8SOYk$0UXu<4r>61HGsn!z+nyGum*5g130V! z9M%91YXFBefWsQVVGZE025?vdIIIC2)&LG`0Eabz!y3TxKl-f!9M%91YXFBefWsQV zVGZE025?vdIIIC2)&LG`0Eabz!y3S04dAc_a99I4tN|R>01j&ahc$r18o*%<;IIa8 zSOYk$0UXu<4r>61HGsn!z+nyGum*7auYPL)hc$r18o*%<;IIa8SOYk$0UXu<4r>61 zHGsn!z+nyGum*5g130V!9M%91YXFBefWsQVVGZE025?vdIIIC2)&LG`0Eabz!y3S0 z4dAc_a99I4tN|R>01j&ahc$r18o*%<;IIa8SOYk$0UXu<4r>61HGsn!z+nyGum*5g z130V!9M%91YXFBefWsQVVGZE025?vdIIIC2)&LG`0Eabz!y3S04dD2HRovT-+(Z%>N&6941_I$CJMEFx_TQ7V?W$di1`rJ(8bCCF zXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks118 z0MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT z(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G z0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLaw zq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V z0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?W zL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz z1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$ zhz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c z1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh z5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC? 
z4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1 zAR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ( z8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2 zKs1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4Immo zG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4 zfM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCF zXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1Lyc zX#mjxq5(t$Xvx3iU-B>cm;6iqCI6Ct$-m@Z@-O+9{7e2N|B`>nzvN%?FZq}JOa3MQ zl7Gp+cm;6iqCI6Ct$-m@Z@-O+9{7e2N|B`>pzvbWZ zZ~3?UTmCKomVe8?<=^se`M3OA{w@EOf6Kq+-|}zyxBOfFE&rB(%fIE{@^AUK{9FDl z|CXNy5Dg$2Ks1180MP)V0Yn3c1`rLPE&rCE1`rJ(8bCCFw)|WEE&rB(%fIE{@^AUK z{9FDl|CWEtzvbWZZ~18e(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLaw zq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V z0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?W zL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz z1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$ zhz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c z1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh z5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC? 
z4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1 zAR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ( z8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=M&lp9T;OAR0h4fM@{G z0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLaw zq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V z0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?W zL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5<@&{4{`Q0MP)V0Yn3c1`rJ( z8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2 zKs1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4Immo zG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4 zfM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCF zXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks118 z0MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT z(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G z0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLaw zq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V z0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?W zL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz z1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$ zhz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c z1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh z5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC? 
z4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1 zAR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ( z8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2 zKs1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4Immo zG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4 zfM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCF zXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks118 z0MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT z(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G z0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLaw zq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP)V z0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz1BeC?4ImmoG=OLT(Ey?W zL<5Kh5Dg$2Ks1180MP)V0Yn3c1`rJ(8bCCFXaLawq5(t$hz1Z1AR0h4fM@{G0HOgz z1BeC?4ImmoG=OLT(Ey?WL<5Kh5Dg$2Ks1180MP*EV;5WH{9<02GE}}g%D0LcALR$d zIE=CsBNocbqkMFfSBj^1<-Mc4uej+~em}||iu(ZN_?ZQw^SP-f<>4p}{<+yF<#Lqk zQEo?hRJ36#Pe*xvF|S{K;g8*3zx?ulFKjr=e}3w3XZzpAk8f}M^!CQjHs1Z!uE$&c z{ldoaPXBHk@AU3(Z+7|X#hYE;Y#h62-%)%5uQl)}K7rR-ca*PR%-s?6&K|D_nic`CO=l!n7sa-XS&sA6N zT(vdLRUgaTOeD|EXw%$W{^w@Z``oMvo|{hixoI(*o4%)cbi=9mz#e@S&ZA`>^Qhr> zp3Gj(lc}Y7GUqZ+25#ob)A&5OC7!46{40Dmc|Ff&dFR 0 + 0 + 0 + 0 +<0x00> 0 +<0x01> 0 +<0x02> 0 +<0x03> 0 +<0x04> 0 +<0x05> 0 +<0x06> 0 +<0x07> 0 +<0x08> 0 +<0x09> 0 +<0x0A> 0 +<0x0B> 0 +<0x0C> 0 +<0x0D> 0 +<0x0E> 0 +<0x0F> 0 +<0x10> 0 +<0x11> 0 +<0x12> 0 +<0x13> 0 +<0x14> 0 +<0x15> 0 +<0x16> 0 +<0x17> 0 +<0x18> 0 +<0x19> 0 +<0x1A> 0 +<0x1B> 0 +<0x1C> 0 +<0x1D> 0 +<0x1E> 0 +<0x1F> 0 +<0x20> 0 +<0x21> 0 +<0x22> 0 +<0x23> 0 +<0x24> 0 +<0x25> 0 +<0x26> 0 +<0x27> 0 +<0x28> 0 +<0x29> 0 +<0x2A> 0 +<0x2B> 0 +<0x2C> 0 +<0x2D> 0 +<0x2E> 0 +<0x2F> 0 +<0x30> 0 +<0x31> 0 +<0x32> 0 +<0x33> 0 +<0x34> 0 +<0x35> 0 +<0x36> 0 +<0x37> 0 +<0x38> 0 +<0x39> 0 +<0x3A> 0 +<0x3B> 0 +<0x3C> 0 +<0x3D> 0 +<0x3E> 0 +<0x3F> 0 +<0x40> 0 +<0x41> 0 +<0x42> 0 +<0x43> 0 +<0x44> 0 +<0x45> 0 +<0x46> 0 +<0x47> 
0 +<0x48> 0 +<0x49> 0 +<0x4A> 0 +<0x4B> 0 +<0x4C> 0 +<0x4D> 0 +<0x4E> 0 +<0x4F> 0 +<0x50> 0 +<0x51> 0 +<0x52> 0 +<0x53> 0 +<0x54> 0 +<0x55> 0 +<0x56> 0 +<0x57> 0 +<0x58> 0 +<0x59> 0 +<0x5A> 0 +<0x5B> 0 +<0x5C> 0 +<0x5D> 0 +<0x5E> 0 +<0x5F> 0 +<0x60> 0 +<0x61> 0 +<0x62> 0 +<0x63> 0 +<0x64> 0 +<0x65> 0 +<0x66> 0 +<0x67> 0 +<0x68> 0 +<0x69> 0 +<0x6A> 0 +<0x6B> 0 +<0x6C> 0 +<0x6D> 0 +<0x6E> 0 +<0x6F> 0 +<0x70> 0 +<0x71> 0 +<0x72> 0 +<0x73> 0 +<0x74> 0 +<0x75> 0 +<0x76> 0 +<0x77> 0 +<0x78> 0 +<0x79> 0 +<0x7A> 0 +<0x7B> 0 +<0x7C> 0 +<0x7D> 0 +<0x7E> 0 +<0x7F> 0 +<0x80> 0 +<0x81> 0 +<0x82> 0 +<0x83> 0 +<0x84> 0 +<0x85> 0 +<0x86> 0 +<0x87> 0 +<0x88> 0 +<0x89> 0 +<0x8A> 0 +<0x8B> 0 +<0x8C> 0 +<0x8D> 0 +<0x8E> 0 +<0x8F> 0 +<0x90> 0 +<0x91> 0 +<0x92> 0 +<0x93> 0 +<0x94> 0 +<0x95> 0 +<0x96> 0 +<0x97> 0 +<0x98> 0 +<0x99> 0 +<0x9A> 0 +<0x9B> 0 +<0x9C> 0 +<0x9D> 0 +<0x9E> 0 +<0x9F> 0 +<0xA0> 0 +<0xA1> 0 +<0xA2> 0 +<0xA3> 0 +<0xA4> 0 +<0xA5> 0 +<0xA6> 0 +<0xA7> 0 +<0xA8> 0 +<0xA9> 0 +<0xAA> 0 +<0xAB> 0 +<0xAC> 0 +<0xAD> 0 +<0xAE> 0 +<0xAF> 0 +<0xB0> 0 +<0xB1> 0 +<0xB2> 0 +<0xB3> 0 +<0xB4> 0 +<0xB5> 0 +<0xB6> 0 +<0xB7> 0 +<0xB8> 0 +<0xB9> 0 +<0xBA> 0 +<0xBB> 0 +<0xBC> 0 +<0xBD> 0 +<0xBE> 0 +<0xBF> 0 +<0xC0> 0 +<0xC1> 0 +<0xC2> 0 +<0xC3> 0 +<0xC4> 0 +<0xC5> 0 +<0xC6> 0 +<0xC7> 0 +<0xC8> 0 +<0xC9> 0 +<0xCA> 0 +<0xCB> 0 +<0xCC> 0 +<0xCD> 0 +<0xCE> 0 +<0xCF> 0 +<0xD0> 0 +<0xD1> 0 +<0xD2> 0 +<0xD3> 0 +<0xD4> 0 +<0xD5> 0 +<0xD6> 0 +<0xD7> 0 +<0xD8> 0 +<0xD9> 0 +<0xDA> 0 +<0xDB> 0 +<0xDC> 0 +<0xDD> 0 +<0xDE> 0 +<0xDF> 0 +<0xE0> 0 +<0xE1> 0 +<0xE2> 0 +<0xE3> 0 +<0xE4> 0 +<0xE5> 0 +<0xE6> 0 +<0xE7> 0 +<0xE8> 0 +<0xE9> 0 +<0xEA> 0 +<0xEB> 0 +<0xEC> 0 +<0xED> 0 +<0xEE> 0 +<0xEF> 0 +<0xF0> 0 +<0xF1> 0 +<0xF2> 0 +<0xF3> 0 +<0xF4> 0 +<0xF5> 0 +<0xF6> 0 +<0xF7> 0 +<0xF8> 0 +<0xF9> 0 +<0xFA> 0 +<0xFB> 0 +<0xFC> 0 +<0xFD> 0 +<0xFE> 0 +<0xFF> 0 +▁t -0 +▁a -1 +in -2 +he -3 +re -4 +on -5 +er -6 +▁the -7 +▁s -8 +▁w -9 +or -10 +at -11 +nd -12 +ou -13 +▁c -14 +it -15 +es -16 +▁f -17 +is -18 +en -19 +ing 
-20 +▁b -21 +▁p -22 +▁o -23 +an -24 +ed -25 +al -26 +▁to -27 +▁m -28 +ar -29 +▁and -30 +▁in -31 +▁of -32 +▁d -33 +le -34 +ic -35 +as -36 +om -37 +▁h -38 +ion -39 +▁th -40 +il -41 +▁T -42 +ent -43 +▁l -44 +ve -45 +▁y -46 +ro -47 +st -48 +▁I -49 +▁e -50 +▁re -51 +▁n -52 +▁S -53 +▁g -54 +et -55 +ct -56 +▁A -57 +▁you -58 +▁C -59 +ly -60 +▁for -61 +id -62 +▁is -63 +ay -64 +▁on -65 +▁be -66 +ot -67 +ow -68 +ol -69 +am -70 +ce -71 +ig -72 +us -73 +ad -74 +im -75 +▁M -76 +ch -77 +el -78 +ver -79 +ith -80 +ut -81 +▁st -82 +ation -83 +ur -84 +▁P -85 +▁with -86 +▁that -87 +ir -88 +▁B -89 +▁W -90 +▁The -91 +▁it -92 +▁he -93 +ra -94 +ill -95 +ers -96 +▁al -97 +un -98 +ul -99 +▁an -100 +▁D -101 +▁H -102 +▁F -103 +out -104 +▁pro -105 +▁as -106 +▁wh -107 +▁are -108 +ke -109 +se -110 +ter -111 +▁we -112 +if -113 +▁ha -114 +ge -115 +oo -116 +▁R -117 +our -118 +pp -119 +ck -120 +ate -121 +ess -122 +▁at -123 +▁con -124 +▁com -125 +▁or -126 +▁L -127 +est -128 +her -129 +ore -130 +ment -131 +▁fr -132 +ab -133 +igh -134 +▁- -135 +▁ne -136 +▁N -137 +ort -138 +▁se -139 +▁G -140 +▁your -141 +ld -142 +▁E -143 +ist -144 +ri -145 +op -146 +▁( -147 +▁ex -148 +ity -149 +ure -150 +▁O -151 +em -152 +▁v -153 +qu -154 +ant -155 +art -156 +ive -157 +ust -158 +um -159 +▁was -160 +▁have -161 +pe -162 +▁from -163 +▁this -164 +▁de -165 +▁r -166 +▁sh -167 +th -168 +ain -169 +ies -170 +▁can -171 +up -172 +▁will -173 +▁ch -174 +and -175 +▁by -176 +os -177 +ight -178 +nt -179 +ie -180 +▁us -181 +ome -182 +all -183 +ard -184 +▁not -185 +ud -186 +res -187 +▁le -188 +▁J -189 +ast -190 +▁pl -191 +ost -192 +▁su -193 +▁ab -194 +iv -195 +ear -196 +▁wor -197 +ide -198 +ial -199 +rou -200 +▁all -201 +gh -202 +od -203 +oc -204 +ak -205 +te -206 +ine -207 +ould -208 +▁j -209 +red -210 +ag -211 +▁has -212 +.. 
-213 +ice -214 +▁Th -215 +ell -216 +▁U -217 +age -218 +▁do -219 +▁k -220 +ack -221 +fe -222 +ook -223 +ac -224 +▁ad -225 +per -226 +▁In -227 +ip -228 +▁comp -229 +ake -230 +▁out -231 +ions -232 +ally -233 +▁up -234 +are -235 +▁but -236 +▁me -237 +▁whe -238 +pt -239 +lo -240 +ry -241 +able -242 +▁our -243 +▁“ -244 +one -245 +ind -246 +▁en -247 +▁more -248 +ail -249 +ite -250 +ther -251 +▁their -252 +▁Y -253 +ich -254 +▁so -255 +very -256 +ime -257 +cc -258 +ood -259 +ated -260 +ong -261 +▁K -262 +▁my -263 +▁sa -264 +for -265 +iz -266 +ame -267 +ber -268 +▁they -269 +▁St -270 +▁te -271 +so -272 +ous -273 +▁one -274 +ans -275 +act -276 +▁about -277 +ll -278 +ike -279 +du -280 +▁cont -281 +ase -282 +og -283 +▁V -284 +▁im -285 +ick -286 +▁cl -287 +ia -288 +ance -289 +▁work -290 +▁inc -291 +ign -292 +▁un -293 +ire -294 +ree -295 +▁off -296 +▁fe -297 +▁who -298 +▁man -299 +ue -300 +ace -301 +ach -302 +reat -303 +ub -304 +▁It -305 +ction -306 +▁go -307 +ne -308 +▁app -309 +▁year -310 +▁new -311 +ep -312 +ult -313 +ib -314 +ap -315 +▁his -316 +ays -317 +erv -318 +▁Ch -319 +▁We -320 +▁res -321 +und -322 +▁" -323 +▁sp -324 +ass -325 +ark -326 +ations -327 +ff -328 +▁qu -329 +ary -330 +▁per -331 +▁also -332 +ile -333 +▁which -334 +▁int -335 +▁time -336 +ove -337 +form -338 +ven -339 +ount -340 +▁get -341 +▁tr -342 +own -343 +▁like -344 +▁some -345 +▁other -346 +ond -347 +ents -348 +ings -349 +vel -350 +▁any -351 +ical -352 +ence -353 +▁part -354 +av -355 +▁been -356 +▁dis -357 +▁This -358 +▁over -359 +ition -360 +ress -361 +pl -362 +ors -363 +▁rec -364 +▁them -365 +▁He -366 +▁sc -367 +▁ar -368 +ild -369 +▁pe -370 +port -371 +ink -372 +low -373 +▁ag -374 +▁ro -375 +▁her -376 +▁when -377 +ound -378 +▁kn -379 +ord -380 +mer -381 +int -382 +▁need -383 +ish -384 +▁pr -385 +irst -386 +ens -387 +ough -388 +▁said -389 +ru -390 +▁pre -391 +▁spe -392 +▁just -393 +wn -394 +ren -395 +▁what -396 +▁there -397 +▁if -398 +▁acc -399 +▁than -400 +▁its -401 +ov -402 +▁Re -403 +day -404 +vers 
-405 +▁would -406 +ater -407 +fter -408 +▁had -409 +ade -410 +ning -411 +lud -412 +▁hel -413 +▁– -414 +▁were -415 +▁am -416 +old -417 +rough -418 +▁into -419 +▁des -420 +ory -421 +ople -422 +itt -423 +ang -424 +▁help -425 +▁tw -426 +▁how -427 +use -428 +lic -429 +ool -430 +▁bec -431 +▁add -432 +anc -433 +▁first -434 +ose -435 +▁make -436 +▁comm -437 +ons -438 +amp -439 +ob -440 +hed -441 +▁prov -442 +▁Wh -443 +▁tra -444 +... -445 +ft -446 +▁look -447 +▁You -448 +▁includ -449 +ual -450 +▁people -451 +les -452 +▁serv -453 +gr -454 +▁col -455 +ian -456 +ments -457 +ful -458 +▁know -459 +▁produ -460 +ates -461 +iew -462 +▁Ne -463 +▁em -464 +rent -465 +ious -466 +tern -467 +▁she -468 +round -469 +ek -470 +▁every -471 +▁through -472 +▁may -473 +ating -474 +▁no -475 +▁only -476 +pport -477 +▁back -478 +▁most -479 +ect -480 +▁bu -481 +▁want -482 +ict -483 +ices -484 +▁As -485 +▁If -486 +▁well -487 +ities -488 +▁ind -489 +we -490 +▁bet -491 +▁ph -492 +ise -493 +▁use -494 +▁two -495 +▁co -496 +xt -497 +ont -498 +com -499 +▁act -500 +▁und -501 +ph -502 +iness -503 +lect -504 +iss -505 +▁after -506 +oy -507 +▁Se -508 +ife -509 +ause -510 +▁play -511 +fect -512 +▁| -513 +oth -514 +▁& -515 +ily -516 +row -517 +ork -518 +enc -519 +▁exper -520 +ject -521 +▁cons -522 +hen -523 +cial -524 +urn -525 +ert -526 +▁years -527 +als -528 +▁these -529 +ank -530 +ting -531 +▁$ -532 +▁Com -533 +aw -534 +▁bus -535 +▁An -536 +▁Un -537 +▁stud -538 +any -539 +bs -540 +ange -541 +▁For -542 +ures -543 +vent -544 +▁good -545 +ational -546 +aking -547 +▁see -548 +▁ke -549 +ased -550 +ific -551 +▁Pro -552 +▁now -553 +fore -554 +▁under -555 +▁very -556 +▁many -557 +▁reg -558 +▁sm -559 +ward -560 +hing -561 +▁imp -562 +get -563 +oint -564 +▁dif -565 +▁ra -566 +▁way -567 +erson -568 +ience -569 +▁start -570 +ts -571 +pect -572 +▁fin -573 +▁great -574 +▁And -575 +yst -576 +uring -577 +▁De -578 +▁rel -579 +formation -580 +▁gu -581 +ility -582 +ible -583 +▁rem -584 +▁could -585 +oss -586 +hip -587 +▁dec 
-588 +uch -589 +▁even -590 +▁inv -591 +). -592 +ty -593 +ics -594 +rit -595 +ract -596 +▁own -597 +▁sec -598 +cess -599 +velop -600 +▁day -601 +▁where -602 +▁show -603 +ident -604 +elf -605 +hes -606 +alth -607 +▁high -608 +its -609 +▁loc -610 +air -611 +▁find -612 +olog -613 +▁ac -614 +ull -615 +nds -616 +▁Al -617 +▁don -618 +▁ass -619 +▁home -620 +▁should -621 +line -622 +ath -623 +▁ent -624 +▁best -625 +▁here -626 +▁down -627 +lease -628 +▁then -629 +▁Sh -630 +ied -631 +ble -632 +ular -633 +|| -634 +▁right -635 +The -636 +arch -637 +▁set -638 +chool -639 +ited -640 +▁car -641 +▁av -642 +▁read -643 +▁New -644 +▁mon -645 +gan -646 +▁min -647 +▁take -648 +▁business -649 +erm -650 +▁fam -651 +▁ins -652 +ner -653 +ix -654 +▁inst -655 +▁fl -656 +ys -657 +▁design -658 +▁att -659 +ystem -660 +▁br -661 +alk -662 +▁too -663 +.” -664 +▁che -665 +▁bl -666 +io -667 +▁long -668 +▁much -669 +ative -670 +▁information -671 +▁Be -672 +▁made -673 +▁last -674 +ollow -675 +ason -676 +other -677 +ues -678 +gram -679 +arket -680 +▁product -681 +omet -682 +▁because -683 +ock -684 +ax -685 +▁Fr -686 +), -687 +rib -688 +▁week -689 +▁call -690 +▁did -691 +▁before -692 +▁think -693 +▁Cl -694 +▁team -695 +▁world -696 +atch -697 +me -698 +▁cre -699 +ale -700 +pen -701 +oun -702 +▁again -703 +▁sur -704 +ower -705 +▁Ad -706 +▁vis -707 +ient -708 +▁But -709 +chn -710 +pr -711 +az -712 +ustom -713 +land -714 +▁requ -715 +▁art -716 +▁develop -717 +▁being -718 +▁diffe -719 +▁pres -720 +rest -721 +way -722 +▁person -723 +ng -724 +ener -725 +▁such -726 +▁Le -727 +▁inte -728 +▁mem -729 +▁disc -730 +▁him -731 +ces -732 +▁support -733 +▁life -734 +arn -735 +ug -736 +ving -737 +ced -738 +ouse -739 +unity -740 +ave -741 +ince -742 +irect -743 +▁med -744 +▁Ar -745 +▁does -746 +▁while -747 +▁those -748 +ins -749 +▁provid -750 +ash -751 +arm -752 +view -753 +▁sim -754 +ivers -755 +ros -756 +▁lead -757 +▁sk -758 +akes -759 +ality -760 +▁pol -761 +▁end -762 +▁mod -763 +▁used -764 +▁cur -765 +ives -766 
+▁around -767 +ric -768 +led -769 +ier -770 +▁free -771 +ailable -772 +ually -773 +▁each -774 +▁care -775 +▁comple -776 +▁follow -777 +ional -778 +ublic -779 +▁det -780 +▁On -781 +ple -782 +read -783 +der -784 +▁ret -785 +ize -786 +▁trans -787 +ather -788 +▁love -789 +▁There -790 +ages -791 +▁post -792 +ines -793 +▁child -794 +▁system -795 +ars -796 +▁bo -797 +ene -798 +roup -799 +▁eas -800 +▁book -801 +▁num -802 +▁ed -803 +▁How -804 +▁ser -805 +,” -806 +imes -807 +▁Te -808 +▁really -809 +▁count -810 +ets -811 +▁gr -812 +▁str -813 +▁program -814 +▁custom -815 +ton -816 +▁top -817 +▁run -818 +▁del -819 +au -820 +▁All -821 +iet -822 +▁cour -823 +▁found -824 +ffect -825 +▁So -826 +▁place -827 +▁list -828 +ness -829 +ved -830 +iel -831 +▁form -832 +▁month -833 +▁prof -834 +▁char -835 +ah -836 +▁feel -837 +▁To -838 +ute -839 +▁available -840 +▁going -841 +▁inter -842 +ittle -843 +▁They -844 +▁sign -845 +▁sub -846 +gg -847 +▁market -848 +man -849 +ature -850 +ames -851 +▁fun -852 +▁cle -853 +▁still -854 +cept -855 +▁Pl -856 +ways -857 +▁somet -858 +▁different -859 +▁aut -860 +▁both -861 +▁three -862 +▁few -863 +orn -864 +▁health -865 +▁though -866 +▁Ex -867 +ital -868 +ired -869 +▁pur -870 +ering -871 +▁rep -872 +▁adv -873 +▁exp -874 +▁techn -875 +▁happ -876 +▁open -877 +▁lot -878 +▁report -879 +▁company -880 +ata -881 +ween -882 +▁keep -883 +meric -884 +▁Sc -885 +orth -886 +▁plan -887 +▁hand -888 +ining -889 +bers -890 +iqu -891 +▁She -892 +tt -893 +ants -894 +be -895 +▁ext -896 +▁lar -897 +▁game -898 +▁sol -899 +▁point -900 +▁Q -901 +ross -902 +ology -903 +▁say -904 +ves -905 +atur -906 +▁met -907 +▁import -908 +▁process -909 +▁fil -910 +▁frie -911 +▁including -912 +▁family -913 +▁ev -914 +▁using -915 +▁same -916 +work -917 +▁project -918 +ized -919 +uc -920 +oot -921 +▁school -922 +▁between -923 +▁What -924 +ling -925 +ik -926 +▁little -927 +ution -928 +att -929 +ott -930 +▁experience -931 +▁during -932 +." 
-933 +less -934 +▁state -935 +iving -936 +▁Col -937 +▁i -938 +▁next -939 +uss -940 +els -941 +▁service -942 +aint -943 +▁real -944 +ody -945 +oh -946 +▁build -947 +▁allow -948 +ms -949 +reen -950 +▁opt -951 +▁water -952 +ished -953 +▁things -954 +▁come -955 +▁contin -956 +thing -957 +▁Americ -958 +▁var -959 +▁Ph -960 +▁dri -961 +ists -962 +uck -963 +ever -964 +ern -965 +ield -966 +▁cent -967 +arly -968 +over -969 +rand -970 +▁small -971 +▁rece -972 +▁organ -973 +▁appro -974 +▁rest -975 +gy -976 +▁big -977 +self -978 +▁Ind -979 +▁ref -980 +ex -981 +▁always -982 +▁mus -983 +▁better -984 +▁sure -985 +▁With -986 +▁interest -987 +▁win -988 +aut -989 +loy -990 +▁full -991 +▁pat -992 +▁pass -993 +▁poss -994 +ery -995 +illion -996 +▁online -997 +▁pri -998 +▁iss -999 +▁ty -1000 +▁put -1001 +ined -1002 +cent -1003 +ware -1004 +▁When -1005 +▁result -1006 +▁gener -1007 +▁since -1008 +▁Bl -1009 +▁ve -1010 +ps -1011 +▁try -1012 +▁direct -1013 +▁quest -1014 +iversity -1015 +▁mov -1016 +▁stand -1017 +▁partic -1018 +▁days -1019 +▁perform -1020 +▁group -1021 +ok -1022 +▁val -1023 +▁pay -1024 +▁ide -1025 +▁head -1026 +▁special -1027 +▁bel -1028 +▁Tr -1029 +▁today -1030 +▁Chr -1031 +▁something -1032 +▁class -1033 +▁provide -1034 +ients -1035 +ours -1036 +▁tri -1037 +▁second -1038 +▁services -1039 +▁ann -1040 +▁Our -1041 +ared -1042 +▁Con -1043 +ccess -1044 +▁resp -1045 +joy -1046 +▁phot -1047 +▁conf -1048 +▁Is -1049 +ploy -1050 +▁Or -1051 +▁dist -1052 +▁hard -1053 +▁without -1054 +pping -1055 +con -1056 +▁Sp -1057 +▁number -1058 +▁Z -1059 +ER -1060 +▁bro -1061 +▁def -1062 +▁sl -1063 +▁cor -1064 +▁must -1065 +oney -1066 +▁blo -1067 +▁another -1068 +ision -1069 +▁vide -1070 +stand -1071 +eng -1072 +▁current -1073 +cl -1074 +outh -1075 +▁give -1076 +▁wom -1077 +▁old -1078 +aj -1079 +ically -1080 +▁access -1081 +▁able -1082 +▁webs -1083 +ards -1084 +▁important -1085 +ior -1086 +iver -1087 +," -1088 +▁cr -1089 +ately -1090 +ium -1091 +▁— -1092 +▁cost -1093 +sh -1094 +▁grow -1095 +▁ask 
-1096 +ope -1097 +ral -1098 +▁meet -1099 +▁fact -1100 +▁invest -1101 +▁At -1102 +▁area -1103 +ruct -1104 +▁Cent -1105 +▁public -1106 +▁got -1107 +raph -1108 +▁Res -1109 +▁wr -1110 +▁bre -1111 +▁soc -1112 +ote -1113 +▁visit -1114 +▁proble -1115 +ered -1116 +▁light -1117 +▁incre -1118 +▁US -1119 +ample -1120 +▁working -1121 +ems -1122 +▁ob -1123 +ense -1124 +▁data -1125 +▁unt -1126 +ann -1127 +rence -1128 +pped -1129 +br -1130 +▁level -1131 +▁proper -1132 +▁looking -1133 +▁never -1134 +▁sal -1135 +▁might -1136 +inal -1137 +▁No -1138 +ats -1139 +ffic -1140 +▁order -1141 +ential -1142 +ember -1143 +▁effect -1144 +ley -1145 +▁event -1146 +▁fac -1147 +▁students -1148 +▁rese -1149 +▁food -1150 +▁local -1151 +▁Man -1152 +ency -1153 +▁four -1154 +▁Comm -1155 +▁eng -1156 +▁profess -1157 +ird -1158 +▁let -1159 +▁That -1160 +ission -1161 +▁offer -1162 +▁inf -1163 +ww -1164 +▁enjoy -1165 +▁site -1166 +▁Pr -1167 +▁spec -1168 +▁season -1169 +▁check -1170 +▁addition -1171 +ertain -1172 +▁within -1173 +▁children -1174 +gin -1175 +▁oper -1176 +▁pos -1177 +▁test -1178 +ording -1179 +▁making -1180 +▁My -1181 +▁view -1182 +lection -1183 +▁room -1184 +▁sit -1185 +▁prom -1186 +▁power -1187 +ories -1188 +ney -1189 +▁expl -1190 +here -1191 +▁ca -1192 +load -1193 +ently -1194 +▁products -1195 +rol -1196 +▁night -1197 +▁past -1198 +▁community -1199 +▁pop -1200 +▁Mar -1201 +▁sing -1202 +▁against -1203 +let -1204 +ream -1205 +tend -1206 +▁until -1207 +ases -1208 +▁less -1209 +▁' -1210 +utes -1211 +▁el -1212 +ains -1213 +agement -1214 +▁est -1215 +med -1216 +ids -1217 +▁email -1218 +ieve -1219 +▁job -1220 +iron -1221 +ised -1222 +ator -1223 +▁quality -1224 +ivid -1225 +▁May -1226 +ina -1227 +▁intern -1228 +▁indust -1229 +to -1230 +ills -1231 +▁gl -1232 +▁website -1233 +▁prote -1234 +▁impro -1235 +▁law -1236 +ode -1237 +ks -1238 +orm -1239 +▁equ -1240 +▁App -1241 +▁turn -1242 +ified -1243 +enn -1244 +urs -1245 +co -1246 +ged -1247 +IN -1248 +▁Br -1249 +▁away -1250 +icle -1251 +▁air -1252 +▁Fe 
-1253 +▁contact -1254 +▁creat -1255 +▁toget -1256 +We -1257 +▁together -1258 +▁University -1259 +bo -1260 +istr -1261 +ique -1262 +pend -1263 +aring -1264 +▁supp -1265 +▁learn -1266 +▁success -1267 +▁pract -1268 +▁Co -1269 +▁dr -1270 +ury -1271 +▁complete -1272 +▁Can -1273 +▁leg -1274 +iday -1275 +▁applic -1276 +▁expect -1277 +▁needs -1278 +▁include -1279 +por -1280 +▁Christ -1281 +iety -1282 +ocus -1283 +atter -1284 +ider -1285 +▁Cont -1286 +▁. -1287 +▁detail -1288 +▁large -1289 +▁easy -1290 +▁la -1291 +▁Car -1292 +ability -1293 +ret -1294 +▁One -1295 +oci -1296 +▁along -1297 +irl -1298 +▁course -1299 +▁says -1300 +▁change -1301 +▁news -1302 +arent -1303 +aster -1304 +room -1305 +▁present -1306 +ger -1307 +▁offic -1308 +vern -1309 +▁name -1310 +▁chang -1311 +hor -1312 +ism -1313 +▁conc -1314 +yle -1315 +ym -1316 +atures -1317 +▁beaut -1318 +▁Am -1319 +▁Do -1320 +▁activ -1321 +pos -1322 +▁cap -1323 +part -1324 +lish -1325 +ump -1326 +ising -1327 +▁members -1328 +ries -1329 +▁Me -1330 +▁money -1331 +▁Ste -1332 +enef -1333 +min -1334 +iting -1335 +▁employ -1336 +rap -1337 +▁video -1338 +▁bas -1339 +▁times -1340 +the -1341 +▁talk -1342 +▁Eng -1343 +ify -1344 +▁buy -1345 +ec -1346 +augh -1347 +▁beh -1348 +▁music -1349 +itions -1350 +▁Ro -1351 +▁fav -1352 +▁These -1353 +▁house -1354 +une -1355 +▁pa -1356 +ift -1357 +nect -1358 +▁opport -1359 +▁dem -1360 +▁sw -1361 +side -1362 +▁/ -1363 +ane -1364 +▁hist -1365 +▁why -1366 +Th -1367 +▁En -1368 +▁dra -1369 +ably -1370 +▁cond -1371 +▁ce -1372 +▁case -1373 +▁please -1374 +▁treat -1375 +by -1376 +mber -1377 +ron -1378 +veral -1379 +ots -1380 +▁perfect -1381 +aff -1382 +rie -1383 +aterial -1384 +pecial -1385 +▁live -1386 +ready -1387 +fort -1388 +ten -1389 +▁govern -1390 +▁account -1391 +▁dev -1392 +▁short -1393 +ention -1394 +▁thing -1395 +ization -1396 +▁create -1397 +▁following -1398 +▁Che -1399 +▁story -1400 +ON -1401 +▁clo -1402 +▁left -1403 +book -1404 +▁const -1405 +ived -1406 +viron -1407 +▁review -1408 +▁below -1409 
+▁trad -1410 +▁understand -1411 +▁hum -1412 +▁million -1413 +son -1414 +!! -1415 +▁side -1416 +itive -1417 +▁having -1418 +alf -1419 +▁Your -1420 +ored -1421 +▁After -1422 +▁hot -1423 +ohn -1424 +ows -1425 +sc -1426 +▁page -1427 +etwork -1428 +▁Med -1429 +▁Fl -1430 +▁based -1431 +▁focus -1432 +▁makes -1433 +of -1434 +▁word -1435 +AT -1436 +RE -1437 +▁research -1438 +▁move -1439 +▁writ -1440 +▁across -1441 +▁camp -1442 +▁personal -1443 +ienc -1444 +▁link -1445 +▁line -1446 +ances -1447 +▁kind -1448 +▁possible -1449 +▁cou -1450 +rop -1451 +▁ever -1452 +▁mar -1453 +▁pot -1454 +uture -1455 +ividual -1456 +▁getting -1457 +▁comes -1458 +▁already -1459 +uly -1460 +▁benef -1461 +ajor -1462 +▁elect -1463 +▁educ -1464 +vious -1465 +▁record -1466 +ured -1467 +uper -1468 +osp -1469 +▁country -1470 +▁become -1471 +▁soft -1472 +▁Rep -1473 +ination -1474 +oice -1475 +orts -1476 +▁often -1477 +▁share -1478 +▁friends -1479 +▁several -1480 +ush -1481 +▁Ass -1482 +▁done -1483 +iven -1484 +ister -1485 +▁social -1486 +▁Count -1487 +▁es -1488 +duct -1489 +▁pack -1490 +▁bit -1491 +wards -1492 +▁fund -1493 +ead -1494 +iam -1495 +▁enough -1496 +▁quick -1497 +▁mil -1498 +▁tre -1499 +ones -1500 +▁minutes -1501 +uro -1502 +▁Please -1503 +conom -1504 +fer -1505 +▁bring -1506 +▁Inst -1507 +inc -1508 +▁women -1509 +uff -1510 +▁development -1511 +▁vers -1512 +▁Serv -1513 +▁hours -1514 +▁Des -1515 +▁body -1516 +▁mult -1517 +unch -1518 +app -1519 +oose -1520 +ips -1521 +▁tell -1522 +ides -1523 +iful -1524 +▁John -1525 +vironment -1526 +▁return -1527 +▁purch -1528 +mend -1529 +▁: -1530 +aim -1531 +▁cut -1532 +▁men -1533 +ners -1534 +▁city -1535 +▁lo -1536 +arl -1537 +reet -1538 +ape -1539 +▁Intern -1540 +▁deal -1541 +▁X -1542 +oon -1543 +▁individual -1544 +AN -1545 +▁exc -1546 +▁won -1547 +ST -1548 +▁ens -1549 +▁young -1550 +ted -1551 +ateg -1552 +▁Here -1553 +▁material -1554 +▁hold -1555 +▁compet -1556 +ograph -1557 +▁sum -1558 +▁... 
-1559 +▁Comp -1560 +▁others -1561 +▁jo -1562 +yn -1563 +utions -1564 +▁Tw -1565 +▁started -1566 +▁called -1567 +▁industry -1568 +▁months -1569 +▁mom -1570 +▁term -1571 +▁non -1572 +▁orig -1573 +idd -1574 +ights -1575 +▁didn -1576 +ript -1577 +▁land -1578 +ee -1579 +ai -1580 +nder -1581 +▁Gu -1582 +▁walk -1583 +▁clean -1584 +▁future -1585 +▁rele -1586 +▁American -1587 +▁However -1588 +▁pie -1589 +., -1590 +▁City -1591 +▁far -1592 +▁commun -1593 +lished -1594 +ched -1595 +▁po -1596 +▁doing -1597 +▁major -1598 +ained -1599 +▁control -1600 +▁space -1601 +ource -1602 +fact -1603 +ball -1604 +urity -1605 +arr -1606 +osed -1607 +▁wa -1608 +▁low -1609 +ges -1610 +▁cover -1611 +▁Ab -1612 +▁store -1613 +anies -1614 +lement -1615 +ference -1616 +ford -1617 +▁occ -1618 +▁games -1619 +▁means -1620 +AR -1621 +lege -1622 +▁Not -1623 +▁mind -1624 +▁offers -1625 +oring -1626 +▁Tra -1627 +▁yet -1628 +▁bra -1629 +▁Dr -1630 +▁came -1631 +▁five -1632 +▁percent -1633 +▁chall -1634 +▁comb -1635 +▁Min -1636 +▁took -1637 +▁invol -1638 +▁doesn -1639 +sel -1640 +▁lim -1641 +orld -1642 +▁fore -1643 +ilities -1644 +▁* -1645 +▁customers -1646 +▁features -1647 +bal -1648 +▁State -1649 +▁least -1650 +▁strong -1651 +▁step -1652 +▁price -1653 +ches -1654 +▁heart -1655 +▁God -1656 +▁Ke -1657 +urther -1658 +▁range -1659 +▁specific -1660 +▁More -1661 +▁main -1662 +most -1663 +▁require -1664 +▁close -1665 +▁School -1666 +▁once -1667 +▁key -1668 +▁pict -1669 +sw -1670 +err -1671 +ler -1672 +▁upd -1673 +ilt -1674 +ither -1675 +▁mean -1676 +▁Bo -1677 +▁early -1678 +▁ey -1679 +▁cra -1680 +▁Jan -1681 +▁Now -1682 +▁tool -1683 +▁stay -1684 +▁discuss -1685 +▁government -1686 +illed -1687 +aces -1688 +af -1689 +▁series -1690 +▁tem -1691 +ources -1692 +▁hig -1693 +▁priv -1694 +▁Bro -1695 +▁ste -1696 +▁technology -1697 +pro -1698 +cle -1699 +▁install -1700 +▁charact -1701 +▁Im -1702 +atural -1703 +▁Ed -1704 +▁typ -1705 +▁United -1706 +▁redu -1707 +▁beautiful -1708 +atic -1709 +▁By -1710 +▁ago -1711 +▁went -1712 
+▁begin -1713 +aken -1714 +// -1715 +▁announ -1716 +org -1717 +▁thought -1718 +▁Pe -1719 +▁pick -1720 +▁told -1721 +▁hope -1722 +▁appear -1723 +ancial -1724 +isk -1725 +It -1726 +resent -1727 +▁anal -1728 +▁happen -1729 +anks -1730 +rew -1731 +▁Gr -1732 +▁Em -1733 +irm -1734 +▁break -1735 +ille -1736 +▁wind -1737 +▁questions -1738 +resh -1739 +OR -1740 +▁York -1741 +▁x -1742 +▁Qu -1743 +come -1744 +▁Pre -1745 +▁content -1746 +▁certain -1747 +▁Add -1748 +oll -1749 +▁everything -1750 +▁prep -1751 +ourn -1752 +hers -1753 +:// -1754 +▁sn -1755 +ians -1756 +irt -1757 +gle -1758 +▁field -1759 +▁companies -1760 +▁travel -1761 +ony -1762 +▁Cal -1763 +▁enc -1764 +▁recom -1765 +▁single -1766 +▁known -1767 +▁added -1768 +▁favor -1769 +▁media -1770 +▁-- -1771 +cell -1772 +▁building -1773 +arning -1774 +▁manag -1775 +▁Park -1776 +aps -1777 +▁search -1778 +▁environment -1779 +▁friend -1780 +▁actually -1781 +aur -1782 +▁address -1783 +ief -1784 +▁tot -1785 +▁ener -1786 +de -1787 +▁study -1788 +▁mess -1789 +eral -1790 +▁vol -1791 +▁tax -1792 +▁press -1793 +▁problem -1794 +play -1795 +isc -1796 +▁later -1797 +▁connect -1798 +ino -1799 +▁works -1800 +ests -1801 +▁Sm -1802 +▁girl -1803 +icy -1804 +▁improve -1805 +gest -1806 +acy -1807 +ibr -1808 +▁taking -1809 +ew -1810 +▁South -1811 +▁ident -1812 +▁maint -1813 +▁sound -1814 +▁pub -1815 +ental -1816 +year -1817 +lebr -1818 +ural -1819 +▁Su -1820 +▁track -1821 +ided -1822 +▁training -1823 +▁watch -1824 +▁results -1825 +ster -1826 +▁staff -1827 +▁card -1828 +▁wond -1829 +abor -1830 +▁North -1831 +▁face -1832 +back -1833 +▁professional -1834 +nes -1835 +ensive -1836 +▁Mc -1837 +▁Just -1838 +ocu -1839 +gs -1840 +ES -1841 +▁film -1842 +▁provides -1843 +wh -1844 +atest -1845 +yl -1846 +▁seen -1847 +▁While -1848 +▁issues -1849 +▁someone -1850 +ama -1851 +▁Per -1852 +▁unique -1853 +▁host -1854 +▁half -1855 +▁front -1856 +▁official -1857 +cer -1858 +▁Euro -1859 +fully -1860 +▁near -1861 +opy -1862 +▁econom -1863 +▁relations -1864 +▁web -1865 
+▁sell -1866 +▁particular -1867 +▁National -1868 +▁County -1869 +▁everyone -1870 +▁miss -1871 +▁port -1872 +AL -1873 +▁dig -1874 +urch -1875 +▁due -1876 +▁Aust -1877 +▁Some -1878 +go -1879 +▁recommend -1880 +▁network -1881 +hod -1882 +▁cook -1883 +▁Center -1884 +▁Don -1885 +lex -1886 +▁cred -1887 +▁office -1888 +▁respons -1889 +▁z -1890 +ued -1891 +▁Inc -1892 +▁Oct -1893 +▁simple -1894 +itted -1895 +▁Part -1896 +▁age -1897 +▁ant -1898 +ctor -1899 +ibility -1900 +▁aud -1901 +▁management -1902 +ging -1903 +▁click -1904 +not -1905 +roll -1906 +▁oil -1907 +▁Pol -1908 +▁particip -1909 +time -1910 +▁Dep -1911 +asing -1912 +▁whole -1913 +pecially -1914 +▁mot -1915 +▁bar -1916 +obile -1917 +iod -1918 +▁Acc -1919 +▁Pres -1920 +▁performance -1921 +▁areas -1922 +▁Apr -1923 +▁mor -1924 +▁ess -1925 +pper -1926 +▁fall -1927 +▁author -1928 +cing -1929 +▁given -1930 +ply -1931 +imate -1932 +▁bed -1933 +▁World -1934 +icult -1935 +nding -1936 +▁above -1937 +▁reason -1938 +▁protect -1939 +ites -1940 +▁events -1941 +In -1942 +ators -1943 +aining -1944 +▁among -1945 +▁eff -1946 +ables -1947 +umb -1948 +▁Will -1949 +ops -1950 +▁experienc -1951 +ask -1952 +▁Sec -1953 +▁history -1954 +EN -1955 +▁select -1956 +▁Stud -1957 +omes -1958 +▁black -1959 +ogn -1960 +ED -1961 +▁assist -1962 +▁size -1963 +▁energy -1964 +▁foot -1965 +ison -1966 +cy -1967 +ili -1968 +▁High -1969 +▁details -1970 +▁print -1971 +ledge -1972 +▁htt -1973 +▁Reg -1974 +▁glo -1975 +▁believe -1976 +▁flo -1977 +▁sex -1978 +crib -1979 +▁further -1980 +▁From -1981 +▁amount -1982 +▁Post -1983 +▁six -1984 +▁log -1985 +idence -1986 +ety -1987 +ulation -1988 +▁designed -1989 +▁includes -1990 +▁prob -1991 +▁Friday -1992 +astic -1993 +▁pain -1994 +ands -1995 +vert -1996 +▁cult -1997 +ufact -1998 +▁points -1999 +▁repl -2000 +▁parent -2001 +▁mag -2002 +▁red -2003 +▁Day -2004 +▁property -2005 +AS -2006 +▁Ge -2007 +ruction -2008 +▁Bar -2009 +▁continue -2010 +▁soon -2011 +nov -2012 +▁feature -2013 +▁Aug -2014 +▁value -2015 +urance -2016 
+▁et -2017 +▁Mr -2018 +▁Europe -2019 +▁anything -2020 +▁text -2021 +▁various -2022 +itch -2023 +▁coming -2024 +▁question -2025 +▁popular -2026 +▁latest -2027 +itional -2028 +▁according -2029 +aily -2030 +▁lov -2031 +▁living -2032 +rodu -2033 +▁phys -2034 +▁forward -2035 +▁type -2036 +my -2037 +▁fre -2038 +uation -2039 +▁March -2040 +▁phone -2041 +itc -2042 +ouch -2043 +▁consider -2044 +cript -2045 +▁pret -2046 +▁whether -2047 +aturday -2048 +IC -2049 +IT -2050 +▁brand -2051 +▁entire -2052 +▁idea -2053 +ze -2054 +though -2055 +▁claim -2056 +▁white -2057 +edd -2058 +aching -2059 +▁celebr -2060 +▁weeks -2061 +▁gra -2062 +▁dou -2063 +▁needed -2064 +▁Bu -2065 +▁diff -2066 +▁consum -2067 +▁potential -2068 +▁opportunity -2069 +▁comput -2070 +▁deb -2071 +▁El -2072 +▁color -2073 +elt -2074 +▁taken -2075 +▁Us -2076 +▁June -2077 +▁wide -2078 +▁required -2079 +▁receive -2080 +▁par -2081 +▁date -2082 +▁Sept -2083 +▁extra -2084 +selves -2085 +▁Sund -2086 +ung -2087 +itter -2088 +▁docu -2089 +new -2090 +▁third -2091 +▁example -2092 +AC -2093 +▁relationship -2094 +▁safe -2095 +ival -2096 +▁bad -2097 +▁sent -2098 +▁ensure -2099 +This -2100 +itor -2101 +ises -2102 +▁ready -2103 +▁inj -2104 +▁Off -2105 +▁West -2106 +▁, -2107 +▁comfort -2108 +▁currently -2109 +ilar -2110 +amer -2111 +▁meas -2112 +ees -2113 +ires -2114 +▁financial -2115 +▁common -2116 +▁almost -2117 +ffe -2118 +▁sugg -2119 +▁fire -2120 +head -2121 +▁ach -2122 +▁April -2123 +val -2124 +uary -2125 +▁ways -2126 +▁human -2127 +▁kids -2128 +▁Read -2129 +▁Art -2130 +▁pretty -2131 +▁period -2132 +▁quite -2133 +▁Jo -2134 +▁options -2135 +▁final -2136 +▁skin -2137 +▁natural -2138 +▁yourself -2139 +▁especially -2140 +▁veh -2141 +irc -2142 +▁road -2143 +▁style -2144 +▁trying -2145 +▁park -2146 +▁sho -2147 +▁box -2148 +▁Health -2149 +▁Cor -2150 +ring -2151 +▁items -2152 +▁His -2153 +▁answ -2154 +▁paper -2155 +used -2156 +▁member -2157 +▁provided -2158 +▁either -2159 +ese -2160 +ana -2161 +ively -2162 +.... 
-2163 +▁Saturday -2164 +itting -2165 +onday -2166 +▁coll -2167 +▁engine -2168 +▁choose -2169 +▁hon -2170 +▁self -2171 +▁crit -2172 +▁held -2173 +▁throughout -2174 +▁happy -2175 +▁dam -2176 +▁fit -2177 +▁download -2178 +▁via -2179 +▁swe -2180 +▁attend -2181 +▁wanted -2182 +▁flow -2183 +▁clients -2184 +▁stra -2185 +ication -2186 +▁summer -2187 +▁Pa -2188 +▁recent -2189 +▁Fin -2190 +▁impact -2191 +▁Aut -2192 +▁users -2193 +ada -2194 +▁created -2195 +▁sales -2196 +▁tit -2197 +▁Af -2198 +icro -2199 +▁July -2200 +azing -2201 +▁blog -2202 +▁issue -2203 +▁previous -2204 +▁behind -2205 +▁takes -2206 +arter -2207 +oogle -2208 +▁recently -2209 +hel -2210 +▁TH -2211 +▁software -2212 +▁Dav -2213 +angu -2214 +gress -2215 +IS -2216 +do -2217 +▁init -2218 +cast -2219 +ams -2220 +ux -2221 +▁version -2222 +▁super -2223 +▁Get -2224 +▁Feb -2225 +ried -2226 +▁bott -2227 +▁seem -2228 +▁Up -2229 +▁couple -2230 +▁song -2231 +▁running -2232 +▁insp -2233 +▁hol -2234 +verage -2235 +ume -2236 +ober -2237 +▁clear -2238 +▁collect -2239 +▁problems -2240 +ades -2241 +apt -2242 +▁isn -2243 +▁education -2244 +▁received -2245 +▁method -2246 +oura -2247 +▁table -2248 +▁players -2249 +▁role -2250 +▁represent -2251 +▁reading -2252 +▁Val -2253 +uge -2254 +▁Direct -2255 +eth -2256 +▁Int -2257 +anced -2258 +itten -2259 +▁signific -2260 +atform -2261 +▁likely -2262 +eke -2263 +ole -2264 +earch -2265 +ification -2266 +▁Sw -2267 +par -2268 +▁shows -2269 +▁di -2270 +where -2271 +▁security -2272 +▁increase -2273 +▁accom -2274 +▁States -2275 +▁Mon -2276 +▁favorite -2277 +▁customer -2278 +▁stri -2279 +▁pan -2280 +▁party -2281 +reme -2282 +▁action -2283 +▁skills -2284 +▁regular -2285 +St -2286 +▁difficult -2287 +▁fast -2288 +▁simply -2289 +idge -2290 +OU -2291 +▁sle -2292 +▁else -2293 +▁Face -2294 +▁writing -2295 +▁ele -2296 +▁nice -2297 +aging -2298 +▁Sunday -2299 +▁Monday -2300 +oud -2301 +oid -2302 +▁position -2303 +overed -2304 +▁article -2305 +▁outside -2306 +▁original -2307 +▁Her -2308 +▁probably -2309 
+▁cool -2310 +icles -2311 +aving -2312 +mit -2313 +▁cup -2314 +▁necess -2315 +▁inside -2316 +▁fresh -2317 +ID -2318 +istration -2319 +▁asked -2320 +▁wonder -2321 +▁goal -2322 +▁systems -2323 +.) -2324 +▁manufact -2325 +arth -2326 +aby -2327 +▁model -2328 +-- -2329 +▁House -2330 +li -2331 +▁morning -2332 +▁ground -2333 +▁President -2334 +icated -2335 +▁application -2336 +▁leave -2337 +ham -2338 +eter -2339 +▁ful -2340 +▁learning -2341 +▁anim -2342 +uit -2343 +aker -2344 +▁Associ -2345 +▁risk -2346 +▁Act -2347 +▁Black -2348 +▁knowledge -2349 +▁located -2350 +based -2351 +▁contrib -2352 +▁UK -2353 +▁release -2354 +▁projects -2355 +▁lives -2356 +▁changes -2357 +▁tour -2358 +▁Are -2359 +▁Bus -2360 +▁however -2361 +ox -2362 +▁Free -2363 +▁treatment -2364 +▁stop -2365 +medi -2366 +face -2367 +right -2368 +▁Austral -2369 +▁exist -2370 +▁mix -2371 +▁recogn -2372 +▁additional -2373 +▁polit -2374 +adem -2375 +▁Red -2376 +▁activities -2377 +▁private -2378 +▁abs -2379 +▁sat -2380 +▁career -2381 +iple -2382 +name -2383 +▁board -2384 +▁medical -2385 +▁Work -2386 +▁total -2387 +▁Mich -2388 +▁cal -2389 +▁anyone -2390 +▁hit -2391 +▁etc -2392 +artment -2393 +▁fail -2394 +▁ple -2395 +▁TV -2396 +▁accept -2397 +urg -2398 +▁town -2399 +▁Soc -2400 +ague -2401 +▁base -2402 +arget -2403 +aign -2404 +amed -2405 +bor -2406 +OT -2407 +hib -2408 +▁mark -2409 +▁former -2410 +▁contract -2411 +▁matter -2412 +▁included -2413 +▁America -2414 +ming -2415 +ounc -2416 +ules -2417 +▁mach -2418 +ession -2419 +▁Sal -2420 +iol -2421 +▁stock -2422 +▁match -2423 +▁autom -2424 +▁words -2425 +▁significant -2426 +izing -2427 +▁hair -2428 +ipment -2429 +▁saf -2430 +ecut -2431 +▁Ser -2432 +▁meeting -2433 +wood -2434 +▁Of -2435 +▁October -2436 +▁books -2437 +▁September -2438 +ovember -2439 +▁growth -2440 +▁Ac -2441 +▁playing -2442 +▁January -2443 +aced -2444 +▁leaders -2445 +empt -2446 +▁ball -2447 +▁worth -2448 +mon -2449 +irth -2450 +▁round -2451 +▁longer -2452 +▁drive -2453 +▁hy -2454 +▁character -2455 
+▁variety -2456 +ny -2457 +▁concern -2458 +▁News -2459 +▁First -2460 +▁practice -2461 +ester -2462 +▁production -2463 +che -2464 +▁function -2465 +▁Sk -2466 +▁Wed -2467 +rict -2468 +▁looks -2469 +▁squ -2470 +ground -2471 +▁exam -2472 +▁late -2473 +reg -2474 +▁San -2475 +ude -2476 +▁lay -2477 +airs -2478 +▁Every -2479 +▁wall -2480 +mercial -2481 +pm -2482 +iff -2483 +▁sun -2484 +ursday -2485 +▁defin -2486 +adu -2487 +▁determ -2488 +na -2489 +▁Ag -2490 +▁August -2491 +▁suggest -2492 +ci -2493 +▁Har -2494 +elcome -2495 +▁worked -2496 +▁weeke -2497 +▁fig -2498 +ville -2499 +▁associ -2500 +uesday -2501 +▁Google -2502 +▁programs -2503 +▁death -2504 +imum -2505 +▁chance -2506 +▁platform -2507 +▁cand -2508 +▁screen -2509 +▁international -2510 +▁Then -2511 +iddle -2512 +▁Let -2513 +ipping -2514 +cks -2515 +rect -2516 +▁deg -2517 +▁true -2518 +▁Dis -2519 +▁nothing -2520 +Wh -2521 +▁challeng -2522 +itchen -2523 +▁loss -2524 +▁general -2525 +▁clos -2526 +▁rather -2527 +▁plans -2528 +arden -2529 +▁Facebook -2530 +▁purchase -2531 +▁estab -2532 +erc -2533 +▁amazing -2534 +▁credit -2535 +▁leading -2536 +▁subject -2537 +▁Department -2538 +▁regard -2539 +▁stat -2540 +cember -2541 +▁allows -2542 +ouncil -2543 +▁seems -2544 +olution -2545 +eds -2546 +▁built -2547 +▁arri -2548 +▁police -2549 +mas -2550 +▁similar -2551 +▁Mus -2552 +▁student -2553 +▁Sim -2554 +▁usually -2555 +▁infl -2556 +▁Pat -2557 +▁rate -2558 +▁quickly -2559 +▁Air -2560 +oke -2561 +▁November -2562 +▁teac -2563 +▁Also -2564 +lin -2565 +AM -2566 +▁Street -2567 +▁draw -2568 +▁national -2569 +ashing -2570 +▁touch -2571 +ought -2572 +▁providing -2573 +▁comment -2574 +▁International -2575 +oph -2576 +light -2577 +▁excell -2578 +▁deep -2579 +nesday -2580 +▁apply -2581 +▁higher -2582 +iter -2583 +iber -2584 +▁choice -2585 +▁photos -2586 +clus -2587 +▁Group -2588 +str -2589 +gar -2590 +▁tast -2591 +ING -2592 +▁respect -2593 +off -2594 +▁collection -2595 +▁safety -2596 +▁image -2597 +▁Out -2598 +▁Cons -2599 +now -2600 +▁hands 
-2601 +▁marketing -2602 +▁prior -2603 +ondon -2604 +▁ideas -2605 +▁integr -2606 +▁moment -2607 +▁movie -2608 +▁sil -2609 +▁encoura -2610 +▁easily -2611 +▁decision -2612 +example -2613 +▁ut -2614 +▁Cour -2615 +▁location -2616 +▁cell -2617 +▁bal -2618 +▁inde -2619 +▁dom -2620 +hern -2621 +▁rad -2622 +▁prevent -2623 +▁court -2624 +▁af -2625 +▁bud -2626 +▁Wind -2627 +▁op -2628 +▁released -2629 +▁decided -2630 +▁mass -2631 +▁ill -2632 +▁commit -2633 +▁Thursday -2634 +ached -2635 +▁digital -2636 +▁Home -2637 +put -2638 +▁Tuesday -2639 +ournal -2640 +▁emb -2641 +ha -2642 +▁reported -2643 +▁Well -2644 +▁benefits -2645 +▁Calif -2646 +▁file -2647 +ivery -2648 +▁exact -2649 +▁seek -2650 +▁December -2651 +▁introdu -2652 +▁wood -2653 +amb -2654 +▁La -2655 +▁cannot -2656 +ma -2657 +eal -2658 +▁campaign -2659 +▁lost -2660 +reng -2661 +▁display -2662 +▁Most -2663 +▁daily -2664 +▁partners -2665 +▁parents -2666 +▁ord -2667 +▁attack -2668 +▁Business -2669 +ishing -2670 +idents -2671 +hood -2672 +▁involved -2673 +▁agree -2674 +▁announced -2675 +▁cause -2676 +▁sche -2677 +▁effic -2678 +rown -2679 +▁sens -2680 +ructure -2681 +▁Gl -2682 +unities -2683 +▁drink -2684 +▁piece -2685 +▁center -2686 +▁Ang -2687 +ray -2688 +ospital -2689 +▁neg -2690 +atory -2691 +▁user -2692 +▁dest -2693 +OM -2694 +▁related -2695 +▁saw -2696 +▁Any -2697 +▁affect -2698 +▁expected -2699 +▁vict -2700 +ipe -2701 +▁Design -2702 +▁investig -2703 +▁ability -2704 +▁club -2705 +ederal -2706 +▁patients -2707 +▁Wednesday -2708 +▁ep -2709 +▁London -2710 +▁Click -2711 +ruary -2712 +EO -2713 +avy -2714 +▁rout -2715 +▁send -2716 +illing -2717 +▁ri -2718 +▁save -2719 +▁tick -2720 +ilies -2721 +▁modern -2722 +▁norm -2723 +just -2724 +ET -2725 +▁weekend -2726 +▁mobile -2727 +▁circ -2728 +sp -2729 +▁standard -2730 +▁langu -2731 +▁Prof -2732 +▁expert -2733 +▁option -2734 +ett -2735 +▁goes -2736 +▁boy -2737 +▁ded -2738 +▁immedi -2739 +▁green -2740 +▁enter -2741 +▁restaur -2742 +▁computer -2743 +▁Over -2744 +▁fight -2745 +▁War -2746 
+▁aw -2747 +▁woman -2748 +▁bag -2749 +▁global -2750 +▁pers -2751 +istic -2752 +board -2753 +lim -2754 +▁target -2755 +▁mother -2756 +ivity -2757 +▁iP -2758 +▁emer -2759 +uel -2760 +▁sym -2761 +▁College -2762 +like -2763 +iring -2764 +▁serious -2765 +▁innov -2766 +▁parts -2767 +▁helps -2768 +▁huge -2769 +▁PM -2770 +▁costs -2771 +▁English -2772 +key -2773 +asons -2774 +oday -2775 +aves -2776 +▁gen -2777 +▁Check -2778 +zz -2779 +ellow -2780 +▁surpr -2781 +▁weight -2782 +▁http -2783 +▁earn -2784 +enge -2785 +uk -2786 +erve -2787 +▁rights -2788 +ara -2789 +▁bank -2790 +▁ones -2791 +ornia -2792 +▁legal -2793 +▁code -2794 +▁solutions -2795 +▁request -2796 +▁equipment -2797 +▁Sen -2798 +▁myself -2799 +▁gives -2800 +▁tools -2801 +▁Afric -2802 +▁warm -2803 +▁arch -2804 +▁Other -2805 +▁insurance -2806 +cription -2807 +raft -2808 +band -2809 +▁Del -2810 +ram -2811 +edding -2812 +▁feed -2813 +▁Hol -2814 +EC -2815 +▁approach -2816 +ault -2817 +▁conditions -2818 +▁played -2819 +▁giving -2820 +▁admin -2821 +▁dress -2822 +▁Ob -2823 +▁Techn -2824 +pri -2825 +▁Book -2826 +attle -2827 +▁attention -2828 +▁roll -2829 +OS -2830 +▁levels -2831 +▁sus -2832 +▁sett -2833 +▁resources -2834 +unt -2835 +▁award -2836 +▁Par -2837 +▁Brit -2838 +▁prim -2839 +hold -2840 +▁deliver -2841 +▁trust -2842 +ension -2843 +iction -2844 +atives -2845 +▁Service -2846 +▁note -2847 +▁sold -2848 +aged -2849 +bert -2850 +▁qual -2851 +▁remember -2852 +▁policy -2853 +▁February -2854 +▁interested -2855 +erous -2856 +▁Play -2857 +▁solution -2858 +▁door -2859 +▁Trans -2860 +▁businesses -2861 +▁capt -2862 +▁gets -2863 +▁planning -2864 +▁subs -2865 +▁highly -2866 +▁lab -2867 +aught -2868 +▁object -2869 +iding -2870 +pose -2871 +▁starting -2872 +▁opp -2873 +▁cases -2874 +partment -2875 +▁Law -2876 +ysis -2877 +▁Christmas -2878 +akers -2879 +▁lower -2880 +▁upon -2881 +▁instead -2882 +▁vac -2883 +▁write -2884 +▁hear -2885 +▁organization -2886 +▁materials -2887 +vey -2888 +▁express -2889 +▁themselves -2890 +▁published -2891 
+EL -2892 +irit -2893 +▁California -2894 +ening -2895 +▁president -2896 +▁source -2897 +ica -2898 +▁reach -2899 +▁Gener -2900 +▁plant -2901 +▁condition -2902 +ples -2903 +mission -2904 +ashion -2905 +orge -2906 +urt -2907 +▁sense -2908 +▁fine -2909 +▁streng -2910 +apan -2911 +ibrary -2912 +www -2913 +▁dry -2914 +izes -2915 +▁effective -2916 +▁firm -2917 +▁sale -2918 +bum -2919 +▁mid -2920 +▁photo -2921 +▁written -2922 +▁types -2923 +AP -2924 +▁dise -2925 +▁average -2926 +▁interview -2927 +rup -2928 +urb -2929 +rom -2930 +▁consult -2931 +▁AM -2932 +▁Go -2933 +▁countries -2934 +▁Met -2935 +▁positive -2936 +ule -2937 +▁remov -2938 +▁multiple -2939 +wide -2940 +▁Rem -2941 +▁Services -2942 +iles -2943 +ida -2944 +gu -2945 +ael -2946 +▁lif -2947 +arant -2948 +▁Great -2949 +▁join -2950 +mm -2951 +▁Je -2952 +enty -2953 +unk -2954 +▁slow -2955 +▁Spe -2956 +▁India -2957 +▁trip -2958 +▁describ -2959 +ube -2960 +aches -2961 +ength -2962 +▁began -2963 +ato -2964 +▁interesting -2965 +▁imm -2966 +▁Mod -2967 +▁images -2968 +▁answer -2969 +▁prem -2970 +▁player -2971 +▁cat -2972 +add -2973 +▁viol -2974 +▁opportunities -2975 +urer -2976 +▁message -2977 +▁Cle -2978 +▁employees -2979 +▁dream -2980 +ography -2981 +▁heat -2982 +▁healthy -2983 +ager -2984 +▁Sch -2985 +▁Why -2986 +▁Thanks -2987 +▁sites -2988 +ration -2989 +▁directly -2990 +▁camer -2991 +▁hour -2992 +▁item -2993 +rel -2994 +rought -2995 +▁document -2996 +▁fans -2997 +▁According -2998 +bit -2999 +orage -3000 +press -3001 +▁necessary -3002 +itute -3003 +▁picture -3004 +▁achieve -3005 +▁David -3006 +IL -3007 +▁copy -3008 +▁Hot -3009 +▁Av -3010 +▁Program -3011 +▁essential -3012 +▁completely -3013 +▁lic -3014 +▁Sub -3015 +▁gift -3016 +▁Once -3017 +▁tele -3018 +▁band -3019 +▁families -3020 +▁stories -3021 +sy -3022 +▁prices -3023 +▁groups -3024 +duc -3025 +▁Year -3026 +olf -3027 +▁Phot -3028 +▁commercial -3029 +▁King -3030 +arlier -3031 +▁Rec -3032 +▁Whe -3033 +▁Found -3034 +▁Since -3035 +▁reve -3036 +elling -3037 +▁offe -3038 
+▁goals -3039 +ocol -3040 +▁excellent -3041 +▁div -3042 +▁cert -3043 +▁East -3044 +▁Cr -3045 +▁promot -3046 +▁dru -3047 +▁Even -3048 +▁pull -3049 +▁successful -3050 +▁eye -3051 +▁Market -3052 +▁fully -3053 +▁www -3054 +▁growing -3055 +ares -3056 +itely -3057 +▁Mag -3058 +▁hor -3059 +▁led -3060 +▁itself -3061 +itation -3062 +▁Many -3063 +▁Loc -3064 +▁creating -3065 +▁fix -3066 +▁stru -3067 +iant -3068 +▁except -3069 +▁adult -3070 +▁traditional -3071 +▁White -3072 +▁comments -3073 +▁gold -3074 +▁paint -3075 +▁separ -3076 +oul -3077 +erved -3078 +▁Good -3079 +▁fab -3080 +▁aim -3081 +coming -3082 +▁neigh -3083 +▁broad -3084 +▁Germ -3085 +▁Russ -3086 +mb -3087 +▁Green -3088 +ancy -3089 +iable -3090 +▁birth -3091 +onse -3092 +▁propos -3093 +omen -3094 +▁fair -3095 +▁cy -3096 +ooth -3097 +▁gar -3098 +▁device -3099 +BC -3100 +▁reports -3101 +uses -3102 +anch -3103 +▁Best -3104 +▁block -3105 +▁mount -3106 +▁teams -3107 +▁terms -3108 +▁kitchen -3109 +▁cross -3110 +oms -3111 +udd -3112 +▁Spr -3113 +▁stuff -3114 +tee -3115 +▁extreme -3116 +▁dark -3117 +ffee -3118 +▁vehicle -3119 +▁Last -3120 +▁Jack -3121 +▁attempt -3122 +▁Each -3123 +▁glass -3124 +urning -3125 +▁wasn -3126 +▁applications -3127 +ores -3128 +venue -3129 +▁hop -3130 +▁saying -3131 +▁floor -3132 +hest -3133 +▁wrong -3134 +ey -3135 +▁baby -3136 +imately -3137 +▁Tex -3138 +▁dead -3139 +ties -3140 +uth -3141 +▁Bra -3142 +▁China -3143 +▁thinking -3144 +▁Port -3145 +▁rev -3146 +▁depend -3147 +▁shoot -3148 +▁Web -3149 +▁Ty -3150 +inner -3151 +ipped -3152 +▁blood -3153 +ashington -3154 +ecutive -3155 +▁bi -3156 +ald -3157 +oming -3158 +▁Twitter -3159 +▁Develop -3160 +OL -3161 +istry -3162 +▁mention -3163 +▁See -3164 +TM -3165 +”. 
-3166 +▁gave -3167 +▁Japan -3168 +aughter -3169 +▁Hall -3170 +▁smart -3171 +▁System -3172 +▁wait -3173 +inary -3174 +▁implement -3175 +pite -3176 +▁obs -3177 +rote -3178 +▁profession -3179 +▁speed -3180 +▁aware -3181 +▁serve -3182 +▁spend -3183 +▁attract -3184 +▁director -3185 +▁organiz -3186 +▁Bel -3187 +▁offering -3188 +iced -3189 +▁section -3190 +▁sen -3191 +▁budget -3192 +▁Association -3193 +▁became -3194 +▁farm -3195 +aries -3196 +ological -3197 +▁impress -3198 +▁distrib -3199 +Ch -3200 +rows -3201 +▁Office -3202 +▁ge -3203 +▁Mor -3204 +▁pictures -3205 +▁nation -3206 +▁college -3207 +▁wish -3208 +AD -3209 +▁Pri -3210 +▁correct -3211 +▁Sol -3212 +field -3213 +overn -3214 +▁Make -3215 +▁suit -3216 +▁IN -3217 +▁effort -3218 +▁Mem -3219 +▁developed -3220 +▁places -3221 +▁moving -3222 +▁conduct -3223 +▁coun -3224 +▁tal -3225 +▁carry -3226 +▁dog -3227 +▁limited -3228 +▁individuals -3229 +▁advice -3230 +ils -3231 +▁dro -3232 +vest -3233 +▁son -3234 +pre -3235 +▁rent -3236 +▁avoid -3237 +▁spent -3238 +yond -3239 +ications -3240 +zy -3241 +▁complex -3242 +▁Paul -3243 +▁defe -3244 +lock -3245 +▁bath -3246 +▁title -3247 +▁sleep -3248 +▁situation -3249 +▁Down -3250 +▁Road -3251 +idered -3252 +▁requirements -3253 +▁album -3254 +▁progress -3255 +▁delivery -3256 +ceed -3257 +▁Today -3258 +▁jud -3259 +▁Washington -3260 +▁cas -3261 +▁Vis -3262 +▁Educ -3263 +▁Inter -3264 +▁vot -3265 +▁construction -3266 +rench -3267 +riend -3268 +▁enh -3269 +▁Public -3270 +ibly -3271 +▁About -3272 +house -3273 +haps -3274 +▁ble -3275 +word -3276 +▁Canada -3277 +▁advant -3278 +▁wants -3279 +▁Top -3280 +▁statement -3281 +▁feet -3282 +▁Use -3283 +▁schools -3284 +▁Gold -3285 +▁war -3286 +down -3287 +▁race -3288 +useum -3289 +▁heard -3290 +▁convers -3291 +▁eat -3292 +▁Find -3293 +US -3294 +▁sometimes -3295 +▁sweet -3296 +▁Director -3297 +▁AN -3298 +▁nut -3299 +▁stress -3300 +▁billion -3301 +reci -3302 +▁Lear -3303 +▁quarter -3304 +▁physical -3305 +▁felt -3306 +ancing -3307 +▁hous -3308 +PS -3309 
+▁Indian -3310 +▁hotel -3311 +▁Mac -3312 +itary -3313 +▁towards -3314 +▁consist -3315 +▁stage -3316 +▁spot -3317 +▁annual -3318 +▁shop -3319 +▁shot -3320 +▁strateg -3321 +▁Flor -3322 +▁wonderful -3323 +ports -3324 +porate -3325 +▁Open -3326 +▁loved -3327 +▁region -3328 +▁ing -3329 +▁path -3330 +▁Dem -3331 +▁feeling -3332 +▁owners -3333 +▁finish -3334 +▁ver -3335 +▁Pal -3336 +▁THE -3337 +▁aff -3338 +unte -3339 +▁mat -3340 +ari -3341 +▁eyes -3342 +▁pattern -3343 +▁Council -3344 +▁finally -3345 +isions -3346 +▁lik -3347 +ctions -3348 +▁ten -3349 +▁brought -3350 +ION -3351 +▁Texas -3352 +▁language -3353 +▁wife -3354 +▁Care -3355 +▁pet -3356 +▁interact -3357 +▁partner -3358 +▁sports -3359 +▁straight -3360 +rast -3361 +▁inform -3362 +▁Dan -3363 +▁nature -3364 +ads -3365 +▁investment -3366 +▁Club -3367 +roid -3368 +▁respond -3369 +▁concept -3370 +▁nearly -3371 +owl -3372 +dule -3373 +▁helping -3374 +▁Hel -3375 +▁Class -3376 +▁exerc -3377 +▁overall -3378 +▁star -3379 +▁Bre -3380 +▁categ -3381 +▁weather -3382 +▁ult -3383 +▁Apple -3384 +▁max -3385 +▁tried -3386 +▁guide -3387 +▁blue -3388 +▁William -3389 +end -3390 +▁temper -3391 +estival -3392 +▁pow -3393 +▁collabor -3394 +▁largest -3395 +▁Court -3396 +". 
-3397 +ened -3398 +▁demand -3399 +▁charge -3400 +▁independ -3401 +▁client -3402 +hips -3403 +▁Board -3404 +As -3405 +▁rock -3406 +▁Time -3407 +itect -3408 +ourney -3409 +▁wear -3410 +change -3411 +▁Oh -3412 +ament -3413 +▁pred -3414 +He -3415 +▁advert -3416 +▁definitely -3417 +mitted -3418 +▁appoint -3419 +▁wrote -3420 +▁candid -3421 +▁activity -3422 +▁gas -3423 +▁seven -3424 +▁Windows -3425 +rences -3426 +▁Ann -3427 +▁Ir -3428 +▁cold -3429 +rig -3430 +aly -3431 +▁benefit -3432 +ago -3433 +▁Internet -3434 +▁offered -3435 +inger -3436 +roud -3437 +asc -3438 +▁Australia -3439 +yd -3440 +▁acqu -3441 +▁influ -3442 +▁response -3443 +▁turned -3444 +▁Ant -3445 +wise -3446 +▁double -3447 +▁miles -3448 +▁Review -3449 +▁pieces -3450 +▁uses -3451 +▁Tom -3452 +last -3453 +ounds -3454 +▁earlier -3455 +▁devices -3456 +▁Fam -3457 +▁internet -3458 +uted -3459 +▁beginning -3460 +▁thous -3461 +ned -3462 +▁considered -3463 +▁ahead -3464 +lies -3465 +▁altern -3466 +▁appreci -3467 +ails -3468 +▁grand -3469 +▁reduce -3470 +▁exactly -3471 +▁Adv -3472 +▁histor -3473 +▁View -3474 +▁prec -3475 +▁Research -3476 +▁James -3477 +bon -3478 +▁wedding -3479 +▁active -3480 +▁homes -3481 +▁imag -3482 +▁entertain -3483 +arc -3484 +▁Michael -3485 +▁paid -3486 +ategy -3487 +▁doll -3488 +ustain -3489 +▁transport -3490 +▁difference -3491 +▁belie -3492 +▁Thank -3493 +icks -3494 +olute -3495 +▁political -3496 +▁IT -3497 +▁regul -3498 +▁challenge -3499 +▁served -3500 +▁supply -3501 +▁cho -3502 +more -3503 +▁surround -3504 +ampions -3505 +▁Micro -3506 +▁finished -3507 +▁Rich -3508 +▁Have -3509 +icate -3510 +OV -3511 +▁Big -3512 +umn -3513 +ading -3514 +You -3515 +agn -3516 +▁Rel -3517 +▁cash -3518 +▁Look -3519 +▁creative -3520 +cause -3521 +▁eight -3522 +estern -3523 +ston -3524 +▁understanding -3525 +▁retail -3526 +▁replace -3527 +▁Govern -3528 +icip -3529 +▁states -3530 +LE -3531 +ying -3532 +:|| -3533 +▁Cur -3534 +▁Mark -3535 +▁rates -3536 +orrow -3537 +mod -3538 +▁culture -3539 +▁Char -3540 +antly -3541 
+ky -3542 +vin -3543 +oly -3544 +▁European -3545 +▁Super -3546 +▁lots -3547 +▁guarant -3548 +▁easier -3549 +▁experienced -3550 +▁ST -3551 +▁afford -3552 +▁Call -3553 +box -3554 +▁pages -3555 +▁Life -3556 +▁hus -3557 +dd -3558 +▁bottom -3559 +place -3560 +▁expand -3561 +iny -3562 +▁truly -3563 +sec -3564 +▁father -3565 +▁pressure -3566 +▁maybe -3567 +▁flav -3568 +hens -3569 +▁economic -3570 +ales -3571 +▁thank -3572 +▁reflect -3573 +inated -3574 +▁machine -3575 +ses -3576 +▁Company -3577 +error -3578 +rial -3579 +▁analysis -3580 +amic -3581 +icious -3582 +▁fat -3583 +▁IS -3584 +▁immediately -3585 +▁emot -3586 +▁named -3587 +alt -3588 +aled -3589 +▁gradu -3590 +▁numbers -3591 +sych -3592 +het -3593 +▁tom -3594 +▁Child -3595 +▁Det -3596 +▁Angel -3597 +▁demon -3598 +▁girls -3599 +▁exhib -3600 +rey -3601 +▁prot -3602 +▁comfortable -3603 +IP -3604 +erry -3605 +pa -3606 +▁assess -3607 +▁posted -3608 +▁satis -3609 +nown -3610 +▁degree -3611 +▁tips -3612 +chan -3613 +▁helped -3614 +▁damage -3615 +ivil -3616 +▁Ev -3617 +▁opening -3618 +▁Management -3619 +▁garden -3620 +▁dating -3621 +▁Bank -3622 +▁videos -3623 +▁contain -3624 +▁obt -3625 +▁wild -3626 +▁PC -3627 +ronic -3628 +care -3629 +▁storage -3630 +▁Bay -3631 +▁Ret -3632 +▁speak -3633 +▁behav -3634 +phone -3635 +▁subst -3636 +▁remain -3637 +force -3638 +anging -3639 +▁Plan -3640 +▁trade -3641 +▁launch -3642 +undred -3643 +rem -3644 +▁reviews -3645 +▁completed -3646 +▁Ins -3647 +▁II -3648 +ico -3649 +▁pool -3650 +▁Sun -3651 +▁Island -3652 +▁beyond -3653 +amm -3654 +▁lack -3655 +▁disease -3656 +asy -3657 +▁lock -3658 +▁Sing -3659 +▁Rock -3660 +set -3661 +▁threat -3662 +▁purpose -3663 +If -3664 +tion -3665 +▁Water -3666 +order -3667 +orial -3668 +▁cards -3669 +▁Contact -3670 +ado -3671 +▁adjust -3672 +▁Mart -3673 +dom -3674 +que -3675 +▁ter -3676 +▁spread -3677 +▁accur -3678 +▁existing -3679 +▁fashion -3680 +arily -3681 +▁knew -3682 +▁decor -3683 +▁Love -3684 +▁fant -3685 +▁Jes -3686 +▁highest -3687 +▁cancer -3688 +Re -3689 
+lied -3690 +▁Florida -3691 +▁plus -3692 +OW -3693 +▁craft -3694 +▁jobs -3695 +soft -3696 +▁Although -3697 +met -3698 +▁conference -3699 +▁Rob -3700 +body -3701 +▁Win -3702 +▁responsible -3703 +▁increasing -3704 +▁Sur -3705 +▁During -3706 +▁allowed -3707 +aling -3708 +▁train -3709 +▁setting -3710 +▁excited -3711 +atever -3712 +▁prefer -3713 +rapy -3714 +▁driving -3715 +▁camera -3716 +▁proud -3717 +door -3718 +▁increased -3719 +▁Sa -3720 +▁sty -3721 +imal -3722 +▁welcome -3723 +▁lines -3724 +▁himself -3725 +▁middle -3726 +▁initial -3727 +▁appropri -3728 +▁Dec -3729 +▁proced -3730 +ona -3731 +aith -3732 +ences -3733 +▁fem -3734 +illa -3735 +▁Sum -3736 +▁Church -3737 +▁certainly -3738 +▁General -3739 +▁passion -3740 +▁frame -3741 +▁furn -3742 +▁coffee -3743 +cel -3744 +▁strugg -3745 +▁journey -3746 +▁Product -3747 +▁holiday -3748 +iling -3749 +▁files -3750 +▁Community -3751 +▁Camp -3752 +▁estate -3753 +▁effects -3754 +▁er -3755 +za -3756 +fl -3757 +▁husband -3758 +▁thanks -3759 +▁Back -3760 +▁frequ -3761 +▁cast -3762 +▁ingred -3763 +aming -3764 +▁steps -3765 +▁button -3766 +▁Republic -3767 +▁length -3768 +▁update -3769 +▁People -3770 +▁pen -3771 +▁Custom -3772 +▁born -3773 +ologies -3774 +▁normal -3775 +istics -3776 +▁efforts -3777 +▁selection -3778 +▁Two -3779 +▁Education -3780 +▁changed -3781 +ously -3782 +▁Mary -3783 +▁batter -3784 +▁Cong -3785 +net -3786 +▁secure -3787 +▁mission -3788 +vant -3789 +▁cru -3790 +anta -3791 +▁spirit -3792 +▁dedicated -3793 +▁bill -3794 +▁owner -3795 +▁clin -3796 +▁relax -3797 +▁surv -3798 +▁shopping -3799 +▁looked -3800 +lying -3801 +icken -3802 +ken -3803 +▁incred -3804 +▁occas -3805 +▁stream -3806 +ovel -3807 +▁moved -3808 +▁Show -3809 +ady -3810 +▁links -3811 +▁mis -3812 +omb -3813 +nection -3814 +▁Cap -3815 +▁science -3816 +ij -3817 +EM -3818 +▁aspect -3819 +▁protection -3820 +): -3821 +oma -3822 +▁haven -3823 +fit -3824 +▁wine -3825 +▁powerful -3826 +▁French -3827 +othing -3828 +▁extend -3829 +▁evening -3830 +▁demonstr -3831 
+▁instruct -3832 +▁Take -3833 +▁meaning -3834 +▁background -3835 +▁Like -3836 +oos -3837 +ipp -3838 +▁occur -3839 +▁talking -3840 +▁patient -3841 +▁produce -3842 +IV -3843 +▁particularly -3844 +nded -3845 +▁USA -3846 +enance -3847 +▁aren -3848 +▁guys -3849 +porary -3850 +reed -3851 +friend -3852 +▁measure -3853 +▁Power -3854 +▁Sil -3855 +▁opin -3856 +▁basic -3857 +▁challenges -3858 +▁alone -3859 +ota -3860 +▁Under -3861 +▁Online -3862 +▁fan -3863 +DA -3864 +▁cream -3865 +ocr -3866 +▁payment -3867 +▁biggest -3868 +▁transfer -3869 +▁rules -3870 +▁Gra -3871 +▁doub -3872 +▁session -3873 +CC -3874 +itiz -3875 +▁shared -3876 +▁fill -3877 +leg -3878 +▁spring -3879 +▁fra -3880 +▁winter -3881 +▁sort -3882 +▁Project -3883 +range -3884 +▁runs -3885 +▁whose -3886 +▁letter -3887 +▁basis -3888 +▁couldn -3889 +IM -3890 +▁coach -3891 +▁federal -3892 +▁Information -3893 +▁Special -3894 +azine -3895 +annel -3896 +▁bur -3897 +▁schedule -3898 +▁liter -3899 +free -3900 +▁organizations -3901 +▁Pet -3902 +▁Because -3903 +▁manager -3904 +ios -3905 +istrict -3906 +▁leader -3907 +see -3908 +▁Phil -3909 +icing -3910 +▁drop -3911 +▁Who -3912 +▁models -3913 +▁electric -3914 +▁strength -3915 +▁Music -3916 +▁artist -3917 +acity -3918 +uing -3919 +▁church -3920 +isl -3921 +▁peace -3922 +▁reasons -3923 +uled -3924 +esome -3925 +▁Food -3926 +▁egg -3927 +▁Lake -3928 +▁slight -3929 +iques -3930 +▁absolute -3931 +▁capital -3932 +▁communities -3933 +▁sugar -3934 +▁volunte -3935 +▁extremely -3936 +▁Star -3937 +▁adding -3938 +▁competition -3939 +iture -3940 +▁exclus -3941 +▁guests -3942 +▁instit -3943 +▁onto -3944 +▁views -3945 +▁unit -3946 +▁mer -3947 +▁stick -3948 +▁British -3949 +▁shown -3950 +▁regarding -3951 +istered -3952 +▁Follow -3953 +vision -3954 +iation -3955 +▁residents -3956 +▁Sam -3957 +▁Ve -3958 +▁Thom -3959 +rief -3960 +gency -3961 +▁Profess -3962 +▁hundred -3963 +▁voice -3964 +▁conven -3965 +▁Miss -3966 +umber -3967 +hone -3968 +▁Enter -3969 +azon -3970 +la -3971 +▁seeing -3972 +▁River 
-3973 +▁chem -3974 +▁taste -3975 +▁ideal -3976 +▁strategy -3977 +apter -3978 +▁Mil -3979 +▁Yes -3980 +▁scient -3981 +▁followed -3982 +▁AP -3983 +▁Dri -3984 +▁Blue -3985 +ustr -3986 +▁daughter -3987 +▁Real -3988 +eria -3989 +▁colors -3990 +oyal -3991 +▁heavy -3992 +▁Institute -3993 +▁trou -3994 +▁compon -3995 +▁sched -3996 +▁Att -3997 +▁cry -3998 +osing -3999 +▁brother -4000 +▁gone -4001 +▁advantage -4002 +imb -4003 +▁notice -4004 +rian -4005 +▁Lou -4006 +▁guid -4007 +esterday -4008 +▁manage -4009 +oman -4010 +▁score -4011 +▁Matt -4012 +▁characters -4013 +▁virt -4014 +ags -4015 +standing -4016 +▁Fire -4017 +▁Police -4018 +▁Fore -4019 +iverse -4020 +▁traffic -4021 +asp -4022 +▁window -4023 +▁surface -4024 +▁ton -4025 +ocolate -4026 +term -4027 +▁Mount -4028 +▁experiences -4029 +▁Pay -4030 +▁smooth -4031 +ette -4032 +▁happened -4033 +▁Mal -4034 +▁reb -4035 +▁Ben -4036 +fast -4037 +▁graph -4038 +▁hom -4039 +▁Vol -4040 +▁names -4041 +▁identify -4042 +encies -4043 +▁shipping -4044 +▁pair -4045 +▁standards -4046 +▁senior -4047 +Sh -4048 +▁Wood -4049 +ech -4050 +icine -4051 +acing -4052 +gen -4053 +mark -4054 +▁talent -4055 +▁u -4056 +itude -4057 +▁District -4058 +BS -4059 +▁hospital -4060 +▁professionals -4061 +▁List -4062 +raw -4063 +▁initi -4064 +uce -4065 +▁breat -4066 +▁although -4067 +▁classic -4068 +▁workers -4069 +▁experts -4070 +ula -4071 +ixt -4072 +TS -4073 +▁luck -4074 +gn -4075 +▁Step -4076 +▁Hist -4077 +▁audience -4078 +▁covered -4079 +▁Est -4080 +▁laws -4081 +ero -4082 +▁Mot -4083 +▁Sign -4084 +▁passed -4085 +▁waiting -4086 +▁academ -4087 +▁guy -4088 +▁dang -4089 +▁beauty -4090 +rooms -4091 +▁fear -4092 +▁approx -4093 +▁continues -4094 +▁Development -4095 +▁finding -4096 +▁Team -4097 +▁snow -4098 +▁flex -4099 +▁efficient -4100 +orney -4101 +▁master -4102 +▁mail -4103 +▁associated -4104 +▁exciting -4105 +▁eval -4106 +▁Elect -4107 +inese -4108 +▁Exper -4109 +▁compared -4110 +inate -4111 +ga -4112 +▁larger -4113 +▁Chic -4114 +ss -4115 +▁critical -4116 +▁laun 
-4117 +sequ -4118 +▁cars -4119 +▁rob -4120 +▁Color -4121 +▁cab -4122 +▁technical -4123 +▁Family -4124 +▁trail -4125 +icon -4126 +▁ice -4127 +UR -4128 +▁shape -4129 +▁beg -4130 +▁district -4131 +▁keeping -4132 +▁TO -4133 +▁remind -4134 +▁solid -4135 +▁den -4136 +osh -4137 +▁Foundation -4138 +▁England -4139 +▁Science -4140 +▁facilities -4141 +▁boo -4142 +rees -4143 +▁wat -4144 +▁calls -4145 +▁restaurant -4146 +▁scene -4147 +▁maintain -4148 +▁greater -4149 +▁PR -4150 +▁Engine -4151 +▁sustain -4152 +▁officials -4153 +▁sy -4154 +mail -4155 +▁Alex -4156 +▁Bet -4157 +▁Sl -4158 +▁Jesus -4159 +▁posts -4160 +▁station -4161 +▁friendly -4162 +▁epis -4163 +▁Str -4164 +▁driver -4165 +▁sand -4166 +▁bul -4167 +▁listed -4168 +▁recipe -4169 +▁plenty -4170 +▁Glo -4171 +▁forget -4172 +odes -4173 +▁Vir -4174 +▁fish -4175 +▁older -4176 +illage -4177 +cul -4178 +▁rich -4179 +▁Start -4180 +▁continued -4181 +▁football -4182 +incip -4183 +▁package -4184 +▁developing -4185 +itors -4186 +log -4187 +▁Hum -4188 +▁established -4189 +yer -4190 +iller -4191 +▁Brown -4192 +rowd -4193 +▁income -4194 +▁useful -4195 +▁minute -4196 +▁truck -4197 +well -4198 +▁studies -4199 +▁advent -4200 +▁announce -4201 +oop -4202 +▁learned -4203 +ervation -4204 +▁Press -4205 +atically -4206 +▁disapp -4207 +▁tim -4208 +▁produced -4209 +win -4210 +▁motor -4211 +tra -4212 +▁League -4213 +using -4214 +▁rooms -4215 +unately -4216 +▁closed -4217 +▁beat -4218 +▁handle -4219 +▁appropriate -4220 +▁Whether -4221 +▁classes -4222 +unning -4223 +▁origin -4224 +▁military -4225 +ander -4226 +▁Central -4227 +▁artists -4228 +▁died -4229 +gal -4230 +▁Commission -4231 +▁explore -4232 +▁sup -4233 +▁placed -4234 +▁Offic -4235 +CA -4236 +▁economy -4237 +▁kept -4238 +▁thousands -4239 +night -4240 +▁knows -4241 +▁Franc -4242 +▁connection -4243 +▁winning -4244 +▁Smith -4245 +▁remove -4246 +▁pros -4247 +▁Social -4248 +▁evidence -4249 +▁force -4250 +▁primary -4251 +▁CEO -4252 +▁Media -4253 +▁adop -4254 +▁tree -4255 +▁repair -4256 +▁salt -4257 
+▁Build -4258 +▁bright -4259 +aded -4260 +▁novel -4261 +▁testing -4262 +▁Download -4263 +iment -4264 +IG -4265 +▁Christian -4266 +▁operations -4267 +▁util -4268 +rael -4269 +▁status -4270 +▁opened -4271 +▁figure -4272 +▁requires -4273 +BA -4274 +▁street -4275 +▁discount -4276 +▁fol -4277 +There -4278 +▁Another -4279 +▁gun -4280 +▁communication -4281 +atab -4282 +ipes -4283 +▁presented -4284 +▁Grand -4285 +rd -4286 +▁decl -4287 +▁Beach -4288 +▁discover -4289 +ka -4290 +What -4291 +▁Obama -4292 +overy -4293 +▁ingredients -4294 +▁teaching -4295 +▁surg -4296 +▁medium -4297 +▁Network -4298 +▁injury -4299 +inn -4300 +▁Arch -4301 +semb -4302 +▁harm -4303 +▁starts -4304 +vention -4305 +oe -4306 +▁brain -4307 +bed -4308 +▁Carol -4309 +▁catch -4310 +▁contains -4311 +iled -4312 +▁selected -4313 +irection -4314 +▁shall -4315 +▁Mex -4316 +outhern -4317 +▁sharing -4318 +▁brings -4319 +look -4320 +action -4321 +▁butter -4322 +arge -4323 +▁doctor -4324 +idential -4325 +▁Disc -4326 +▁structure -4327 +▁advance -4328 +itar -4329 +ideo -4330 +▁poor -4331 +rehens -4332 +▁scen -4333 +men -4334 +▁famous -4335 +asure -4336 +▁pray -4337 +▁dinner -4338 +mp -4339 +▁arrest -4340 +apers -4341 +pective -4342 +▁Dig -4343 +▁prepared -4344 +olic -4345 +▁esc -4346 +▁Scott -4347 +▁Hill -4348 +▁manufacturer -4349 +▁suff -4350 +enses -4351 +▁Mad -4352 +▁Word -4353 +▁pm -4354 +▁serving -4355 +▁Microsoft -4356 +▁jump -4357 +▁Card -4358 +▁ship -4359 +▁loan -4360 +▁architect -4361 +▁Light -4362 +uries -4363 +▁Full -4364 +▁department -4365 +▁mo -4366 +▁remains -4367 +▁funds -4368 +▁Valley -4369 +▁vision -4370 +▁watching -4371 +▁secret -4372 +▁rank -4373 +atively -4374 +▁victim -4375 +PA -4376 +▁sto -4377 +▁Amazon -4378 +▁resist -4379 +▁Cup -4380 +ini -4381 +ctors -4382 +▁veget -4383 +▁gain -4384 +▁Chicago -4385 +aven -4386 +▁Their -4387 +noon -4388 +▁methods -4389 +▁balance -4390 +usion -4391 +lor -4392 +iers -4393 +▁agency -4394 +allery -4395 +▁updated -4396 +▁buying -4397 +▁movement -4398 +”, -4399 
+riage -4400 +▁leaves -4401 +CH -4402 +▁Keep -4403 +▁Bill -4404 +▁drug -4405 +▁compl -4406 +▁Chinese -4407 +▁guess -4408 +▁Support -4409 +ooper -4410 +▁Net -4411 +RA -4412 +aked -4413 +▁encourage -4414 +▁Stand -4415 +▁spending -4416 +▁cloud -4417 +▁journal -4418 +▁map -4419 +▁OF -4420 +▁Week -4421 +▁reality -4422 +lands -4423 +▁Award -4424 +going -4425 +ption -4426 +ishes -4427 +▁Africa -4428 +LC -4429 +▁properties -4430 +okes -4431 +lastname -4432 +eless -4433 +▁beach -4434 +▁becoming -4435 +▁happens -4436 +▁Date -4437 +▁Ber -4438 +ellig -4439 +▁bought -4440 +top -4441 +▁sector -4442 +▁cleaning -4443 +▁Women -4444 +▁spons -4445 +▁RE -4446 +▁ID -4447 +▁Mel -4448 +▁leaving -4449 +▁sport -4450 +iency -4451 +▁relig -4452 +▁Commit -4453 +▁showing -4454 +antic -4455 +▁plants -4456 +itness -4457 +life -4458 +▁maintenance -4459 +▁https -4460 +▁facility -4461 +▁metal -4462 +▁Fort -4463 +▁Tor -4464 +ception -4465 +▁perhaps -4466 +▁dep -4467 +▁Times -4468 +essions -4469 +hem -4470 +ki -4471 +▁determine -4472 +ifts -4473 +▁leadership -4474 +▁Long -4475 +▁advanced -4476 +▁worksh -4477 +▁Israel -4478 +▁independent -4479 +▁stores -4480 +▁entry -4481 +▁Rad -4482 +▁Academ -4483 +▁Android -4484 +▁cris -4485 +▁mechan -4486 +▁fee -4487 +▁analy -4488 +▁Where -4489 +▁rain -4490 +berg -4491 +edy -4492 +▁upgr -4493 +▁rare -4494 +osure -4495 +▁unc -4496 +outs -4497 +▁cart -4498 +▁Que -4499 +▁exercise -4500 +▁wouldn -4501 +▁committed -4502 +abilities -4503 +ror -4504 +▁faith -4505 +itz -4506 +▁NY -4507 +▁meant -4508 +alls -4509 +▁vote -4510 +▁sem -4511 +▁iPhone -4512 +▁Mass -4513 +ograp -4514 +▁mist -4515 +▁bird -4516 +craft -4517 +▁Both -4518 +▁fabric -4519 +▁designs -4520 +▁Tim -4521 +▁numerous -4522 +▁ride -4523 +▁focused -4524 +▁anti -4525 +▁markets -4526 +▁Div -4527 +▁brows -4528 +▁Nov -4529 +▁ju -4530 +▁incor -4531 +▁Fil -4532 +fr -4533 +▁signed -4534 +agram -4535 +▁sources -4536 +▁Pub -4537 +▁records -4538 +** -4539 +▁funding -4540 +▁theme -4541 +▁actual -4542 +aturing -4543 +iest 
-4544 +▁establish -4545 +▁changing -4546 +▁chair -4547 +ae -4548 +▁visitors -4549 +▁steel -4550 +▁visual -4551 +▁multi -4552 +▁ir -4553 +For -4554 +estic -4555 +▁Next -4556 +MS -4557 +▁Los -4558 +▁forms -4559 +iences -4560 +▁crowd -4561 +iance -4562 +▁joined -4563 +▁Organ -4564 +isation -4565 +▁mill -4566 +▁coverage -4567 +▁elements -4568 +▁showed -4569 +rim -4570 +▁kick -4571 +▁selling -4572 +▁Watch -4573 +▁practices -4574 +▁animals -4575 +▁operating -4576 +▁obvious -4577 +fin -4578 +▁menu -4579 +▁busy -4580 +▁Nor -4581 +▁capacity -4582 +▁locations -4583 +▁grant -4584 +▁Medical -4585 +▁songs -4586 +▁fell -4587 +▁Set -4588 +▁neighbor -4589 +▁roof -4590 +▁refer -4591 +▁Head -4592 +isher -4593 +eared -4594 +▁George -4595 +oor -4596 +miss -4597 +▁memory -4598 +▁raised -4599 +▁Only -4600 +rics -4601 +▁worry -4602 +▁whatever -4603 +▁corner -4604 +▁ban -4605 +▁lose -4606 +▁allowing -4607 +igan -4608 +▁listen -4609 +IA -4610 +▁central -4611 +reek -4612 +▁plastic -4613 +▁society -4614 +▁accommod -4615 +gage -4616 +vere -4617 +▁relationships -4618 +SS -4619 +▁Tri -4620 +▁diet -4621 +igation -4622 +▁lux -4623 +▁diagn -4624 +▁thr -4625 +▁managed -4626 +▁Copy -4627 +OP -4628 +▁updates -4629 +▁limit -4630 +▁caused -4631 +▁estim -4632 +▁rap -4633 +▁parking -4634 +▁population -4635 +▁tables -4636 +▁Before -4637 +ya -4638 +▁Note -4639 +▁uns -4640 +", -4641 +fol -4642 +▁parties -4643 +▁decide -4644 +isco -4645 +uty -4646 +▁claims -4647 +▁articles -4648 +▁core -4649 +ano -4650 +▁survey -4651 +▁repe -4652 +▁Mer -4653 +ferences -4654 +▁assistance -4655 +amin -4656 +▁walking -4657 +▁tickets -4658 +▁Its -4659 +▁techniques -4660 +▁thoughts -4661 +ection -4662 +▁CD -4663 +rab -4664 +ivered -4665 +▁Sy -4666 +▁afternoon -4667 +▁colour -4668 +▁documents -4669 +▁wire -4670 +arrant -4671 +▁bowl -4672 +▁ended -4673 +▁transl -4674 +▁youth -4675 +▁brown -4676 +▁combination -4677 +▁vehicles -4678 +lines -4679 +▁flat -4680 +▁forum -4681 +▁yesterday -4682 +▁previously -4683 +▁Game -4684 +▁enjoyed 
-4685 +▁landsc -4686 +▁Society -4687 +▁profile -4688 +▁courses -4689 +iliar -4690 +▁launched -4691 +▁toward -4692 +▁appears -4693 +DF -4694 +▁eating -4695 +point -4696 +▁sea -4697 +▁Bur -4698 +▁Town -4699 +▁accident -4700 +▁Cre -4701 +▁awesome -4702 +▁filled -4703 +▁optim -4704 +▁teacher -4705 +coh -4706 +▁factors -4707 +bour -4708 +eed -4709 +▁Chris -4710 +▁Technology -4711 +▁temperature -4712 +rs -4713 +▁micro -4714 +▁mort -4715 +pan -4716 +▁psych -4717 +while -4718 +▁generally -4719 +▁putting -4720 +▁shel -4721 +▁charges -4722 +▁Learn -4723 +▁Mont -4724 +▁Trump -4725 +▁citiz -4726 +▁Atl -4727 +▁notes -4728 +▁smaller -4729 +▁Author -4730 +▁firstname -4731 +▁Pack -4732 +▁direction -4733 +▁values -4734 +▁task -4735 +no -4736 +rehensive -4737 +▁counter -4738 +▁Lord -4739 +▁Log -4740 +▁Wil -4741 +▁AL -4742 +▁outdoor -4743 +▁CA -4744 +▁Sand -4745 +▁earth -4746 +▁kid -4747 +▁teachers -4748 +▁panel -4749 +▁becomes -4750 +▁vs -4751 +▁tend -4752 +▁corporate -4753 +orthern -4754 +▁favour -4755 +ola -4756 +▁bon -4757 +▁Arts -4758 +▁Virgin -4759 +▁century -4760 +▁honest -4761 +▁separate -4762 +▁legisl -4763 +?? -4764 +▁cheese -4765 +▁Security -4766 +▁assign -4767 +yan -4768 +▁Congress -4769 +▁matt -4770 +On -4771 +▁sch -4772 +▁truth -4773 +▁purs -4774 +▁concerns -4775 +OD -4776 +▁situ -4777 +▁Committee -4778 +▁Main -4779 +istan -4780 +▁Data -4781 +▁helpful -4782 +▁dur -4783 +▁shut -4784 +▁Jew -4785 +New -4786 +▁swim -4787 +▁Centre -4788 +iration -4789 +▁missing -4790 +▁orders -4791 +▁fold -4792 +▁Jul -4793 +▁Frank -4794 +▁milk -4795 +rain -4796 +▁McC -4797 +een -4798 +▁Government -4799 +▁flu -4800 +▁throw -4801 +!!! 
-4802 +po -4803 +▁Ext -4804 +▁adapt -4805 +▁polic -4806 +▁innovative -4807 +▁installation -4808 +ownt -4809 +▁Aud -4810 +▁ur -4811 +▁south -4812 +▁relevant -4813 +▁Lo -4814 +▁tow -4815 +▁van -4816 +pet -4817 +ifying -4818 +olars -4819 +rical -4820 +▁Robert -4821 +SP -4822 +▁Museum -4823 +▁decisions -4824 +▁environmental -4825 +ye -4826 +▁discussion -4827 +▁despite -4828 +▁waste -4829 +▁AND -4830 +▁fourth -4831 +▁slightly -4832 +orter -4833 +▁Tur -4834 +oles -4835 +▁inspired -4836 +▁Mike -4837 +▁ang -4838 +▁dance -4839 +▁net -4840 +▁Tre -4841 +▁enhance -4842 +▁Den -4843 +▁apart -4844 +▁Prov -4845 +▁Wall -4846 +▁Jim -4847 +▁scr -4848 +▁spect -4849 +▁mental -4850 +▁Hotel -4851 +▁Old -4852 +▁fantastic -4853 +▁Land -4854 +▁pal -4855 +▁format -4856 +▁Somet -4857 +▁sav -4858 +▁joint -4859 +▁desk -4860 +ita -4861 +▁upcoming -4862 +▁ath -4863 +▁AC -4864 +▁spl -4865 +▁Lead -4866 +▁Dou -4867 +inct -4868 +▁emp -4869 +▁YOU -4870 +▁willing -4871 +rist -4872 +▁hearing -4873 +▁sounds -4874 +▁fuel -4875 +▁commitment -4876 +ups -4877 +▁consumers -4878 +▁appeal -4879 +▁raise -4880 +?” -4881 +▁Manager -4882 +▁civil -4883 +▁UN -4884 +kin -4885 +osen -4886 +▁Place -4887 +▁library -4888 +umin -4889 +SA -4890 +ensions -4891 +▁vir -4892 +▁north -4893 +▁Through -4894 +▁expertise -4895 +▁Report -4896 +▁promote -4897 +▁asking -4898 +▁absolutely -4899 +▁units -4900 +▁Contin -4901 +water -4902 +▁chocolate -4903 +cher -4904 +▁extensive -4905 +▁Louis -4906 +▁movies -4907 +▁delivered -4908 +▁Series -4909 +▁bask -4910 +▁delicious -4911 +▁Ill -4912 +Pro -4913 +▁eth -4914 +▁reached -4915 +▁sets -4916 +zen -4917 +Com -4918 +▁Vict -4919 +known -4920 +▁executive -4921 +uable -4922 +▁plays -4923 +▁agreement -4924 +ternal -4925 +▁Link -4926 +▁radio -4927 +nergy -4928 +▁Posted -4929 +▁Ma -4930 +▁foreign -4931 +▁alle -4932 +▁lunch -4933 +REE -4934 +▁transform -4935 +▁datab -4936 +aser -4937 +▁register -4938 +icians -4939 +▁emergency -4940 +▁thick -4941 +▁struct -4942 +▁trees -4943 +▁Angeles -4944 +▁Invest 
-4945 +list -4946 +eline -4947 +▁Ham -4948 +▁Lim -4949 +▁Const -4950 +▁Oper -4951 +▁provider -4952 +▁brief -4953 +▁NE -4954 +▁presence -4955 +text -4956 +▁Upd -4957 +▁combined -4958 +▁Fund -4959 +▁rid -4960 +!) -4961 +▁Admin -4962 +▁Fun -4963 +▁achie -4964 +prise -4965 +▁Gal -4966 +▁furniture -4967 +▁seeking -4968 +▁fruit -4969 +▁NOT -4970 +▁Hand -4971 +▁controll -4972 +▁Union -4973 +osition -4974 +▁connected -4975 +▁Join -4976 +bre -4977 +▁Jun -4978 +▁readers -4979 +▁expensive -4980 +▁adults -4981 +▁Person -4982 +▁Cook -4983 +▁Democr -4984 +reens -4985 +▁seconds -4986 +▁feels -4987 +▁poll -4988 +▁ON -4989 +uality -4990 +▁rat -4991 +▁generation -4992 +▁distance -4993 +▁edge -4994 +▁fees -4995 +▁mentioned -4996 +▁recommended -4997 +▁trial -4998 +▁chat -4999 +▁calling -5000 +▁har -5001 +▁nine -5002 +▁cities -5003 +▁chicken -5004 +▁approximately -5005 +▁Plus -5006 +atin -5007 +▁bringing -5008 +TH -5009 +▁consid -5010 +▁Access -5011 +▁Journal -5012 +▁Inte -5013 +▁wel -5014 +▁married -5015 +fortunately -5016 +▁Peter -5017 +▁prepare -5018 +▁websites -5019 +▁operation -5020 +▁alternative -5021 +▁confidence -5022 +▁server -5023 +▁dogs -5024 +IR -5025 +▁registered -5026 +▁stars -5027 +cean -5028 +LA -5029 +▁educational -5030 +▁Master -5031 +burg -5032 +▁Di -5033 +appy -5034 +▁Indust -5035 +▁photograph -5036 +▁restrict -5037 +ef -5038 +ruit -5039 +▁Chief -5040 +▁Ol -5041 +▁tight -5042 +My -5043 +▁Children -5044 +▁centre -5045 +hab -5046 +emporary -5047 +▁square -5048 +▁France -5049 +othes -5050 +▁Spring -5051 +▁tun -5052 +▁returned -5053 +▁lovely -5054 +▁minimum -5055 +▁category -5056 +OC -5057 +▁Live -5058 +azz -5059 +▁exchange -5060 +▁seat -5061 +irmed -5062 +▁stret -5063 +▁Prote -5064 +ears -5065 +▁topic -5066 +▁installed -5067 +▁tea -5068 +▁info -5069 +▁Rest -5070 +rag -5071 +▁tough -5072 +▁brands -5073 +asks -5074 +▁guest -5075 +▁princip -5076 +▁Way -5077 +bu -5078 +▁majority -5079 +▁researc -5080 +atre -5081 +inations -5082 +▁wearing -5083 +▁appearance -5084 +▁female 
-5085 +how -5086 +▁neck -5087 +▁Minister -5088 +▁colle -5089 +estyle -5090 +ship -5091 +orry -5092 +▁Cy -5093 +IF -5094 +When -5095 +ulated -5096 +aks -5097 +▁ven -5098 +▁accompl -5099 +▁therefore -5100 +▁mostly -5101 +▁instru -5102 +▁Canad -5103 +▁Ok -5104 +▁Price -5105 +elines -5106 +▁maximum -5107 +▁HD -5108 +▁winner -5109 +▁sauce -5110 +▁processes -5111 +▁academic -5112 +▁surgery -5113 +van -5114 +kins -5115 +▁measures -5116 +▁responsibility -5117 +▁Ver -5118 +ifications -5119 +▁leads -5120 +▁impl -5121 +▁teen -5122 +▁Mo -5123 +▁killed -5124 +▁Sup -5125 +▁approved -5126 +▁apps -5127 +▁anywhere -5128 +▁arrange -5129 +▁Max -5130 +nel -5131 +▁Men -5132 +osis -5133 +▁Sports -5134 +▁stre -5135 +▁Video -5136 +▁Hy -5137 +▁importance -5138 +▁Test -5139 +▁gather -5140 +▁ring -5141 +▁climate -5142 +▁Squ -5143 +alian -5144 +▁satisf -5145 +▁detailed -5146 +▁boost -5147 +▁signs -5148 +▁battery -5149 +An -5150 +▁nom -5151 +hi -5152 +▁battle -5153 +▁feedback -5154 +▁chief -5155 +▁veter -5156 +▁Festival -5157 +▁switch -5158 +▁Creat -5159 +mond -5160 +▁dyn -5161 +▁worldwide -5162 +▁featured -5163 +▁scheduled -5164 +▁cooking -5165 +▁disp -5166 +▁highlight -5167 +ius -5168 +lets -5169 +▁Wild -5170 +▁supporting -5171 +▁rise -5172 +ait -5173 +▁crim -5174 +▁Library -5175 +▁sympt -5176 +ulty -5177 +▁cheap -5178 +cohol -5179 +▁comprehensive -5180 +▁predict -5181 +▁participants -5182 +vis -5183 +▁Walk -5184 +▁Jud -5185 +arsh -5186 +▁Cat -5187 +ker -5188 +▁IP -5189 +▁Thomas -5190 +▁affordable -5191 +▁otherwise -5192 +paper -5193 +▁Bob -5194 +▁Tour -5195 +▁defense -5196 +▁Conference -5197 +alend -5198 +ters -5199 +Cl -5200 +cious -5201 +▁bike -5202 +▁Lab -5203 +roy -5204 +otten -5205 +▁properly -5206 +ician -5207 +▁animal -5208 +▁actions -5209 +▁Using -5210 +ulate -5211 +▁clearly -5212 +ena -5213 +▁performed -5214 +▁Earth -5215 +FL -5216 +▁Search -5217 +gl -5218 +▁mur -5219 +▁Pan -5220 +▁purchased -5221 +itable -5222 +bl -5223 +▁Those -5224 +idden -5225 +▁ourselves -5226 +iner -5227 
+pected -5228 +oston -5229 +▁Bi -5230 +▁conv -5231 +▁joy -5232 +uts -5233 +▁Copyright -5234 +▁audio -5235 +iser -5236 +▁chemical -5237 +▁meal -5238 +▁vent -5239 +▁competitive -5240 +verse -5241 +anda -5242 +▁Johnson -5243 +▁appeared -5244 +▁windows -5245 +▁advertising -5246 +▁Global -5247 +▁applied -5248 +▁push -5249 +▁motiv -5250 +UT -5251 +bol -5252 +▁Prem -5253 +▁ment -5254 +▁Cam -5255 +▁doors -5256 +▁Soft -5257 +ENT -5258 +▁Party -5259 +▁sister -5260 +▁policies -5261 +gment -5262 +▁pump -5263 +▁mouth -5264 +oga -5265 +▁topics -5266 +▁Form -5267 +▁Jeff -5268 +erg -5269 +▁supported -5270 +▁valid -5271 +▁Bas -5272 +▁technologies -5273 +▁pregn -5274 +▁scale -5275 +▁flowers -5276 +▁rom -5277 +▁behavior -5278 +▁arm -5279 +▁African -5280 +▁sitting -5281 +rastructure -5282 +GB -5283 +MA -5284 +▁minor -5285 +▁writer -5286 +▁familiar -5287 +▁Jose -5288 +▁holding -5289 +▁entertainment -5290 +▁featuring -5291 +▁rub -5292 +▁Germany -5293 +▁episode -5294 +▁coord -5295 +but -5296 +▁bond -5297 +ushed -5298 +▁studio -5299 +▁Western -5300 +▁editor -5301 +▁Charl -5302 +▁opinion -5303 +▁Kore -5304 +▁elim -5305 +alog -5306 +▁Cost -5307 +▁participate -5308 +▁revenue -5309 +▁plug -5310 +▁Haw -5311 +tr -5312 +▁removed -5313 +▁faster -5314 +▁Connect -5315 +▁Fair -5316 +▁Help -5317 +▁Saf -5318 +▁sides -5319 +west -5320 +inch -5321 +▁strategies -5322 +▁Champions -5323 +▁coast -5324 +erts -5325 +▁jew -5326 +▁charged -5327 +▁depending -5328 +col -5329 +▁totally -5330 +prene -5331 +oration -5332 +▁birthday -5333 +▁reliable -5334 +▁visiting -5335 +▁quiet -5336 +▁begins -5337 +▁Martin -5338 +▁species -5339 +▁conversation -5340 +▁described -5341 +UN -5342 +inating -5343 +▁Energy -5344 +▁flight -5345 +orough -5346 +▁caught -5347 +▁Girl -5348 +▁Cert -5349 +▁ap -5350 +▁eventually -5351 +▁monthly -5352 +▁fif -5353 +▁consumer -5354 +hus -5355 +den -5356 +▁Hospital -5357 +tered -5358 +▁Sar -5359 +▁restaurants -5360 +▁tail -5361 +▁meat -5362 +▁housing -5363 +▁cells -5364 +▁dish -5365 +▁teach -5366 
+▁MP -5367 +▁deals -5368 +▁inches -5369 +▁Digital -5370 +▁pu -5371 +▁television -5372 +otic -5373 +▁Mic -5374 +▁accounts -5375 +with -5376 +▁improved -5377 +reprene -5378 +ersey -5379 +▁German -5380 +▁Dev -5381 +▁nav -5382 +▁Orig -5383 +apes -5384 +▁Gen -5385 +▁labor -5386 +▁Australian -5387 +▁delight -5388 +inter -5389 +▁university -5390 +▁dim -5391 +▁Id -5392 +▁fly -5393 +▁Joe -5394 +▁officer -5395 +▁marriage -5396 +▁hundreds -5397 +▁neighborhood -5398 +▁campus -5399 +▁revealed -5400 +ario -5401 +▁shoes -5402 +▁employee -5403 +ste -5404 +▁cro -5405 +▁label -5406 +▁breakfast -5407 +ulous -5408 +▁ign -5409 +weight -5410 +▁CH -5411 +▁Ul -5412 +▁confirm -5413 +▁Penn -5414 +▁administration -5415 +▁typically -5416 +SE -5417 +▁occasion -5418 +▁Academy -5419 +▁introduced -5420 +▁celebrate -5421 +▁exclusive -5422 +How -5423 +▁election -5424 +▁covers -5425 +ht -5426 +▁Secret -5427 +▁essay -5428 +▁Mid -5429 +▁appointment -5430 +ighter -5431 +▁volume -5432 +▁Ce -5433 +▁unless -5434 +sm -5435 +▁Opt -5436 +hew -5437 +achel -5438 +▁discovered -5439 +▁specifically -5440 +▁amb -5441 +▁vary -5442 +hent -5443 +▁compar -5444 +iat -5445 +▁internal -5446 +▁indic -5447 +▁planned -5448 +Our -5449 +▁Hope -5450 +▁twe -5451 +▁debt -5452 +▁intended -5453 +NA -5454 +▁cultural -5455 +▁cutting -5456 +▁sessions -5457 +▁AT -5458 +▁Americans -5459 +▁Lt -5460 +▁aspects -5461 +▁manufacturing -5462 +▁remaining -5463 +▁Maybe -5464 +▁Young -5465 +eries -5466 +ushing -5467 +▁mel -5468 +▁sexual -5469 +▁SP -5470 +bur -5471 +ixture -5472 +igr -5473 +▁shares -5474 +edia -5475 +▁nor -5476 +▁Box -5477 +merce -5478 +▁Boy -5479 +▁Second -5480 +▁recovery -5481 +); -5482 +▁basket -5483 +▁fle -5484 +▁Boston -5485 +▁icon -5486 +▁chart -5487 +▁engineering -5488 +▁remote -5489 +▁trading -5490 +ords -5491 +▁concent -5492 +▁Ari -5493 +▁scored -5494 +▁Er -5495 +▁bread -5496 +▁incredible -5497 +▁partnership -5498 +▁Key -5499 +▁investigation -5500 +▁lights -5501 +▁edition -5502 +ournament -5503 +▁dining -5504 +▁Commun 
-5505 +uke -5506 +asts -5507 +▁industrial -5508 +▁Jon -5509 +▁guarantee -5510 +▁forg -5511 +▁detect -5512 +▁Mur -5513 +CE -5514 +▁invent -5515 +aren -5516 +▁Meet -5517 +cont -5518 +▁Carolina -5519 +▁drivers -5520 +gas -5521 +▁components -5522 +▁Japanese -5523 +▁negative -5524 +▁liqu -5525 +▁hyd -5526 +▁automatically -5527 +mosp -5528 +▁End -5529 +elly -5530 +▁resource -5531 +eper -5532 +▁depos -5533 +▁cake -5534 +ala -5535 +▁Pac -5536 +▁mir -5537 +▁freed -5538 +▁fields -5539 +lymp -5540 +▁burn -5541 +▁Virginia -5542 +odies -5543 +▁practical -5544 +berry -5545 +▁chain -5546 +▁Type -5547 +cm -5548 +▁choices -5549 +▁noted -5550 +rupt -5551 +▁Human -5552 +▁evalu -5553 +▁quot -5554 +▁pock -5555 +▁confirmed -5556 +inet -5557 +▁interior -5558 +▁dollars -5559 +▁seemed -5560 +▁Applic -5561 +otton -5562 +▁Lee -5563 +lywood -5564 +▁cop -5565 +▁victory -5566 +▁bedroom -5567 +▁Jones -5568 +itionally -5569 +▁thus -5570 +▁rule -5571 +idays -5572 +▁suitable -5573 +▁Wal -5574 +iability -5575 +▁argu -5576 +▁depart -5577 +▁arrived -5578 +cles -5579 +▁Brand -5580 +▁Quest -5581 +ua -5582 +unting -5583 +▁perfectly -5584 +Al -5585 +▁FREE -5586 +▁twice -5587 +tters -5588 +hand -5589 +uits -5590 +▁buildings -5591 +▁boys -5592 +Ex -5593 +away -5594 +▁teeth -5595 +▁Tem -5596 +aped -5597 +▁possibly -5598 +▁broken -5599 +▁warrant -5600 +▁Mult -5601 +▁Equ -5602 +king -5603 +abet -5604 +gers -5605 +▁symptoms -5606 +▁films -5607 +▁crew -5608 +▁honor -5609 +uous -5610 +▁shooting -5611 +▁elig -5612 +▁Italian -5613 +▁doubt -5614 +▁bathroom -5615 +▁Victor -5616 +arp -5617 +▁ticket -5618 +▁Know -5619 +▁anc -5620 +arks -5621 +No -5622 +!” -5623 +▁Gar -5624 +▁island -5625 +▁stated -5626 +▁issued -5627 +ailability -5628 +flow -5629 +▁DV -5630 +▁chosen -5631 +ilit -5632 +▁Cast -5633 +rier -5634 +▁considering -5635 +▁enable -5636 +▁commission -5637 +▁Mexico -5638 +▁Steve -5639 +▁Little -5640 +▁injuries -5641 +▁Trust -5642 +urban -5643 +▁candidates -5644 +poses -5645 +▁tests -5646 +related -5647 +otal -5648 
+▁Williams -5649 +▁reference -5650 +▁desire -5651 +▁foods -5652 +▁rapid -5653 +▁keeps -5654 +▁corn -5655 +TC -5656 +▁bigger -5657 +ibilities -5658 +road -5659 +▁ris -5660 +▁missed -5661 +ipl -5662 +▁Instead -5663 +▁mode -5664 +▁paying -5665 +ulations -5666 +▁boat -5667 +▁picked -5668 +▁golf -5669 +▁contest -5670 +▁Does -5671 +iors -5672 +▁intellig -5673 +▁circum -5674 +▁Farm -5675 +acks -5676 +▁Students -5677 +▁Hard -5678 +▁appreciate -5679 +▁decades -5680 +▁premium -5681 +▁turns -5682 +▁tomorrow -5683 +▁sizes -5684 +iamond -5685 +▁trend -5686 +▁Games -5687 +▁valuable -5688 +gend -5689 +owntown -5690 +▁fro -5691 +▁settings -5692 +▁Coast -5693 +▁protected -5694 +ien -5695 +▁voc -5696 +▁Tit -5697 +▁Kn -5698 +▁presentation -5699 +▁soul -5700 +▁Mat -5701 +▁Mov -5702 +▁lived -5703 +▁Page -5704 +▁regularly -5705 +▁realize -5706 +mes -5707 +▁earned -5708 +atoes -5709 +▁Current -5710 +▁registration -5711 +▁nurs -5712 +▁Night -5713 +▁config -5714 +▁Ohio -5715 +▁attorney -5716 +▁magazine -5717 +▁citizens -5718 +▁quant -5719 +hetic -5720 +▁aid -5721 +▁failed -5722 +▁oven -5723 +▁AS -5724 +▁database -5725 +fection -5726 +ora -5727 +ris -5728 +▁spr -5729 +▁Assist -5730 +▁therapy -5731 +▁organic -5732 +ias -5733 +▁license -5734 +▁sequ -5735 +wing -5736 +▁Canadian -5737 +weet -5738 +▁Econom -5739 +▁agent -5740 +▁Michigan -5741 +▁surrounding -5742 +AY -5743 +▁mine -5744 +▁affected -5745 +▁greatest -5746 +▁resol -5747 +▁ends -5748 +▁providers -5749 +▁moments -5750 +oosing -5751 +▁ran -5752 +▁county -5753 +▁Olymp -5754 +▁tells -5755 +what -5756 +▁ec -5757 +▁dates -5758 +▁Span -5759 +PR -5760 +▁grown -5761 +▁Cross -5762 +▁reput -5763 +▁MS -5764 +▁athlet -5765 +▁Code -5766 +ev -5767 +▁surf -5768 +▁virtual -5769 +▁investors -5770 +▁Instagram -5771 +▁grade -5772 +spe -5773 +▁Pass -5774 +▁calcul -5775 +▁answers -5776 +.| -5777 +▁loves -5778 +▁shock -5779 +▁supports -5780 +▁painting -5781 +▁inn -5782 +▁draft -5783 +phas -5784 +▁influence -5785 +▁proposed -5786 +lights -5787 +▁agencies 
-5788 +oup -5789 +▁surprise -5790 +▁History -5791 +pass -5792 +▁Control -5793 +▁Kh -5794 +abled -5795 +▁hero -5796 +▁dial -5797 +▁poly -5798 +▁Sn -5799 +▁explain -5800 +▁weap -5801 +▁accurate -5802 +▁submit -5803 +▁degrees -5804 +▁renew -5805 +▁Bal -5806 +race -5807 +▁recorded -5808 +▁Executive -5809 +▁ages -5810 +▁Van -5811 +▁Point -5812 +oking -5813 +▁owned -5814 +▁convenient -5815 +▁Georg -5816 +▁AR -5817 +▁purposes -5818 +▁Share -5819 +vell -5820 +▁load -5821 +ria -5822 +which -5823 +▁Did -5824 +▁beer -5825 +▁yes -5826 +irms -5827 +▁whom -5828 +fficient -5829 +▁Inf -5830 +▁league -5831 +▁Federal -5832 +▁holds -5833 +▁processing -5834 +ella -5835 +▁Buy -5836 +▁Middle -5837 +TA -5838 +▁gro -5839 +TV -5840 +▁instructions -5841 +▁die -5842 +▁Cas -5843 +▁Asia -5844 +kes -5845 +▁interests -5846 +▁Jackson -5847 +▁Def -5848 +▁apparent -5849 +▁efficiency -5850 +▁pure -5851 +ansas -5852 +hors -5853 +▁jack -5854 +▁atmosp -5855 +▁effectively -5856 +▁Expl -5857 +mar -5858 +▁violence -5859 +luding -5860 +▁returns -5861 +alendar -5862 +▁Comple -5863 +▁Enjoy -5864 +▁element -5865 +▁pleased -5866 +▁awareness -5867 +▁goods -5868 +▁Paris -5869 +vy -5870 +real -5871 +▁messages -5872 +OVID -5873 +cking -5874 +▁pepper -5875 +▁channel -5876 +▁receiving -5877 +▁infrastructure -5878 +print -5879 +▁Ken -5880 +▁pod -5881 +rick -5882 +▁Three -5883 +▁electronic -5884 +▁Ire -5885 +▁occup -5886 +▁Made -5887 +▁forced -5888 +intage -5889 +▁officers -5890 +▁Size -5891 +▁facing -5892 +▁creation -5893 +ospit -5894 +▁musical -5895 +▁standing -5896 +▁Requ -5897 +▁researchers -5898 +▁Dom -5899 +▁sam -5900 +▁incident -5901 +▁Royal -5902 +▁perman -5903 +▁Columb -5904 +▁belong -5905 +▁closer -5906 +irty -5907 +▁lighting -5908 +▁everyday -5909 +▁Try -5910 +▁diverse -5911 +▁grad -5912 +▁Richard -5913 +▁route -5914 +▁Daily -5915 +profit -5916 +ban -5917 +▁Travel -5918 +▁ongoing -5919 +▁distribution -5920 +▁Photo -5921 +▁lit -5922 +▁Cred -5923 +▁causes -5924 +poration -5925 +made -5926 +▁trouble -5927 
+▁Ell -5928 +▁thread -5929 +▁apartment -5930 +▁Sher -5931 +▁administr -5932 +▁advoc -5933 +▁usual -5934 +▁wheel -5935 +▁serves -5936 +▁Chair -5937 +▁Ut -5938 +rum -5939 +▁sad -5940 +▁Need -5941 +▁pun -5942 +anche -5943 +▁Store -5944 +▁du -5945 +▁mini -5946 +isters -5947 +▁obtain -5948 +▁kinds -5949 +▁ped -5950 +▁healthcare -5951 +▁favourite -5952 +hy -5953 +▁judge -5954 +▁silver -5955 +▁arts -5956 +▁wid -5957 +PM -5958 +GE -5959 +▁Cath -5960 +▁supposed -5961 +▁meetings -5962 +▁error -5963 +▁crime -5964 +equ -5965 +▁rough -5966 +▁spaces -5967 +▁yellow -5968 +▁knowing -5969 +rete -5970 +▁plate -5971 +▁affili -5972 +udden -5973 +ribe -5974 +▁disappoint -5975 +▁stopped -5976 +▁flour -5977 +▁enthus -5978 +▁fellow -5979 +▁WH -5980 +umes -5981 +▁Wi -5982 +▁bound -5983 +never -5984 +oses -5985 +▁collaboration -5986 +aration -5987 +▁manner -5988 +Tube -5989 +▁Rev -5990 +xy -5991 +▁designer -5992 +itage -5993 +▁licens -5994 +▁construct -5995 +▁concerned -5996 +actions -5997 +▁Andrew -5998 +▁monit -5999 +▁subscrib -6000 +▁massive -6001 +▁Ltd -6002 +person -6003 +anges -6004 +▁weekly -6005 +▁clothes -6006 +▁follows -6007 +ennis -6008 +uction -6009 +▁Low -6010 +▁tut -6011 +▁rot -6012 +▁Four -6013 +ancer -6014 +cue -6015 +sembly -6016 +▁Local -6017 +▁Daniel -6018 +arian -6019 +ello -6020 +▁prison -6021 +▁tur -6022 +▁household -6023 +▁Wr -6024 +yard -6025 +▁simpl -6026 +▁forces -6027 +▁Clean -6028 +▁reduced -6029 +▁regional -6030 +▁challenging -6031 +iveness -6032 +EE -6033 +astern -6034 +▁male -6035 +▁Mean -6036 +▁tack -6037 +▁Guide -6038 +▁functions -6039 +▁stone -6040 +▁Ra -6041 +▁agreed -6042 +pond -6043 +▁hang -6044 +▁Right -6045 +▁script -6046 +▁Room -6047 +▁Santa -6048 +▁Francisco -6049 +oti -6050 +▁Hen -6051 +▁lifestyle -6052 +▁Russian -6053 +▁moist -6054 +▁treated -6055 +orable -6056 +▁horse -6057 +▁debut -6058 +▁complic -6059 +▁Marketing -6060 +▁alcohol -6061 +ansion -6062 +▁assets -6063 +▁native -6064 +▁innovation -6065 +▁payments -6066 +▁sample -6067 +▁fixed -6068 +ml 
-6069 +▁reserved -6070 +▁successfully -6071 +▁impressive -6072 +Con -6073 +▁powder -6074 +▁crisis -6075 +▁emotional -6076 +▁explained -6077 +FC -6078 +DS -6079 +▁Ep -6080 +Ar -6081 +▁inspiration -6082 +▁cute -6083 +▁Job -6084 +All -6085 +▁Visit -6086 +Un -6087 +ache -6088 +▁witness -6089 +under -6090 +▁leather -6091 +▁spokes -6092 +▁row -6093 +▁Rights -6094 +writ -6095 +ench -6096 +▁fort -6097 +▁forest -6098 +▁password -6099 +ppers -6100 +▁matters -6101 +▁Brook -6102 +▁FOR -6103 +Pl -6104 +ani -6105 +▁identified -6106 +alled -6107 +▁luxury -6108 +▁employment -6109 +BI -6110 +▁photograp -6111 +Be -6112 +▁blogg -6113 +▁drugs -6114 +▁Pot -6115 +▁Summer -6116 +▁Hor -6117 +▁cock -6118 +▁extended -6119 +And -6120 +▁phil -6121 +▁iron -6122 +▁Die -6123 +shire -6124 +igration -6125 +erves -6126 +▁Area -6127 +lyn -6128 +▁determined -6129 +▁rand -6130 +▁accepted -6131 +▁grab -6132 +▁recognized -6133 +▁outstanding -6134 +▁prop -6135 +▁Blo -6136 +▁prompt -6137 +▁der -6138 +▁styles -6139 +▁resolution -6140 +▁Southern -6141 +▁tou -6142 +▁height -6143 +folio -6144 +▁walls -6145 +▁odd -6146 +▁gifts -6147 +▁Rose -6148 +▁clinical -6149 +▁casino -6150 +▁vacation -6151 +▁Name -6152 +▁decre -6153 +▁advis -6154 +▁Cra -6155 +▁accessible -6156 +▁context -6157 +▁nearby -6158 +▁graduate -6159 +liance -6160 +▁conducted -6161 +can -6162 +They -6163 +vate -6164 +▁happening -6165 +rip -6166 +▁Number -6167 +▁positions -6168 +▁worse -6169 +▁Small -6170 +▁dangerous -6171 +▁perspective -6172 +▁Awards -6173 +▁Financial -6174 +▁SH -6175 +▁freedom -6176 +▁gear -6177 +mary -6178 +▁carried -6179 +▁speaking -6180 +▁factor -6181 +letter -6182 +▁Ash -6183 +▁Turn -6184 +▁stunning -6185 +▁sustainable -6186 +▁speech -6187 +▁Colorado -6188 +cling -6189 +▁tag -6190 +▁Scot -6191 +▁folks -6192 +▁significantly -6193 +▁candidate -6194 +▁Oil -6195 +unction -6196 +▁telling -6197 +▁domestic -6198 +ulture -6199 +▁examples -6200 +anged -6201 +▁Avenue -6202 +▁constantly -6203 +rid -6204 +▁committee -6205 +▁emphas -6206 
+▁Training -6207 +▁cable -6208 +▁Coll -6209 +▁likes -6210 +▁Lin -6211 +▁symbol -6212 +▁Kim -6213 +▁univers -6214 +▁hardware -6215 +▁mixed -6216 +▁Perform -6217 +ificate -6218 +▁originally -6219 +▁solar -6220 +▁Having -6221 +▁Account -6222 +▁hook -6223 +▁vit -6224 +ucle -6225 +▁Sometimes -6226 +▁Which -6227 +▁stands -6228 +emic -6229 +▁retire -6230 +▁Hon -6231 +▁conflic -6232 +▁awards -6233 +Don -6234 +ployment -6235 +▁adventure -6236 +▁contemporary -6237 +▁showc -6238 +LY -6239 +▁houses -6240 +▁involve -6241 +▁logo -6242 +▁village -6243 +▁fulf -6244 +▁Though -6245 +▁Cond -6246 +▁bless -6247 +▁Spanish -6248 +▁carefully -6249 +▁patterns -6250 +▁supplies -6251 +▁MA -6252 +▁Dub -6253 +▁Select -6254 +▁procedures -6255 +▁Print -6256 +▁DC -6257 +ingly -6258 +▁auto -6259 +▁programme -6260 +▁browser -6261 +▁imagine -6262 +▁Mobile -6263 +▁Despite -6264 +▁stretch -6265 +▁losing -6266 +▁confident -6267 +▁criminal -6268 +▁fitness -6269 +▁replacement -6270 +lete -6271 +▁routine -6272 +▁Available -6273 +▁illustr -6274 +▁adds -6275 +▁Ireland -6276 +▁procedure -6277 +▁engage -6278 +▁Rom -6279 +ca -6280 +▁circumst -6281 +▁Ryan -6282 +▁bottle -6283 +etime -6284 +▁Garden -6285 +▁crazy -6286 +utch -6287 +▁turning -6288 +▁YouTube -6289 +▁random -6290 +▁hosting -6291 +▁taught -6292 +▁rose -6293 +▁expectations -6294 +▁lift -6295 +state -6296 +▁Russia -6297 +▁command -6298 +▁recipes -6299 +▁Tay -6300 +front -6301 +▁Drive -6302 +secut -6303 +▁fo -6304 +▁improvement -6305 +▁alleged -6306 +▁excess -6307 +▁hur -6308 +▁tro -6309 +▁trained -6310 +▁sheet -6311 +▁noticed -6312 +▁mixture -6313 +▁festival -6314 +▁Bon -6315 +▁funny -6316 +illy -6317 +▁tech -6318 +▁OS -6319 +ATE -6320 +▁tab -6321 +▁shots -6322 +▁syn -6323 +▁flavor -6324 +▁reporting -6325 +▁passeng -6326 +▁guitar -6327 +▁ol -6328 +▁hoping -6329 +▁severe -6330 +▁entreprene -6331 +▁COVID -6332 +inder -6333 +▁suspect -6334 +▁eleg -6335 +ether -6336 +▁foundation -6337 +orgeous -6338 +▁Heart -6339 +ington -6340 +▁SU -6341 +▁upper -6342 
+ossible -6343 +inem -6344 +anger -6345 +▁Building -6346 +▁Environment -6347 +▁blow -6348 +eration -6349 +▁clothing -6350 +▁scholars -6351 +▁publish -6352 +▁Non -6353 +▁ok -6354 +enced -6355 +anna -6356 +▁Italy -6357 +adium -6358 +▁authent -6359 +▁FA -6360 +▁climb -6361 +▁pink -6362 +comes -6363 +▁Pop -6364 +▁Senior -6365 +rad -6366 +iano -6367 +▁talks -6368 +▁kill -6369 +pat -6370 +▁grew -6371 +▁Son -6372 +▁pil -6373 +hered -6374 +▁Beaut -6375 +▁root -6376 +▁san -6377 +oster -6378 +▁landscape -6379 +tle -6380 +ayer -6381 +▁figures -6382 +▁millions -6383 +ERS -6384 +ums -6385 +▁machines -6386 +▁Country -6387 +ERE -6388 +So -6389 +iece -6390 +▁Jersey -6391 +iversary -6392 +▁Run -6393 +▁Sky -6394 +orders -6395 +▁tasks -6396 +▁vital -6397 +▁reward -6398 +▁attended -6399 +ikes -6400 +▁eggs -6401 +▁tall -6402 +▁identity -6403 +▁tested -6404 +▁hits -6405 +▁PS -6406 +▁Senate -6407 +▁coc -6408 +’. -6409 +▁integrated -6410 +▁champions -6411 +▁laugh -6412 +▁herself -6413 +▁trends -6414 +▁input -6415 +▁Division -6416 +▁Disney -6417 +forcement -6418 +▁vibr -6419 +▁anx -6420 +▁council -6421 +oral -6422 +▁? 
-6423 +▁Shop -6424 +▁Nick -6425 +▁chapter -6426 +▁Stock -6427 +▁Ref -6428 +HS -6429 +▁shift -6430 +▁mal -6431 +▁Jenn -6432 +▁guard -6433 +▁weak -6434 +▁dram -6435 +▁wealth -6436 +▁Dog -6437 +▁historical -6438 +▁Writ -6439 +▁fishing -6440 +▁incl -6441 +▁baking -6442 +.’ -6443 +▁airport -6444 +▁Proper -6445 +▁depth -6446 +▁AD -6447 +▁museum -6448 +▁improving -6449 +▁smile -6450 +▁invited -6451 +▁arrested -6452 +izz -6453 +host -6454 +RI -6455 +▁wash -6456 +luded -6457 +rition -6458 +▁accessories -6459 +dy -6460 +▁Professor -6461 +ampion -6462 +▁Safety -6463 +▁thin -6464 +▁profit -6465 +▁ease -6466 +▁unf -6467 +▁output -6468 +▁qualified -6469 +▁Ent -6470 +▁Ford -6471 +▁residential -6472 +rate -6473 +▁Want -6474 +riends -6475 +▁rear -6476 +▁upload -6477 +▁abuse -6478 +▁Ha -6479 +▁hire -6480 +▁authorities -6481 +▁tonight -6482 +▁carbon -6483 +▁Georgia -6484 +▁certified -6485 +▁skill -6486 +▁mountain -6487 +▁Fre -6488 +▁wet -6489 +ATION -6490 +▁Sales -6491 +remony -6492 +zil -6493 +▁ordered -6494 +pret -6495 +▁Far -6496 +▁bags -6497 +▁managing -6498 +▁instance -6499 +▁km -6500 +▁destination -6501 +▁Still -6502 +▁entered -6503 +▁thorough -6504 +▁Email -6505 +iana -6506 +▁sole -6507 +▁dropped -6508 +icial -6509 +▁entirely -6510 +▁recy -6511 +▁Bul -6512 +▁institutions -6513 +iami -6514 +▁terror -6515 +▁atmosphere -6516 +▁Silver -6517 +yers -6518 +▁Further -6519 +LS -6520 +▁Supp -6521 +▁Fed -6522 +▁Systems -6523 +▁Luc -6524 +▁Space -6525 +▁closely -6526 +▁sick -6527 +▁guidance -6528 +▁photography -6529 +PC -6530 +▁Stat -6531 +▁breast -6532 +▁Zeal -6533 +▁rating -6534 +ras -6535 +▁tiny -6536 +▁description -6537 +▁Tax -6538 +▁vend -6539 +▁Members -6540 +▁fuck -6541 +▁offices -6542 +▁scientific -6543 +▁transportation -6544 +▁layer -6545 +stone -6546 +▁printed -6547 +long -6548 +De -6549 +▁frequently -6550 +▁Fac -6551 +▁Dist -6552 +▁spin -6553 +eller -6554 +igned -6555 +va -6556 +agues -6557 +▁cooper -6558 +▁entr -6559 +▁EU -6560 +▁yards -6561 +▁shower -6562 +▁searching -6563 
+▁cycle -6564 +▁dental -6565 +▁loans -6566 +▁delay -6567 +▁CO -6568 +▁Phone -6569 +▁failure -6570 +▁Pract -6571 +▁kne -6572 +▁medicine -6573 +MP -6574 +▁equal -6575 +▁lessons -6576 +izza -6577 +▁unable -6578 +▁protein -6579 +adow -6580 +ogue -6581 +▁broadcast -6582 +▁founded -6583 +sen -6584 +▁Aff -6585 +▁Finally -6586 +▁cm -6587 +▁column -6588 +▁flexible -6589 +quir -6590 +▁Tech -6591 +▁operate -6592 +▁bonus -6593 +▁typical -6594 +▁compens -6595 +▁Looking -6596 +▁rail -6597 +▁taxes -6598 +aduate -6599 +▁Hou -6600 +▁glad -6601 +▁Should -6602 +▁religious -6603 +▁Never -6604 +▁sac -6605 +▁Engineering -6606 +▁situations -6607 +▁vacc -6608 +▁awarded -6609 +▁bear -6610 +▁PDF -6611 +▁Ca -6612 +▁lad -6613 +▁Ball -6614 +▁Zealand -6615 +oes -6616 +▁Put -6617 +▁eligible -6618 +quality -6619 +▁Very -6620 +▁external -6621 +▁Mach -6622 +▁historic -6623 +▁Sat -6624 +▁alongside -6625 +icket -6626 +awn -6627 +UL -6628 +▁flood -6629 +▁strategic -6630 +▁OR -6631 +▁sudden -6632 +▁unlike -6633 +▁wra -6634 +▁DVD -6635 +worth -6636 +▁assessment -6637 +▁filed -6638 +▁Smart -6639 +osoph -6640 +ilst -6641 +▁networks -6642 +▁seriously -6643 +▁Sus -6644 +▁creates -6645 +▁workshop -6646 +Is -6647 +?" -6648 +umps -6649 +▁worst -6650 +▁rental -6651 +▁Unfortunately -6652 +xx -6653 +▁BE -6654 +▁Charles -6655 +▁transition -6656 +uting -6657 +▁fighting -6658 +▁critic -6659 +▁river -6660 +nam -6661 +▁membership -6662 +ircle -6663 +▁Mountain -6664 +oker -6665 +▁believes -6666 +asters -6667 +bi -6668 +▁platforms -6669 +omy -6670 +▁none -6671 +friendly -6672 +▁availability -6673 +▁attacks -6674 +▁versions -6675 +▁vul -6676 +▁Foot -6677 +▁tracks -6678 +class -6679 +uling -6680 +▁distinct -6681 +erman -6682 +▁younger -6683 +▁Es -6684 +tain -6685 +▁listening -6686 +osite -6687 +▁Fox -6688 +plate -6689 +▁faculty -6690 +▁motion -6691 +aturally -6692 +▁Ask -6693 +▁contribute -6694 +▁hasn -6695 +arrow -6696 +inos -6697 +!" 
-6698 +▁Professional -6699 +▁juice -6700 +II -6701 +▁proven -6702 +eding -6703 +▁Pacific -6704 +One -6705 +▁hopes -6706 +▁bab -6707 +onto -6708 +star -6709 +aze -6710 +With -6711 +▁joining -6712 +▁letters -6713 +irts -6714 +ucky -6715 +▁risks -6716 +▁performing -6717 +active -6718 +▁Ray -6719 +▁streets -6720 +car -6721 +▁soph -6722 +▁Ariz -6723 +ounter -6724 +you -6725 +▁developers -6726 +▁SC -6727 +▁conver -6728 +▁obl -6729 +▁cups -6730 +▁pounds -6731 +neys -6732 +Fi -6733 +▁cos -6734 +▁recording -6735 +▁Term -6736 +▁tip -6737 +ati -6738 +▁Tele -6739 +zer -6740 +▁Harr -6741 +▁Easy -6742 +▁lucky -6743 +▁Kent -6744 +▁informed -6745 +oured -6746 +▁choosing -6747 +▁surprised -6748 +ented -6749 +▁grass -6750 +▁facilit -6751 +▁meals -6752 +)| -6753 +▁mortgage -6754 +nic -6755 +▁Phys -6756 +obby -6757 +▁infect -6758 +▁capture -6759 +▁liquid -6760 +ican -6761 +▁banks -6762 +▁diss -6763 +▁tournament -6764 +▁PA -6765 +agon -6766 +▁Leg -6767 +▁kit -6768 +▁Fall -6769 +amps -6770 +▁LLC -6771 +▁anticip -6772 +elry -6773 +▁papers -6774 +▁Field -6775 +▁savings -6776 +earing -6777 +At -6778 +▁privacy -6779 +cers -6780 +▁discip -6781 +To -6782 +pons -6783 +uine -6784 +▁Event -6785 +aping -6786 +▁hurt -6787 +born -6788 +▁rein -6789 +▁regulations -6790 +▁Ram -6791 +▁Mom -6792 +▁Broad -6793 +▁inch -6794 +▁decade -6795 +ashed -6796 +law -6797 +ially -6798 +▁charm -6799 +▁Taylor -6800 +▁submitted -6801 +rency -6802 +celer -6803 +▁Kat -6804 +etic -6805 +▁arg -6806 +▁west -6807 +▁Northern -6808 +▁Ter -6809 +▁blend -6810 +▁ille -6811 +Le -6812 +▁reputation -6813 +▁LED -6814 +▁bat -6815 +Se -6816 +▁Po -6817 +▁suggested -6818 +▁monitor -6819 +▁hall -6820 +▁proceed -6821 +▁liked -6822 +▁relief -6823 +▁organized -6824 +▁filter -6825 +▁shops -6826 +▁domain -6827 +▁consequ -6828 +▁mic -6829 +▁Lind -6830 +▁belief -6831 +▁sight -6832 +▁engagement -6833 +entle -6834 +▁Cut -6835 +▁Source -6836 +▁Miami -6837 +bury -6838 +▁extract -6839 +▁pulled -6840 +Read -6841 +▁Radio -6842 +▁Come -6843 +▁Credit 
-6844 +▁gorgeous -6845 +days -6846 +▁justice -6847 +uter -6848 +pes -6849 +▁Cab -6850 +▁drawing -6851 +▁Sea -6852 +▁negoti -6853 +▁circumstances -6854 +▁capable -6855 +▁quote -6856 +▁Arab -6857 +▁) -6858 +▁tank -6859 +▁monitoring -6860 +ava -6861 +▁empt -6862 +▁crucial -6863 +rell -6864 +▁Think -6865 +▁legs -6866 +▁Order -6867 +▁portfolio -6868 +▁Bible -6869 +▁sky -6870 +bing -6871 +ulf -6872 +ographic -6873 +▁hate -6874 +▁immediate -6875 +▁increases -6876 +▁ads -6877 +▁arrive -6878 +▁exhibition -6879 +▁stir -6880 +▁Ms -6881 +bar -6882 +▁believed -6883 +foot -6884 +▁penal -6885 +▁moves -6886 +▁Insurance -6887 +▁linked -6888 +ta -6889 +athan -6890 +▁Continue -6891 +▁counsel -6892 +▁relatively -6893 +▁treatments -6894 +▁faces -6895 +▁attached -6896 +▁Pak -6897 +▁manual -6898 +faction -6899 +▁soil -6900 +▁crack -6901 +▁adm -6902 +▁defend -6903 +illiant -6904 +uis -6905 +▁mm -6906 +▁jun -6907 +ura -6908 +▁Mir -6909 +▁planet -6910 +resents -6911 +bles -6912 +Ad -6913 +▁technique -6914 +cknow -6915 +▁concert -6916 +▁enjoying -6917 +rowse -6918 +▁guidelines -6919 +▁listing -6920 +esides -6921 +▁directed -6922 +▁interface -6923 +▁injured -6924 +arters -6925 +▁vast -6926 +▁hosted -6927 +▁execut -6928 +▁dent -6929 +▁LA -6930 +▁ast -6931 +▁Conf -6932 +▁Rod -6933 +▁spark -6934 +▁garage -6935 +▁authors -6936 +▁hospit -6937 +▁memories -6938 +uration -6939 +rich -6940 +▁contrast -6941 +▁aside -6942 +▁volunteers -6943 +▁equipped -6944 +sey -6945 +▁Ron -6946 +ardens -6947 +▁Ur -6948 +▁normally -6949 +ppy -6950 +▁estimated -6951 +▁:) -6952 +▁promise -6953 +▁firms -6954 +▁Republican -6955 +▁dreams -6956 +▁Happy -6957 +▁Pow -6958 +onym -6959 +▁Jac -6960 +▁warn -6961 +▁trig -6962 +▁pin -6963 +hot -6964 +▁trick -6965 +▁phase -6966 +▁depress -6967 +▁rice -6968 +▁Remember -6969 +▁urban -6970 +▁illness -6971 +By -6972 +▁Being -6973 +▁Quality -6974 +iger -6975 +▁agents -6976 +▁Justice -6977 +▁acid -6978 +▁prove -6979 +ba -6980 +▁consistent -6981 +oty -6982 +▁dust -6983 +▁spoke -6984 
+▁Airport -6985 +▁Houston -6986 +▁pitch -6987 +▁Bed -6988 +▁organis -6989 +▁pleasure -6990 +▁arms -6991 +holders -6992 +aints -6993 +▁matches -6994 +▁Medicine -6995 +AA -6996 +ults -6997 +Bl -6998 +%. -6999 +▁Ide -7000 +▁Talk -7001 +▁portion -7002 +▁Conc -7003 +▁index -7004 +▁Line -7005 +▁chances -7006 +ogether -7007 +▁Brazil -7008 +asant -7009 +▁fasc -7010 +▁Fact -7011 +.' -7012 +icit -7013 +▁lapt -7014 +▁newly -7015 +▁chose -7016 +▁Personal -7017 +▁objects -7018 +▁Carl -7019 +▁dynamic -7020 +ensity -7021 +▁breath -7022 +▁finance -7023 +rm -7024 +▁Arizona -7025 +▁refund -7026 +▁Asian -7027 +▁Living -7028 +▁Standard -7029 +▁Prom -7030 +▁proof -7031 +▁seed -7032 +SC -7033 +eling -7034 +▁passing -7035 +▁continuing -7036 +But -7037 +▁visited -7038 +▁represents -7039 +▁Officer -7040 +▁drinking -7041 +▁Give -7042 +site -7043 +ership -7044 +▁iPad -7045 +cket -7046 +▁formed -7047 +▁storm -7048 +▁ultimate -7049 +▁mile -7050 +pack -7051 +inois -7052 +alle -7053 +▁Brad -7054 +▁Mill -7055 +▁roles -7056 +▁border -7057 +▁Estate -7058 +▁forever -7059 +▁MO -7060 +▁discussed -7061 +▁superv -7062 +▁ceremony -7063 +▁Cru -7064 +annels -7065 +▁approval -7066 +iking -7067 +▁Las -7068 +▁zone -7069 +amber -7070 +▁Welcome -7071 +▁Army -7072 +▁Season -7073 +▁Student -7074 +▁id -7075 +▁suc -7076 +she -7077 +▁stim -7078 +▁exposure -7079 +▁recommendations -7080 +adel -7081 +▁gaming -7082 +▁dealing -7083 +stal -7084 +▁sending -7085 +ultural -7086 +▁Oak -7087 +▁Iran -7088 +▁stake -7089 +▁evol -7090 +▁Therefore -7091 +▁phones -7092 +MC -7093 +anes -7094 +▁Sav -7095 +▁Kevin -7096 +▁capabilities -7097 +▁teasp -7098 +▁division -7099 +▁gallery -7100 +▁Webs -7101 +uclear -7102 +Americ -7103 +whel -7104 +amsung -7105 +▁boxes -7106 +▁downtown -7107 +▁saving -7108 +▁presents -7109 +▁collected -7110 +▁holidays -7111 +respond -7112 +▁lawyer -7113 +▁possibility -7114 +▁fairly -7115 +▁Again -7116 +▁implementation -7117 +iki -7118 +▁vulner -7119 +▁pra -7120 +ainless -7121 +▁mand -7122 +▁susp -7123 +▁hat 
-7124 +GA -7125 +ja -7126 +▁ensuring -7127 +▁Choose -7128 +▁permanent -7129 +aper -7130 +▁attractive -7131 +▁pharm -7132 +▁smell -7133 +▁cookies -7134 +▁Administration -7135 +▁constit -7136 +▁flash -7137 +▁Site -7138 +▁industries -7139 +ih -7140 +▁tub -7141 +▁hidden -7142 +▁suggestions -7143 +▁scheme -7144 +aste -7145 +bro -7146 +▁trib -7147 +▁finds -7148 +lers -7149 +▁Experience -7150 +izer -7151 +▁porn -7152 +▁Natural -7153 +▁Brian -7154 +ione -7155 +wear -7156 +urse -7157 +▁recognize -7158 +▁Express -7159 +RS -7160 +▁Kenn -7161 +▁instrument -7162 +missions -7163 +▁facts -7164 +phy -7165 +▁Ju -7166 +▁theory -7167 +▁heads -7168 +▁vari -7169 +pot -7170 +▁priority -7171 +▁mainly -7172 +▁acknow -7173 +zes -7174 +▁($ -7175 +lessly -7176 +▁Meanwhile -7177 +Sc -7178 +▁legislation -7179 +ffered -7180 +rible -7181 +▁reader -7182 +▁Clin -7183 +▁Ros -7184 +▁Isl -7185 +▁bodies -7186 +▁Case -7187 +FA -7188 +▁butt -7189 +▁liber -7190 +▁categories -7191 +▁Chall -7192 +▁posting -7193 +▁realized -7194 +▁mut -7195 +▁Hollywood -7196 +anned -7197 +page -7198 +inson -7199 +▁Software -7200 +▁communications -7201 +▁Vers -7202 +▁Ba -7203 +▁solve -7204 +▁Own -7205 +▁bench -7206 +▁personally -7207 +▁Dun -7208 +▁garlic -7209 +▁Secretary -7210 +▁upgrade -7211 +da -7212 +▁bars -7213 +allas -7214 +▁Queen -7215 +boy -7216 +▁bridge -7217 +phones -7218 +▁Emer -7219 +Book -7220 +EA -7221 +▁Stay -7222 +▁incredibly -7223 +▁USB -7224 +then -7225 +▁ancient -7226 +▁Learning -7227 +▁Policy -7228 +CT -7229 +▁Create -7230 +▁reform -7231 +▁tradition -7232 +esy -7233 +▁|| -7234 +▁permission -7235 +▁hole -7236 +▁Bang -7237 +stra -7238 +ingu -7239 +▁tiss -7240 +osc -7241 +▁Prime -7242 +▁Anal -7243 +▁generate -7244 +▁Yet -7245 +odd -7246 +anny -7247 +ounce -7248 +▁Cand -7249 +▁exec -7250 +▁CN -7251 +▁copyright -7252 +▁packages -7253 +▁calendar -7254 +▁rum -7255 +odge -7256 +▁handling -7257 +tw -7258 +ials -7259 +▁substant -7260 +▁travell -7261 +▁pace -7262 +▁basketball -7263 +▁east -7264 +▁magic -7265 +▁Hold 
-7266 +▁debate -7267 +parent -7268 +OO -7269 +▁victims -7270 +▁raw -7271 +▁claimed -7272 +▁Level -7273 +That -7274 +▁Additionally -7275 +iti -7276 +▁celebration -7277 +▁clar -7278 +▁walked -7279 +▁orange -7280 +▁programming -7281 +▁Jr -7282 +▁doctors -7283 +▁MD -7284 +HA -7285 +ulpt -7286 +▁achieved -7287 +▁fest -7288 +▁giant -7289 +▁cotton -7290 +▁Toronto -7291 +▁absor -7292 +▁forth -7293 +▁purchasing -7294 +▁habit -7295 +onna -7296 +▁prospect -7297 +▁replaced -7298 +▁Cro -7299 +▁Stan -7300 +▁bare -7301 +▁Film -7302 +burgh -7303 +▁fifth -7304 +▁explains -7305 +uls -7306 +▁tooth -7307 +▁Illinois -7308 +▁desired -7309 +▁Studies -7310 +level -7311 +CD -7312 +zing -7313 +isa -7314 +▁king -7315 +▁Tool -7316 +▁manufacturers -7317 +▁spots -7318 +▁titles -7319 +▁gym -7320 +▁saved -7321 +▁Dar -7322 +▁seasons -7323 +▁cuts -7324 +season -7325 +▁somewhere -7326 +▁marked -7327 +▁Auto -7328 +▁proposal -7329 +▁Consult -7330 +▁insight -7331 +▁marks -7332 +▁hotels -7333 +▁initiative -7334 +uster -7335 +▁feelings -7336 +▁venue -7337 +▁slowly -7338 +RL -7339 +▁singer -7340 +▁specialist -7341 +▁suffering -7342 +▁Produ -7343 +▁Catholic -7344 +ila -7345 +▁NFL -7346 +▁expressed -7347 +▁Story -7348 +▁Capital -7349 +▁compat -7350 +▁requests -7351 +▁Irish -7352 +▁drinks -7353 +▁Material -7354 +imize -7355 +▁architecture -7356 +App -7357 +iot -7358 +▁vegetables -7359 +▁Save -7360 +▁Sep -7361 +aron -7362 +▁Agency -7363 +igate -7364 +esh -7365 +▁buyers -7366 +acon -7367 +aters -7368 +▁Joseph -7369 +▁merch -7370 +▁volunteer -7371 +▁gay -7372 +▁exceptional -7373 +▁impossible -7374 +▁stuck -7375 +▁Liber -7376 +▁Table -7377 +▁meets -7378 +▁enables -7379 +▁swimming -7380 +stream -7381 +▁combine -7382 +inton -7383 +▁murder -7384 +▁broke -7385 +bridge -7386 +▁publication -7387 +▁announcement -7388 +▁destroy -7389 +▁tie -7390 +▁extension -7391 +ylvan -7392 +▁causing -7393 +▁ultimately -7394 +▁enem -7395 +VER -7396 +▁consultation -7397 +▁encouraged -7398 +▁reducing -7399 +▁muscle -7400 +▁err -7401 
+▁accomplish -7402 +▁Pakistan -7403 +▁Mess -7404 +regon -7405 +nesota -7406 +▁split -7407 +ologist -7408 +▁packaging -7409 +▁yard -7410 +▁surprising -7411 +▁Mix -7412 +▁lets -7413 +▁Pu -7414 +▁publ -7415 +▁Bell -7416 +ickets -7417 +▁magn -7418 +aid -7419 +▁Short -7420 +▁Vegas -7421 +▁Map -7422 +▁actor -7423 +▁rig -7424 +▁printing -7425 +▁Would -7426 +▁enterprise -7427 +▁engaged -7428 +▁Autom -7429 +▁pit -7430 +lements -7431 +▁describe -7432 +▁Camer -7433 +▁heav -7434 +▁massage -7435 +▁pricing -7436 +run -7437 +▁DI -7438 +bel -7439 +apore -7440 +des -7441 +aska -7442 +▁Motor -7443 +▁electrical -7444 +▁noise -7445 +▁mood -7446 +▁Location -7447 +▁widely -7448 +▁preparation -7449 +▁Kids -7450 +ifer -7451 +▁seeds -7452 +▁reasonable -7453 +▁talked -7454 +▁Pen -7455 +▁enroll -7456 +▁blocks -7457 +▁covering -7458 +▁performances -7459 +▁Labor -7460 +ns -7461 +▁Spain -7462 +▁breaking -7463 +▁expansion -7464 +bell -7465 +▁recognition -7466 +▁pill -7467 +olis -7468 +▁default -7469 +▁framework -7470 +eah -7471 +▁wins -7472 +▁Recent -7473 +▁genuine -7474 +▁overwhel -7475 +▁traveling -7476 +▁remark -7477 +▁blank -7478 +▁Forest -7479 +▁seats -7480 +rage -7481 +▁classroom -7482 +RC -7483 +▁agric -7484 +wan -7485 +▁knock -7486 +inator -7487 +cons -7488 +▁Ira -7489 +▁interactive -7490 +uct -7491 +▁concrete -7492 +▁neighb -7493 +▁Theatre -7494 +▁Ess -7495 +▁CB -7496 +iler -7497 +▁Adam -7498 +▁unw -7499 +▁pand -7500 +▁Gallery -7501 +)|| -7502 +▁Studio -7503 +▁birds -7504 +▁formal -7505 +▁Force -7506 +▁Pin -7507 +▁compr -7508 +▁dishes -7509 +▁Band -7510 +wich -7511 +▁Memorial -7512 +▁writers -7513 +▁Ice -7514 +▁franch -7515 +▁resistance -7516 +▁Following -7517 +▁gall -7518 +▁empty -7519 +▁Rs -7520 +▁Toy -7521 +gypt -7522 +▁brilliant -7523 +▁spray -7524 +▁consists -7525 +▁constant -7526 +ulum -7527 +▁scenes -7528 +▁increasingly -7529 +▁staying -7530 +▁compliance -7531 +proof -7532 +▁Square -7533 +▁incorpor -7534 +▁Mrs -7535 +▁resulting -7536 +▁acting -7537 +▁Davis -7538 +▁Annual -7539 
+EP -7540 +▁duty -7541 +▁suggests -7542 +▁pic -7543 +▁dad -7544 +▁recover -7545 +ludes -7546 +▁managers -7547 +▁Fred -7548 +▁Member -7549 +▁experiment -7550 +nda -7551 +▁Treat -7552 +▁basically -7553 +▁spiritual -7554 +ateful -7555 +axy -7556 +ding -7557 +▁Things -7558 +▁professor -7559 +ifies -7560 +▁anyway -7561 +▁bow -7562 +▁Diego -7563 +▁nights -7564 +▁Paper -7565 +▁Mah -7566 +being -7567 +▁Spirit -7568 +▁mere -7569 +child -7570 +▁Eric -7571 +books -7572 +▁FL -7573 +leep -7574 +▁graphics -7575 +otted -7576 +▁Dam -7577 +▁lists -7578 +▁Partners -7579 +▁Jord -7580 +▁forecast -7581 +▁slic -7582 +▁slot -7583 +▁Solutions -7584 +▁scan -7585 +▁pride -7586 +▁deck -7587 +▁Samsung -7588 +▁Roman -7589 +abetes -7590 +’, -7591 +▁prize -7592 +▁authority -7593 +▁Shipping -7594 +▁producing -7595 +▁Ly -7596 +rated -7597 +▁Interest -7598 +ilton -7599 +alo -7600 +▁centers -7601 +▁clicking -7602 +▁Seattle -7603 +irus -7604 +▁Model -7605 +▁packed -7606 +una -7607 +▁wireless -7608 +▁Gro -7609 +erate -7610 +alse -7611 +▁Books -7612 +▁everywhere -7613 +▁aims -7614 +ghan -7615 +▁legend -7616 +acle -7617 +▁Golden -7618 +▁Minnesota -7619 +▁enthusi -7620 +ashes -7621 +▁whenever -7622 +▁expenses -7623 +vas -7624 +▁Pur -7625 +▁Age -7626 +▁indeed -7627 +▁healing -7628 +▁Limited -7629 +utional -7630 +▁interpret -7631 +▁closing -7632 +▁Cover -7633 +▁talented -7634 +▁singles -7635 +▁anniversary -7636 +▁succeed -7637 +▁inner -7638 +inding -7639 +▁Lew -7640 +making -7641 +▁involves -7642 +rome -7643 +▁Swed -7644 +▁pocket -7645 +ls -7646 +▁riding -7647 +▁unex -7648 +▁connections -7649 +▁Sound -7650 +▁GM -7651 +heast -7652 +▁channels -7653 +▁obtained -7654 +pends -7655 +▁narr -7656 +▁founder -7657 +▁vice -7658 +▁OK -7659 +ylvania -7660 +▁Magazine -7661 +▁Perhaps -7662 +▁displayed -7663 +▁Customer -7664 +▁Dream -7665 +▁bunch -7666 +▁assum -7667 +▁Total -7668 +▁opens -7669 +greg -7670 +▁Collection -7671 +▁delivering -7672 +▁Month -7673 +▁Bad -7674 +▁Dallas -7675 +▁designers -7676 +▁struggle -7677 
+ureau -7678 +▁lemon -7679 +Press -7680 +▁trips -7681 +▁Based -7682 +▁Steel -7683 +▁attrib -7684 +▁differences -7685 +stein -7686 +▁acts -7687 +▁ending -7688 +▁Working -7689 +▁driven -7690 +▁Pict -7691 +lder -7692 +abeth -7693 +▁CP -7694 +nders -7695 +▁Station -7696 +ronics -7697 +▁defined -7698 +▁Mother -7699 +▁watched -7700 +▁complim -7701 +▁improvements -7702 +▁mob -7703 +▁Cloud -7704 +▁primarily -7705 +coin -7706 +▁CL -7707 +▁loving -7708 +▁vintage -7709 +bits -7710 +▁Action -7711 +▁gender -7712 +▁boss -7713 +sters -7714 +▁guaranteed -7715 +▁introduction -7716 +▁Rub -7717 +▁Oregon -7718 +▁booking -7719 +▁Dark -7720 +ambling -7721 +▁returning -7722 +▁Rand -7723 +oom -7724 +▁Sym -7725 +▁sensitive -7726 +▁fits -7727 +▁shouldn -7728 +▁Eastern -7729 +▁SS -7730 +▁podcast -7731 +Fr -7732 +▁apparently -7733 +▁Everyone -7734 +▁Anth -7735 +▁Base -7736 +▁politics -7737 +owa -7738 +▁officially -7739 +pool -7740 +issions -7741 +▁precise -7742 +oned -7743 +▁Common -7744 +▁rug -7745 +▁Products -7746 +rive -7747 +▁alive -7748 +▁headed -7749 +▁Bru -7750 +▁Return -7751 +AB -7752 +▁chopped -7753 +su -7754 +▁Miller -7755 +iders -7756 +▁fing -7757 +▁unus -7758 +▁Jay -7759 +▁Spec -7760 +▁Blog -7761 +▁coat -7762 +▁Change -7763 +▁narrow -7764 +▁highlights -7765 +▁protest -7766 +▁trim -7767 +▁recre -7768 +AND -7769 +▁potentially -7770 +▁honey -7771 +▁shell -7772 +▁Transport -7773 +ailing -7774 +▁percentage -7775 +▁authentic -7776 +▁Austin -7777 +▁filling -7778 +▁tape -7779 +▁maintaining -7780 +▁lin -7781 +▁Capt -7782 +▁analyst -7783 +▁retirement -7784 +▁Cry -7785 +▁casual -7786 +▁speaker -7787 +▁crash -7788 +pson -7789 +atics -7790 +riers -7791 +▁Among -7792 +▁assistant -7793 +▁charity -7794 +▁personality -7795 +▁Corporation -7796 +wart -7797 +▁acquis -7798 +▁scientists -7799 +jo -7800 +▁Kingdom -7801 +▁resident -7802 +▁Guard -7803 +▁falling -7804 +inent -7805 +lose -7806 +scribe -7807 +raid -7808 +▁plot -7809 +▁DO -7810 +▁elev -7811 +▁Iraq -7812 +pection -7813 +iac -7814 +▁bills -7815 
+▁opinions -7816 +onut -7817 +▁Josh -7818 +▁Barb -7819 +▁strike -7820 +▁licensed -7821 +▁aircraft -7822 +▁heading -7823 +ali -7824 +▁CR -7825 +▁Nic -7826 +▁naturally -7827 +▁Dead -7828 +acher -7829 +raction -7830 +▁consumption -7831 +ydney -7832 +▁renov -7833 +▁Sarah -7834 +▁carrying -7835 +▁tired -7836 +▁gentle -7837 +arliam -7838 +▁colours -7839 +Cont -7840 +▁Jewish -7841 +▁Egypt -7842 +▁correspond -7843 +▁obviously -7844 +▁functional -7845 +▁preparing -7846 +asted -7847 +▁involving -7848 +▁suffered -7849 +▁Stephen -7850 +▁iOS -7851 +ilipp -7852 +▁oz -7853 +▁Ap -7854 +▁smartphone -7855 +▁NC -7856 +▁Ven -7857 +▁tap -7858 +teen -7859 +▁intent -7860 +▁winners -7861 +osophy -7862 +▁stronger -7863 +▁controls -7864 +athy -7865 +▁instant -7866 +▁Singapore -7867 +▁CON -7868 +ockey -7869 +▁satisfaction -7870 +oices -7871 +▁communicate -7872 +▁Jason -7873 +▁aggress -7874 +▁integration -7875 +▁Golf -7876 +▁attending -7877 +ilty -7878 +▁colleagues -7879 +hour -7880 +phia -7881 +▁lect -7882 +▁statements -7883 +▁Dave -7884 +▁racing -7885 +ador -7886 +▁Jam -7887 +tics -7888 +anchester -7889 +▁rising -7890 +▁finger -7891 +▁component -7892 +epend -7893 +▁minimal -7894 +▁gained -7895 +▁tracking -7896 +▁ski -7897 +uma -7898 +▁Weight -7899 +▁Atlanta -7900 +▁undert -7901 +iley -7902 +▁Website -7903 +terior -7904 +▁Creek -7905 +▁reduction -7906 +▁Past -7907 +▁behalf -7908 +vard -7909 +▁trailer -7910 +▁bid -7911 +unes -7912 +arliament -7913 +icted -7914 +Man -7915 +rovers -7916 +tery -7917 +lee -7918 +etch -7919 +▁checking -7920 +▁Vice -7921 +▁principles -7922 +▁superior -7923 +▁dollar -7924 +▁signing -7925 +▁preferred -7926 +▁focuses -7927 +▁illegal -7928 +▁findings -7929 +▁displ -7930 +▁tone -7931 +▁deposit -7932 +sylvania -7933 +▁Friends -7934 +▁ED -7935 +inally -7936 +avor -7937 +▁Rog -7938 +adelphia -7939 +▁Lic -7940 +lace -7941 +▁Bit -7942 +▁producer -7943 +ocation -7944 +▁Britain -7945 +▁batt -7946 +▁Korea -7947 +ression -7948 +ucks -7949 +▁enforcement -7950 +intend -7951 
+▁heating -7952 +bow -7953 +▁Image -7954 +▁intelligence -7955 +fall -7956 +▁clubs -7957 +aka -7958 +inals -7959 +amel -7960 +▁Pick -7961 +▁regardless -7962 +▁assault -7963 +▁humans -7964 +▁Tony -7965 +▁emails -7966 +▁divor -7967 +limited -7968 +▁Kansas -7969 +▁participating -7970 +▁Columbia -7971 +brid -7972 +▁removal -7973 +▁newspaper -7974 +haust -7975 +▁; -7976 +iously -7977 +vi -7978 +gent -7979 +▁till -7980 +▁Marc -7981 +▁Uk -7982 +▁array -7983 +---- -7984 +▁Bridge -7985 +htt -7986 +aine -7987 +makers -7988 +zzle -7989 +▁anymore -7990 +▁yield -7991 +▁Winter -7992 +cker -7993 +▁flower -7994 +▁lifetime -7995 +▁acceler -7996 +▁Has -7997 +▁Below -7998 +▁lesson -7999 +UM -8000 +ints -8001 +▁Hi -8002 +roke -8003 +▁compare -8004 +▁roads -8005 +olit -8006 +▁usage -8007 +uten -8008 +▁loose -8009 +▁Hun -8010 +▁Sem -8011 +▁entering -8012 +▁ham -8013 +▁jewelry -8014 +▁Eliz -8015 +▁Quick -8016 +pir -8017 +▁faced -8018 +▁stations -8019 +ventional -8020 +▁Flow -8021 +▁stable -8022 +▁passionate -8023 +▁divid -8024 +ho -8025 +AV -8026 +▁conclus -8027 +giving -8028 +▁Village -8029 +▁intense -8030 +▁CC -8031 +ran -8032 +▁ocean -8033 +tail -8034 +mers -8035 +▁Front -8036 +etary -8037 +▁Resources -8038 +▁expression -8039 +▁Lady -8040 +▁introduce -8041 +▁drama -8042 +▁poet -8043 +▁recip -8044 +▁rural -8045 +▁diseases -8046 +▁Break -8047 +▁pandemic -8048 +onic -8049 +▁occurred -8050 +swe -8051 +▁somewhat -8052 +▁string -8053 +orous -8054 +▁Kitchen -8055 +pending -8056 +▁exception -8057 +▁warranty -8058 +▁NEW -8059 +aza -8060 +▁lie -8061 +▁Tenn -8062 +▁Henry -8063 +▁sharp -8064 +▁olive -8065 +▁careful -8066 +▁strange -8067 +▁workshops -8068 +▁Excell -8069 +▁oppon -8070 +▁viewing -8071 +▁damaged -8072 +▁baseball -8073 +▁BB -8074 +▁compete -8075 +▁EN -8076 +▁strict -8077 +▁Was -8078 +▁factory -8079 +▁solo -8080 +▁literally -8081 +oked -8082 +▁elegant -8083 +▁computers -8084 +▁creativity -8085 +▁whilst -8086 +▁visible -8087 +itled -8088 +winning -8089 +▁shoulder -8090 +▁screens -8091 
+▁era -8092 +▁association -8093 +▁Bol -8094 +EST -8095 +▁EX -8096 +▁Sale -8097 +▁referred -8098 +▁photographs -8099 +oween -8100 +▁Lar -8101 +Up -8102 +▁Due -8103 +▁amounts -8104 +▁exceed -8105 +acular -8106 +▁DJ -8107 +▁visits -8108 +▁Baby -8109 +▁Without -8110 +▁attach -8111 +▁Impro -8112 +▁Wilson -8113 +liers -8114 +▁conflict -8115 +▁flying -8116 +inity -8117 +▁SM -8118 +▁Ministry -8119 +▁stead -8120 +▁limits -8121 +EG -8122 +▁depends -8123 +▁je -8124 +▁lady -8125 +walk -8126 +▁withd -8127 +acc -8128 +▁twist -8129 +▁Process -8130 +▁Body -8131 +▁fraud -8132 +cycle -8133 +▁Deb -8134 +from -8135 +▁AB -8136 +▁Edition -8137 +▁SEO -8138 +▁smoke -8139 +that -8140 +▁Pennsylvania -8141 +▁Steven -8142 +▁Sug -8143 +▁Creative -8144 +▁artwork -8145 +▁chips -8146 +▁engaging -8147 +▁phr -8148 +▁Grow -8149 +▁drawn -8150 +▁signature -8151 +▁coaching -8152 +Do -8153 +▁flexibility -8154 +riculum -8155 +▁developer -8156 +▁bact -8157 +▁samples -8158 +▁resort -8159 +▁newsletter -8160 +▁minister -8161 +▁Sydney -8162 +achelor -8163 +▁Jordan -8164 +icide -8165 +usive -8166 +▁Bry -8167 +▁Stone -8168 +▁Single -8169 +umer -8170 +▁golden -8171 +▁speakers -8172 +▁Money -8173 +▁regions -8174 +▁Forum -8175 +▁Must -8176 +▁accuracy -8177 +▁invite -8178 +▁puts -8179 +▁setup -8180 +▁NAS -8181 +▁Perfect -8182 +pton -8183 +▁! 
-8184 +▁accused -8185 +▁fault -8186 +▁Fast -8187 +▁persons -8188 +yes -8189 +lier -8190 +awa -8191 +▁Mu -8192 +unicip -8193 +▁decent -8194 +▁Tru -8195 +▁consideration -8196 +▁Eth -8197 +▁Bern -8198 +▁Holy -8199 +▁weren -8200 +▁anxiety -8201 +▁sees -8202 +▁electricity -8203 +▁Sche -8204 +▁shirt -8205 +obe -8206 +▁Kelly -8207 +▁bold -8208 +ubs -8209 +Te -8210 +▁participation -8211 +▁discl -8212 +▁Staff -8213 +▁farmers -8214 +▁Bush -8215 +atile -8216 +▁wondering -8217 +▁focusing -8218 +▁Supreme -8219 +▁Hills -8220 +▁Ave -8221 +▁temperatures -8222 +▁necessarily -8223 +PD -8224 +anted -8225 +▁Greg -8226 +month -8227 +▁grateful -8228 +▁interviews -8229 +▁subsequ -8230 +found -8231 +▁contributions -8232 +▁cabin -8233 +▁virus -8234 +▁insights -8235 +▁Manchester -8236 +▁Philadelphia -8237 +anguage -8238 +uminum -8239 +mates -8240 +onsin -8241 +▁yoga -8242 +..... -8243 +She -8244 +▁Histor -8245 +▁boards -8246 +▁applying -8247 +▁fundra -8248 +▁NO -8249 +▁rely -8250 +▁afraid -8251 +▁lip -8252 +▁loyal -8253 +▁sculpt -8254 +isconsin -8255 +▁HERE -8256 +▁Harry -8257 +▁Father -8258 +▁texture -8259 +otive -8260 +mble -8261 +▁tor -8262 +▁Kind -8263 +▁reaction -8264 +▁Private -8265 +▁Details -8266 +▁seam -8267 +ifting -8268 +▁childhood -8269 +▁stages -8270 +▁controlled -8271 +style -8272 +▁structures -8273 +▁raising -8274 +▁recon -8275 +▁hung -8276 +▁cameras -8277 +▁Hay -8278 +▁brush -8279 +▁vill -8280 +▁lies -8281 +▁compensation -8282 +Me -8283 +▁phen -8284 +▁wanting -8285 +▁wake -8286 +▁Reading -8287 +▁blind -8288 +bourne -8289 +▁Nut -8290 +athered -8291 +▁promoting -8292 +▁conserv -8293 +▁ear -8294 +▁Scotland -8295 +▁weapons -8296 +acious -8297 +▁personnel -8298 +TER -8299 +oted -8300 +▁struggling -8301 +▁checked -8302 +oken -8303 +▁unknown -8304 +▁generated -8305 +▁Eag -8306 +iary -8307 +▁writes -8308 +▁cul -8309 +claim -8310 +onder -8311 +▁zero -8312 +▁synt -8313 +▁param -8314 +▁Comb -8315 +▁Flo -8316 +▁clock -8317 +ista -8318 +quis -8319 +▁Tel -8320 +▁pig -8321 +▁Him -8322 
+▁sour -8323 +step -8324 +▁tweet -8325 +▁Corn -8326 +▁Everything -8327 +including -8328 +▁panels -8329 +▁tied -8330 +▁arrival -8331 +▁Comments -8332 +▁Explore -8333 +▁commonly -8334 +▁reveal -8335 +▁Install -8336 +▁mold -8337 +▁yours -8338 +▁Fab -8339 +▁signal -8340 +▁Trade -8341 +▁graphic -8342 +▁semi -8343 +CO -8344 +▁Modern -8345 +▁convenience -8346 +▁champion -8347 +▁Civil -8348 +▁myst -8349 +▁recruit -8350 +▁territ -8351 +▁teaspoon -8352 +▁mac -8353 +▁promotion -8354 +high -8355 +▁vess -8356 +▁Matthew -8357 +▁Ever -8358 +date -8359 +▁ly -8360 +▁Attorney -8361 +▁chest -8362 +▁Core -8363 +▁opposite -8364 +aire -8365 +gon -8366 +▁laptop -8367 +▁wave -8368 +▁controvers -8369 +▁Exp -8370 +▁Regional -8371 +Sp -8372 +▁Appro -8373 +▁warning -8374 +chers -8375 +▁comparison -8376 +wr -8377 +▁exposed -8378 +▁entrance -8379 +▁Iowa -8380 +▁lake -8381 +▁harder -8382 +▁PH -8383 +elled -8384 +▁CS -8385 +ulating -8386 +▁borrow -8387 +▁SE -8388 +▁Protection -8389 +obody -8390 +▁exploring -8391 +▁Arm -8392 +▁Muslim -8393 +▁elected -8394 +▁Dim -8395 +mun -8396 +▁criteria -8397 +▁Sport -8398 +oof -8399 +sole -8400 +▁wooden -8401 +▁gotten -8402 +▁codes -8403 +▁Victoria -8404 +▁donation -8405 +▁remained -8406 +▁flag -8407 +▁Poly -8408 +'. 
-8409 +▁scoring -8410 +oda -8411 +▁escape -8412 +vironments -8413 +stract -8414 +▁laid -8415 +▁languages -8416 +▁Prim -8417 +edge -8418 +▁vine -8419 +och -8420 +sex -8421 +▁wal -8422 +eb -8423 +▁nuclear -8424 +▁hanging -8425 +▁subscription -8426 +ervations -8427 +▁asset -8428 +ellect -8429 +▁outcome -8430 +▁Patrick -8431 +▁Bh -8432 +▁greatly -8433 +▁Kong -8434 +▁Pitt -8435 +▁Ont -8436 +▁definition -8437 +▁thinks -8438 +▁diversity -8439 +▁Ox -8440 +▁Orange -8441 +sized -8442 +sea -8443 +gor -8444 +▁sexy -8445 +▁addresses -8446 +▁painted -8447 +▁toile -8448 +▁reaching -8449 +apping -8450 +week -8451 +▁Editor -8452 +▁Hawai -8453 +▁Elizabeth -8454 +▁Philipp -8455 +▁isol -8456 +▁scores -8457 +▁pets -8458 +▁Published -8459 +hol -8460 +▁unexpected -8461 +war -8462 +itution -8463 +roit -8464 +▁Classic -8465 +angers -8466 +▁repairs -8467 +▁Description -8468 +▁Prior -8469 +▁shipped -8470 +▁union -8471 +▁Features -8472 +▁Sex -8473 +▁Innov -8474 +▁sections -8475 +power -8476 +▁Psych -8477 +▁uncom -8478 +▁clip -8479 +▁repeated -8480 +▁Ah -8481 +▁admitted -8482 +words -8483 +▁durable -8484 +▁demands -8485 +▁export -8486 +olec -8487 +IGH -8488 +▁outcomes -8489 +ira -8490 +oen -8491 +nce -8492 +▁functionality -8493 +▁Wisconsin -8494 +▁Bowl -8495 +▁everybody -8496 +ivalent -8497 +▁tsp -8498 +▁complicated -8499 +EW -8500 +▁sin -8501 +char -8502 +▁plane -8503 +▁Lyn -8504 +▁employers -8505 +▁Youth -8506 +screen -8507 +▁Assistant -8508 +▁tutorial -8509 +▁Cool -8510 +▁fiber -8511 +▁UC -8512 +▁ALL -8513 +▁safely -8514 +▁struck -8515 +rets -8516 +▁twenty -8517 +▁Manufact -8518 +▁concepts -8519 +▁dough -8520 +▁employer -8521 +▁representative -8522 +▁networking -8523 +▁alarm -8524 +▁toys -8525 +▁plain -8526 +▁Bat -8527 +▁Gi -8528 +▁Bow -8529 +▁Hur -8530 +▁requirement -8531 +▁obst -8532 +▁tum -8533 +▁contribution -8534 +▁Collect -8535 +rug -8536 +▁certificate -8537 +▁Ten -8538 +▁Advis -8539 +▁studying -8540 +▁Var -8541 +▁Additional -8542 +▁evaluation -8543 +▁revolution -8544 +▁instruction 
-8545 +▁Chapter -8546 +▁Simply -8547 +eping -8548 +▁deploy -8549 +▁jail -8550 +erning -8551 +▁cooked -8552 +heim -8553 +▁Rick -8554 +▁Industry -8555 +▁bands -8556 +▁completion -8557 +▁newest -8558 +▁matching -8559 +▁Latin -8560 +▁beef -8561 +▁tradem -8562 +minute -8563 +ruption -8564 +▁calm -8565 +▁Exchange -8566 +▁temporary -8567 +dale -8568 +arring -8569 +▁Blu -8570 +rog -8571 +▁cher -8572 +ieties -8573 +▁strongly -8574 +▁Indiana -8575 +▁possess -8576 +▁Nav -8577 +▁guilty -8578 +▁Complete -8579 +▁Halloween -8580 +▁Championship -8581 +▁initially -8582 +▁bass -8583 +aver -8584 +▁deeply -8585 +▁hip -8586 +▁waters -8587 +▁holes -8588 +▁arr -8589 +▁maintained -8590 +▁Kar -8591 +utter -8592 +ko -8593 +▁organisation -8594 +▁Send -8595 +lude -8596 +pense -8597 +▁Reviews -8598 +▁Employ -8599 +▁releases -8600 +▁romantic -8601 +▁screw -8602 +▁residence -8603 +▁Entertain -8604 +▁Glass -8605 +▁hearts -8606 +▁reception -8607 +▁pizza -8608 +ocal -8609 +▁Host -8610 +▁Cancer -8611 +▁Method -8612 +%, -8613 +▁hes -8614 +▁Currently -8615 +▁requested -8616 +▁actress -8617 +oz -8618 +enities -8619 +Get -8620 +erving -8621 +xic -8622 +oir -8623 +▁salad -8624 +▁blogs -8625 +▁exhaust -8626 +▁voters -8627 +Tr -8628 +▁Viet -8629 +▁supplement -8630 +▁approaches -8631 +vals -8632 +▁singing -8633 +▁satisfied -8634 +▁Donald -8635 +▁subjects -8636 +▁Circ -8637 +▁tablet -8638 +▁Independ -8639 +▁haz -8640 +▁Days -8641 +ounge -8642 +▁Brother -8643 +▁coal -8644 +▁Finance -8645 +▁loud -8646 +▁sought -8647 +▁sorry -8648 +iche -8649 +IST -8650 +▁represented -8651 +▁Nation -8652 +▁boot -8653 +▁swing -8654 +▁Tam -8655 +▁Ak -8656 +▁Update -8657 +▁Questions -8658 +▁southern -8659 +▁franchise -8660 +▁carpet -8661 +▁Content -8662 +▁Six -8663 +▁kil -8664 +oore -8665 +▁prime -8666 +▁bone -8667 +▁Ban -8668 +▁Anton -8669 +▁Drag -8670 +▁slee -8671 +▁III -8672 +▁Five -8673 +▁insert -8674 +▁athletes -8675 +▁reject -8676 +▁convert -8677 +▁Sir -8678 +▁frustr -8679 +▁Bud -8680 +▁stored -8681 +▁infection -8682 
+▁automatic -8683 +▁Hong -8684 +▁cancell -8685 +▁flights -8686 +▁overse -8687 +▁environments -8688 +▁Planning -8689 +▁bull -8690 +▁Jess -8691 +▁tact -8692 +▁errors -8693 +▁inventory -8694 +▁Bath -8695 +▁desktop -8696 +oln -8697 +▁Du -8698 +charge -8699 +▁chef -8700 +▁prayer -8701 +▁hopefully -8702 +▁orient -8703 +▁sink -8704 +▁dual -8705 +▁Round -8706 +▁Sony -8707 +▁falls -8708 +▁unusual -8709 +▁investments -8710 +▁donations -8711 +▁comedy -8712 +agen -8713 +▁Self -8714 +▁BM -8715 +▁delivers -8716 +▁Wars -8717 +izon -8718 +▁Study -8719 +▁stamp -8720 +▁gap -8721 +▁nose -8722 +▁pregnant -8723 +▁discussions -8724 +itis -8725 +link -8726 +▁Ocean -8727 +ormal -8728 +▁TR -8729 +urious -8730 +ificial -8731 +▁accommodation -8732 +▁false -8733 +▁granted -8734 +▁initiatives -8735 +▁ft -8736 +▁transactions -8737 +▁McG -8738 +▁stylish -8739 +aret -8740 +game -8741 +▁react -8742 +▁rapidly -8743 +▁Restaur -8744 +▁Iss -8745 +▁Lewis -8746 +▁transaction -8747 +▁strengthen -8748 +▁Univers -8749 +▁worn -8750 +▁ladies -8751 +▁lens -8752 +▁indoor -8753 +ART -8754 +▁Tal -8755 +▁pregnancy -8756 +▁experim -8757 +▁hiring -8758 +▁dispos -8759 +▁ET -8760 +▁directions -8761 +oons -8762 +▁certification -8763 +oa -8764 +▁ensures -8765 +imp -8766 +▁finishing -8767 +▁crypt -8768 +ji -8769 +Qu -8770 +▁Sciences -8771 +▁Property -8772 +ingham -8773 +hire -8774 +▁largely -8775 +▁edit -8776 +via -8777 +▁resume -8778 +▁Large -8779 +Every -8780 +▁extraord -8781 +amental -8782 +▁Major -8783 +olly -8784 +▁Exam -8785 +▁Electric -8786 +▁deeper -8787 +▁therap -8788 +lah -8789 +▁hyp -8790 +▁Saint -8791 +▁Ren -8792 +otes -8793 +▁acquired -8794 +▁trusted -8795 +▁surve -8796 +▁Benef -8797 +▁Greek -8798 +df -8799 +▁Style -8800 +▁catalog -8801 +▁cats -8802 +etts -8803 +▁restrictions -8804 +▁Fresh -8805 +▁Prince -8806 +▁branch -8807 +▁Performance -8808 +▁ranging -8809 +HL -8810 +▁globe -8811 +▁Application -8812 +rape -8813 +▁Less -8814 +▁aimed -8815 +uties -8816 +▁collections -8817 +ds -8818 +▁hitting -8819 +▁WW 
-8820 +igious -8821 +piece -8822 +▁Michel -8823 +▁mild -8824 +▁Democratic -8825 +▁emerging -8826 +▁underst -8827 +▁Computer -8828 +▁correctly -8829 +▁Delivery -8830 +Ne -8831 +ensus -8832 +▁makeup -8833 +world -8834 +elle -8835 +▁HP -8836 +▁expanded -8837 +aug -8838 +▁Architect -8839 +▁literature -8840 +▁attitude -8841 +orship -8842 +▁targeted -8843 +▁Comment -8844 +TE -8845 +▁Early -8846 +onymous -8847 +▁finest -8848 +▁workplace -8849 +▁mistake -8850 +orses -8851 +▁Course -8852 +▁Op -8853 +Hz -8854 +▁gate -8855 +▁contained -8856 +ffective -8857 +▁diamond -8858 +▁Barn -8859 +wall -8860 +agers -8861 +▁fabulous -8862 +ALL -8863 +▁Snow -8864 +isdom -8865 +▁tissue -8866 +▁Ross -8867 +▁surge -8868 +▁Parent -8869 +▁rib -8870 +▁Such -8871 +American -8872 +doors -8873 +▁BBC -8874 +▁COM -8875 +IVE -8876 +▁alt -8877 +▁hell -8878 +▁sed -8879 +ceeds -8880 +▁tube -8881 +▁tons -8882 +▁homeown -8883 +▁Tro -8884 +▁contracts -8885 +▁Environmental -8886 +▁experiencing -8887 +▁offensive -8888 +nie -8889 +▁mask -8890 +achus -8891 +uana -8892 +▁skilled -8893 +▁weigh -8894 +▁estimate -8895 +▁sne -8896 +bound -8897 +iffe -8898 +▁containing -8899 +▁appointed -8900 +▁Section -8901 +▁sophist -8902 +▁Challenge -8903 +▁rolling -8904 +UST -8905 +swered -8906 +▁refrig -8907 +▁bub -8908 +▁Laure -8909 +▁Hom -8910 +▁SA -8911 +▁themes -8912 +▁demonstrate -8913 +▁Premier -8914 +▁engines -8915 +▁suffer -8916 +essee -8917 +▁Entertainment -8918 +▁sufficient -8919 +tes -8920 +▁telephone -8921 +itate -8922 +▁weird -8923 +othe -8924 +uters -8925 +▁container -8926 +Free -8927 +▁editing -8928 +rus -8929 +▁Moon -8930 +viously -8931 +▁Advent -8932 +▁stayed -8933 +▁Os -8934 +▁Text -8935 +▁personalized -8936 +han -8937 +covery -8938 +PL -8939 +UD -8940 +▁subsid -8941 +pay -8942 +▁equally -8943 +▁Song -8944 +▁Celebr -8945 +▁ministry -8946 +▁workout -8947 +▁copies -8948 +▁assume -8949 +▁segment -8950 +▁formula -8951 +▁defensive -8952 +▁Integr -8953 +graduate -8954 +▁math -8955 +▁Construction -8956 +incoln -8957 
+▁floors -8958 +▁aged -8959 +orneys -8960 +▁Side -8961 +▁photographer -8962 +▁responded -8963 +core -8964 +▁Cooper -8965 +▁voted -8966 +▁Server -8967 +▁Buff -8968 +▁drives -8969 +raine -8970 +▁sentence -8971 +▁tours -8972 +▁roots -8973 +aya -8974 +ela -8975 +▁Anderson -8976 +▁couples -8977 +▁recru -8978 +▁Dur -8979 +aga -8980 +▁Events -8981 +▁Advanced -8982 +▁drain -8983 +▁utility -8984 +orms -8985 +▁breaks -8986 +▁shapes -8987 +▁BC -8988 +▁Compet -8989 +▁outs -8990 +▁’ -8991 +▁indicate -8992 +▁Channel -8993 +▁purchases -8994 +▁stood -8995 +Last -8996 +▁beans -8997 +▁pilot -8998 +▁viewed -8999 +▁inspect -9000 +▁admit -9001 +▁nursing -9002 +▁layout -9003 +▁Double -9004 +cut -9005 +icking -9006 +achusetts -9007 +▁aest -9008 +▁Linux -9009 +▁stability -9010 +▁wise -9011 +▁advantages -9012 +▁Galaxy -9013 +otte -9014 +▁muscles -9015 +▁wrap -9016 +▁WordPress -9017 +gi -9018 +▁Bring -9019 +▁sear -9020 +▁Veter -9021 +▁streaming -9022 +From -9023 +▁ranked -9024 +inar -9025 +▁Allen -9026 +gree -9027 +▁Lock -9028 +||$ -9029 +▁Les -9030 +▁Laur -9031 +▁powered -9032 +▁buff -9033 +▁Inn -9034 +▁hosts -9035 +▁Tickets -9036 +ouses -9037 +▁displays -9038 +▁outfit -9039 +oli -9040 +▁Iron -9041 +▁Sant -9042 +▁keys -9043 +▁religion -9044 +▁Ath -9045 +▁Along -9046 +▁campaigns -9047 +home -9048 +▁Mind -9049 +▁Hair -9050 +iour -9051 +▁align -9052 +▁conscious -9053 +▁Shel -9054 +▁amend -9055 +bered -9056 +▁excite -9057 +▁Grant -9058 +▁leaf -9059 +▁Detroit -9060 +▁Anthony -9061 +▁grace -9062 +▁explan -9063 +▁belt -9064 +▁login -9065 +▁nutrition -9066 +hell -9067 +▁Near -9068 +▁lowest -9069 +▁Mail -9070 +▁IM -9071 +▁Gre -9072 +iscal -9073 +▁emotions -9074 +▁studied -9075 +▁File -9076 +▁killing -9077 +▁equivalent -9078 +▁Sweet -9079 +▁onion -9080 +▁Overall -9081 +▁Football -9082 +▁alert -9083 +▁respectively -9084 +SW -9085 +▁cam -9086 +▁Thus -9087 +▁Heat -9088 +▁accounting -9089 +▁neither -9090 +zed -9091 +▁Final -9092 +▁peak -9093 +inyl -9094 +▁productivity -9095 +looking -9096 +▁Albert -9097 
+▁equity -9098 +▁fewer -9099 +utor -9100 +▁FR -9101 +▁statistics -9102 +▁evil -9103 +▁Kir -9104 +▁Marsh -9105 +cor -9106 +ESS -9107 +▁SD -9108 +OG -9109 +▁junior -9110 +ouri -9111 +▁organisations -9112 +▁neut -9113 +▁Maryland -9114 +▁god -9115 +▁Categ -9116 +▁Tree -9117 +▁Mini -9118 +rat -9119 +▁importantly -9120 +Post -9121 +▁Mayor -9122 +▁institution -9123 +ellite -9124 +▁encounter -9125 +athon -9126 +▁Title -9127 +▁Ontario -9128 +iatric -9129 +▁Nig -9130 +▁Girls -9131 +▁objective -9132 +lined -9133 +▁respective -9134 +opher -9135 +fire -9136 +ervice -9137 +▁letting -9138 +cery -9139 +▁Denver -9140 +▁Records -9141 +▁locally -9142 +undry -9143 +ighth -9144 +umni -9145 +▁deserve -9146 +loved -9147 +▁Address -9148 +ECT -9149 +▁Clinton -9150 +Ab -9151 +▁parks -9152 +▁Register -9153 +▁comprom -9154 +!!!! -9155 +▁Roll -9156 +▁Valent -9157 +▁URL -9158 +▁veteran -9159 +▁dess -9160 +wa -9161 +▁layers -9162 +▁babies -9163 +▁compliment -9164 +▁Article -9165 +▁characteristics -9166 +▁Index -9167 +▁earnings -9168 +▁operated -9169 +▁Clark -9170 +email -9171 +▁maps -9172 +▁adopted -9173 +roduction -9174 +▁combat -9175 +▁banking -9176 +KE -9177 +▁beneficial -9178 +▁suppliers -9179 +▁por -9180 +▁Always -9181 +▁entrepreneurs -9182 +▁Hu -9183 +onda -9184 +criptions -9185 +▁votes -9186 +▁Release -9187 +▁ru -9188 +▁Polit -9189 +▁stocks -9190 +▁mistakes -9191 +▁Sure -9192 +▁losses -9193 +▁Coach -9194 +▁balls -9195 +fa -9196 +▁Trail -9197 +▁worried -9198 +ILL -9199 +py -9200 +▁Inv -9201 +dri -9202 +▁Birth -9203 +▁possibilities -9204 +▁resid -9205 +▁diabetes -9206 +pany -9207 +▁rubber -9208 +▁patch -9209 +lahoma -9210 +▁Hart -9211 +▁Trad -9212 +ousing -9213 +▁Chem -9214 +▁Craft -9215 +assion -9216 +▁hide -9217 +oding -9218 +▁spir -9219 +mate -9220 +FF -9221 +▁complement -9222 +terest -9223 +▁pursue -9224 +▁inspire -9225 +▁expanding -9226 +▁Az -9227 +▁Rain -9228 +▁AI -9229 +▁attempts -9230 +▁Brew -9231 +▁walks -9232 +era -9233 +▁refres -9234 +▁dies -9235 +▁pul -9236 +ogen -9237 +▁Speed 
-9238 +▁delighted -9239 +overs -9240 +▁graduated -9241 +▁ownership -9242 +olen -9243 +oto -9244 +▁continu -9245 +▁acquisition -9246 +BM -9247 +▁intention -9248 +▁venture -9249 +▁shortly -9250 +▁prohib -9251 +Pr -9252 +▁Massachusetts -9253 +grade -9254 +▁laser -9255 +QL -9256 +▁aer -9257 +▁brothers -9258 +▁Olympic -9259 +▁auction -9260 +abama -9261 +▁consequences -9262 +▁abilities -9263 +▁sevent -9264 +▁vendors -9265 +▁tender -9266 +▁picking -9267 +▁checks -9268 +ijuana -9269 +▁Da -9270 +▁bomb -9271 +▁aband -9272 +▁depression -9273 +▁spectacular -9274 +bie -9275 +▁hunt -9276 +▁Prep -9277 +cked -9278 +itches -9279 +dep -9280 +▁therm -9281 +▁stainless -9282 +angle -9283 +▁distributed -9284 +', -9285 +▁ske -9286 +▁crown -9287 +▁unif -9288 +▁celebrated -9289 +▁oral -9290 +▁squad -9291 +▁occurs -9292 +View -9293 +▁Ult -9294 +▁implemented -9295 +ceived -9296 +▁Schools -9297 +▁reviewed -9298 +▁Future -9299 +▁bol -9300 +▁Mort -9301 +▁exercises -9302 +▁suite -9303 +▁secondary -9304 +▁Gift -9305 +▁gathering -9306 +ii -9307 +▁Jane -9308 +▁consistently -9309 +▁pointed -9310 +▁representatives -9311 +▁reportedly -9312 +▁northern -9313 +▁entitled -9314 +berries -9315 +▁vibrant -9316 +▁inspection -9317 +incess -9318 +▁survive -9319 +▁chronic -9320 +▁lawyers -9321 +▁Years -9322 +▁Pers -9323 +▁Rober -9324 +overty -9325 +▁Tell -9326 +▁passes -9327 +▁strip -9328 +▁gig -9329 +▁instantly -9330 +▁Unt -9331 +▁Remove -9332 +through -9333 +▁retired -9334 +▁cater -9335 +unate -9336 +▁shr -9337 +▁coaches -9338 +▁retailers -9339 +▁vert -9340 +▁enhanced -9341 +▁appreciated -9342 +▁Bird -9343 +▁Machine -9344 +▁disaster -9345 +▁bes -9346 +▁Stadium -9347 +▁deadline -9348 +store -9349 +▁investing -9350 +shop -9351 +▁Corp -9352 +ortion -9353 +▁hydro -9354 +▁Tips -9355 +▁argument -9356 +▁Gun -9357 +▁pip -9358 +aha -9359 +▁gre -9360 +▁Tennessee -9361 +▁Anyone -9362 +▁brew -9363 +▁transmission -9364 +▁Wales -9365 +urities -9366 +▁fiction -9367 +oust -9368 +gov -9369 +▁Simon -9370 +▁curriculum -9371 
+▁mountains -9372 +olar -9373 +abil -9374 +▁Better -9375 +▁ratio -9376 +▁reply -9377 +▁universities -9378 +▁advertise -9379 +tical -9380 +mouth -9381 +▁Sab -9382 +build -9383 +▁Beh -9384 +▁overnight -9385 +▁sacr -9386 +▁soup -9387 +▁knee -9388 +▁iconic -9389 +mo -9390 +▁tackle -9391 +mont -9392 +inst -9393 +▁hyper -9394 +▁Unit -9395 +▁privile -9396 +▁thousand -9397 +ussion -9398 +▁suddenly -9399 +erk -9400 +▁Pear -9401 +idal -9402 +ami -9403 +▁Lincoln -9404 +▁Wire -9405 +▁shed -9406 +▁CT -9407 +encing -9408 +▁restore -9409 +▁impressed -9410 +▁concerning -9411 +Pe -9412 +iah -9413 +amics -9414 +itty -9415 +▁cow -9416 +▁amongst -9417 +▁Communications -9418 +▁followers -9419 +ouver -9420 +▁Relations -9421 +▁documentation -9422 +▁philosophy -9423 +▁answered -9424 +▁Commercial -9425 +sect -9426 +▁offense -9427 +▁western -9428 +After -9429 +▁HR -9430 +▁repeat -9431 +▁decline -9432 +▁beautifully -9433 +▁circle -9434 +▁abroad -9435 +▁fundamental -9436 +airy -9437 +▁championship -9438 +▁extent -9439 +osit -9440 +▁stom -9441 +owned -9442 +▁heavily -9443 +▁explos -9444 +eters -9445 +▁servers -9446 +▁compatible -9447 +FT -9448 +fish -9449 +▁Jean -9450 +▁PL -9451 +▁Governor -9452 +Des -9453 +▁heritage -9454 +▁contributed -9455 +apolis -9456 +▁Harris -9457 +▁ink -9458 +▁credits -9459 +Not -9460 +amon -9461 +▁wildlife -9462 +▁passengers -9463 +▁Wine -9464 +▁interaction -9465 +inth -9466 +sis -9467 +hyth -9468 +▁quotes -9469 +▁describes -9470 +ugs -9471 +rection -9472 +Ind -9473 +▁captured -9474 +▁phenomen -9475 +▁Nature -9476 +▁producers -9477 +▁Gas -9478 +▁Kit -9479 +▁Roy -9480 +▁Portland -9481 +▁IC -9482 +▁beds -9483 +▁wheels -9484 +▁Springs -9485 +▁gathered -9486 +▁adequ -9487 +ellectual -9488 +gorith -9489 +gged -9490 +pon -9491 +▁perm -9492 +▁User -9493 +▁semin -9494 +▁handy -9495 +▁Burn -9496 +▁Delhi -9497 +gener -9498 +▁discounts -9499 +ustration -9500 +▁Turkey -9501 +▁minds -9502 +▁fruits -9503 +▁salary -9504 +▁Rail -9505 +▁operational -9506 +▁sponsored -9507 +▁dramatic 
-9508 +▁currency -9509 +▁Afghan -9510 +▁Making -9511 +▁applicable -9512 +▁meters -9513 +▁Ast -9514 +▁assigned -9515 +▁SO -9516 +semble -9517 +▁Morgan -9518 +▁Fashion -9519 +▁generations -9520 +abling -9521 +▁Several -9522 +quarters -9523 +▁Updated -9524 +ICE -9525 +▁customized -9526 +▁excellence -9527 +▁overcome -9528 +▁periods -9529 +enth -9530 +▁mature -9531 +▁happiness -9532 +ima -9533 +▁targets -9534 +▁Cart -9535 +▁Justin -9536 +▁rated -9537 +▁bedrooms -9538 +▁Bab -9539 +▁NBA -9540 +▁Vit -9541 +▁portra -9542 +▁indicated -9543 +▁moisture -9544 +▁Represent -9545 +▁Altern -9546 +▁chairman -9547 +▁compact -9548 +▁Rap -9549 +▁Judge -9550 +▁cann -9551 +▁continuous -9552 +▁Oklahoma -9553 +▁empower -9554 +▁divorce -9555 +▁template -9556 +FO -9557 +iere -9558 +▁representing -9559 +▁Results -9560 +enna -9561 +▁vanilla -9562 +ancouver -9563 +▁dancing -9564 +▁Safe -9565 +▁scratch -9566 +▁Doctor -9567 +▁Cow -9568 +▁conversations -9569 +▁Alabama -9570 +▁alter -9571 +▁hospitals -9572 +olds -9573 +offs -9574 +▁moon -9575 +Res -9576 +▁medication -9577 +▁sleeping -9578 +▁Authority -9579 +▁stack -9580 +▁accommodate -9581 +▁Tan -9582 +racy -9583 +▁Easter -9584 +▁Rh -9585 +▁meaningful -9586 +▁sectors -9587 +▁Dress -9588 +▁lasting -9589 +▁actively -9590 +▁scar -9591 +▁harvest -9592 +▁apple -9593 +rot -9594 +▁observed -9595 +▁terr -9596 +▁Lat -9597 +olid -9598 +▁races -9599 +▁Balt -9600 +ibl -9601 +iate -9602 +▁Lam -9603 +uition -9604 +▁drum -9605 +ITY -9606 +▁Getting -9607 +▁okay -9608 +atal -9609 +▁rehab -9610 +▁reserve -9611 +orse -9612 +▁voting -9613 +▁hub -9614 +▁elections -9615 +▁nail -9616 +▁gambling -9617 +▁cust -9618 +May -9619 +▁compan -9620 +▁Coc -9621 +▁excitement -9622 +eman -9623 +▁impression -9624 +▁entries -9625 +▁Dor -9626 +▁Salt -9627 +works -9628 +▁comic -9629 +itzer -9630 +▁ware -9631 +rix -9632 +▁wider -9633 +▁Chel -9634 +agan -9635 +▁potatoes -9636 +▁shade -9637 +▁Democrats -9638 +▁evolution -9639 +▁entertaining -9640 +this -9641 +▁rescue -9642 +ropical -9643 
+▁straw -9644 +▁lun -9645 +▁bell -9646 +▁Cher -9647 +▁supporters -9648 +▁protocol -9649 +▁pushed -9650 +ika -9651 +▁alike -9652 +▁smoking -9653 +▁Deep -9654 +▁hun -9655 +▁Cash -9656 +▁Affairs -9657 +▁Atlantic -9658 +DC -9659 +oenix -9660 +▁Move -9661 +▁coconut -9662 +▁chip -9663 +▁existence -9664 +▁remem -9665 +▁Rout -9666 +igen -9667 +town -9668 +aware -9669 +aux -9670 +▁Andy -9671 +▁eliminate -9672 +ige -9673 +▁pushing -9674 +▁Chairman -9675 +▁pour -9676 +▁responses -9677 +▁heaven -9678 +Welcome -9679 +▁stops -9680 +▁Plant -9681 +oper -9682 +▁Utah -9683 +▁inquir -9684 +▁mirror -9685 +▁opposition -9686 +iameter -9687 +▁developments -9688 +▁publishing -9689 +▁frozen -9690 +opping -9691 +▁Navy -9692 +▁Aqu -9693 +?! -9694 +▁celebrating -9695 +▁Holiday -9696 +▁amenities -9697 +▁Imp -9698 +▁sheets -9699 +▁generous -9700 +▁viewers -9701 +▁substantial -9702 +▁Moore -9703 +▁withdraw -9704 +▁objectives -9705 +▁fired -9706 +▁Malays -9707 +was -9708 +▁chemicals -9709 +▁Agr -9710 +▁Ped -9711 +▁accord -9712 +▁processed -9713 +▁Budd -9714 +▁Py -9715 +DP -9716 +▁financing -9717 +▁flaw -9718 +▁thoroughly -9719 +▁wound -9720 +▁completing -9721 +▁asks -9722 +▁chairs -9723 +Ed -9724 +▁Lane -9725 +▁ceiling -9726 +vo -9727 +▁relative -9728 +apped -9729 +▁robust -9730 +▁Virt -9731 +▁hunting -9732 +▁actors -9733 +▁Bron -9734 +▁recall -9735 +▁regulatory -9736 +mart -9737 +attan -9738 +boards -9739 +▁gal -9740 +children -9741 +▁surrounded -9742 +▁accompan -9743 +uild -9744 +▁instruments -9745 +▁physician -9746 +BO -9747 +▁Besides -9748 +rer -9749 +▁Version -9750 +▁preview -9751 +▁lawn -9752 +▁buttons -9753 +▁extraordinary -9754 +▁diagnosis -9755 +▁precious -9756 +▁Melbourne -9757 +▁folder -9758 +▁engineers -9759 +▁protecting -9760 +▁spokesman -9761 +▁virtually -9762 +▁emotion -9763 +▁relaxing -9764 +prof -9765 +papers -9766 +▁fib -9767 +Rep -9768 +▁habits -9769 +▁Works -9770 +▁specialists -9771 +ez -9772 +ridge -9773 +▁courts -9774 +osa -9775 +maker -9776 +unicipal -9777 +▁Kentucky -9778 
+▁Mand -9779 +▁soccer -9780 +▁Nothing -9781 +▁Cub -9782 +▁padd -9783 +▁caps -9784 +otion -9785 +▁encouraging -9786 +▁penalty -9787 +mann -9788 +▁Highway -9789 +▁fingers -9790 +▁hal -9791 +elson -9792 +▁oils -9793 +ocument -9794 +▁exclusively -9795 +▁” -9796 +▁cooperation -9797 +elve -9798 +▁TX -9799 +▁latter -9800 +▁Casino -9801 +▁Gall -9802 +▁Bureau -9803 +▁prev -9804 +▁seal -9805 +▁desper -9806 +organ -9807 +▁Mission -9808 +▁proved -9809 +http -9810 +▁theater -9811 +▁balanced -9812 +▁designing -9813 +▁buyer -9814 +Us -9815 +▁Original -9816 +▁evaluate -9817 +,' -9818 +omed -9819 +▁vulnerable -9820 +▁libr -9821 +▁sixth -9822 +▁backup -9823 +▁Missouri -9824 +bean -9825 +▁surely -9826 +▁prevention -9827 +▁Legal -9828 +▁scope -9829 +▁resulted -9830 +▁ward -9831 +▁Answ -9832 +▁qualify -9833 +▁complaint -9834 +asty -9835 +▁Jacob -9836 +▁Phill -9837 +break -9838 +▁module -9839 +▁spell -9840 +▁dil -9841 +▁Meeting -9842 +▁subscribe -9843 +▁fur -9844 +▁seller -9845 +UC -9846 +These -9847 +▁Dance -9848 +▁snap -9849 +▁DNA -9850 +▁bug -9851 +▁cabinet -9852 +iology -9853 +▁Economic -9854 +atern -9855 +▁Kate -9856 +▁da -9857 +liances -9858 +▁Tai -9859 +▁tremend -9860 +▁discovery -9861 +▁integrity -9862 +▁Chap -9863 +%) -9864 +PP -9865 +laim -9866 +ACK -9867 +▁listings -9868 +▁dismiss -9869 +▁rarely -9870 +▁showcase -9871 +eness -9872 +▁USD -9873 +▁cinem -9874 +isms -9875 +▁discrim -9876 +▁refe -9877 +▁resolve -9878 +▁Certified -9879 +▁exch -9880 +?) 
-9881 +▁toler -9882 +▁Gary -9883 +▁musicians -9884 +▁tennis -9885 +nut -9886 +upid -9887 +▁flavors -9888 +▁suspend -9889 +▁guides -9890 +▁contents -9891 +▁Moh -9892 +▁consulting -9893 +▁divided -9894 +▁nutri -9895 +ders -9896 +▁MB -9897 +▁greet -9898 +uan -9899 +▁favorites -9900 +▁define -9901 +▁somehow -9902 +cho -9903 +▁Eve -9904 +itters -9905 +▁Beautiful -9906 +▁burning -9907 +▁Hig -9908 +NS -9909 +▁Present -9910 +emed -9911 +▁AV -9912 +Col -9913 +▁prints -9914 +ova -9915 +▁deaths -9916 +▁edges -9917 +▁betting -9918 +▁enjoyable -9919 +inkle -9920 +▁Republicans -9921 +Add -9922 +▁Sac -9923 +▁nerv -9924 +▁Ark -9925 +market -9926 +▁produces -9927 +▁Vancouver -9928 +▁estimates -9929 +▁artistic -9930 +ims -9931 +▁adj -9932 +▁bacteria -9933 +▁Language -9934 +▁danger -9935 +▁courtesy -9936 +▁worship -9937 +INE -9938 +▁Assembly -9939 +▁compos -9940 +ASE -9941 +enter -9942 +▁Maint -9943 +▁span -9944 +▁Raj -9945 +▁powers -9946 +▁fascinating -9947 +don -9948 +▁Technical -9949 +▁Practice -9950 +umbing -9951 +udes -9952 +▁exterior -9953 +▁EV -9954 +Can -9955 +▁conventional -9956 +▁principal -9957 +▁Wy -9958 +▁Ele -9959 +▁Diamond -9960 +▁DE -9961 +▁popularity -9962 +▁mystery -9963 +▁bottles -9964 +▁screening -9965 +▁Liver -9966 +▁summary -9967 +ahoo -9968 +▁Together -9969 +ori -9970 +▁Subscribe -9971 +▁Leave -9972 +▁Lu -9973 +▁caring -9974 +▁Jennifer -9975 +▁grounds -9976 +▁Ta -9977 +▁addressed -9978 +▁dirt -9979 +▁assembly -9980 +▁legacy -9981 +▁outdoors -9982 +▁coron -9983 +▁processor -9984 +estone -9985 +▁duties -9986 +▁abund -9987 +▁toy -9988 +▁Peace -9989 +▁settlement -9990 +arts -9991 +▁elder -9992 +yal -9993 +addy -9994 +▁rust -9995 +TR -9996 +▁YOUR -9997 +▁divide -9998 +▁employed -9999 +▁Photos -10000 +▁Coffee -10001 +ologists -10002 +▁strive -10003 +▁pound -10004 +▁exists -10005 +▁remarkable -10006 +▁ultra -10007 +▁acres -10008 +▁ABC -10009 +▁Gab -10010 +▁uncertain -10011 +▁toilet -10012 +▁incent -10013 +▁arrangements -10014 +▁Scient -10015 +▁Particip -10016 +▁Clear 
-10017 +▁Beat -10018 +▁Korean -10019 +▁Cell -10020 +shirt -10021 +eland -10022 +▁Brooklyn -10023 +▁keen -10024 +▁fert -10025 +▁HIV -10026 +▁aluminum -10027 +▁Junior -10028 +▁frequent -10029 +▁acquire -10030 +rep -10031 +▁lean -10032 +▁Phoenix -10033 +▁reven -10034 +▁emissions -10035 +LD -10036 +▁Wedding -10037 +gets -10038 +PG -10039 +▁pants -10040 +iox -10041 +fers -10042 +▁governments -10043 +▁decrease -10044 +▁connecting -10045 +▁Esc -10046 +owed -10047 +▁BR -10048 +▁Flash -10049 +▁Much -10050 +inf -10051 +▁mad -10052 +Your -10053 +▁Writing -10054 +▁navigate -10055 +obb -10056 +ito -10057 +bra -10058 +▁Leon -10059 +rett -10060 +▁Defense -10061 +▁lightweight -10062 +▁Cov -10063 +▁Gene -10064 +▁utilize -10065 +▁modified -10066 +ifier -10067 +▁Dutch -10068 +udi -10069 +▁Rol -10070 +▁EC -10071 +gging -10072 +▁tort -10073 +▁horses -10074 +ationally -10075 +▁dried -10076 +▁jacket -10077 +▁defect -10078 +▁Simple -10079 +▁Ident -10080 +▁Hal -10081 +avirus -10082 +,’ -10083 +utical -10084 +bsp -10085 +▁operators -10086 +▁Gh -10087 +▁pup -10088 +pire -10089 +▁admission -10090 +▁Howard -10091 +▁bust -10092 +▁overview -10093 +▁Ton -10094 +vity -10095 +▁episodes -10096 +▁operates -10097 +tech -10098 +SU -10099 +▁inspiring -10100 +▁Cream -10101 +mat -10102 +▁difficulty -10103 +▁Stra -10104 +▁myster -10105 +▁gonna -10106 +▁Thanksgiving -10107 +beat -10108 +AG -10109 +▁slide -10110 +▁roughly -10111 +▁fastest -10112 +▁SW -10113 +idth -10114 +▁refuge -10115 +▁Death -10116 +▁worker -10117 +▁lawsuit -10118 +about -10119 +case -10120 +anches -10121 +▁Marine -10122 +▁drops -10123 +▁anytime -10124 +▁Dyn -10125 +▁tomatoes -10126 +▁Stop -10127 +▁Craig -10128 +▁conclusion -10129 +ded -10130 +▁denied -10131 +▁Pant -10132 +▁wisdom -10133 +MI -10134 +▁liability -10135 +ront -10136 +stock -10137 +▁Furthermore -10138 +▁Cos -10139 +▁magnet -10140 +▁charging -10141 +▁myth -10142 +emade -10143 +▁applies -10144 +▁ratings -10145 +▁Registration -10146 +▁assists -10147 +▁understood -10148 +▁Fat 
-10149 +▁essentially -10150 +selling -10151 +▁sophisticated -10152 +Mar -10153 +▁Unlike -10154 +▁removing -10155 +ICK -10156 +▁Race -10157 +iro -10158 +▁Dean -10159 +▁rend -10160 +▁WE -10161 +▁gam -10162 +▁imper -10163 +scale -10164 +▁algorith -10165 +▁EP -10166 +▁Bag -10167 +▁Player -10168 +▁threats -10169 +▁fake -10170 +▁Magic -10171 +▁Wa -10172 +card -10173 +▁cheaper -10174 +More -10175 +▁antib -10176 +▁cooling -10177 +acles -10178 +▁Anne -10179 +▁Moreover -10180 +▁Alliance -10181 +▁Nik -10182 +▁conversion -10183 +▁specialized -10184 +▁versatile -10185 +▁offerings -10186 +arest -10187 +burn -10188 +▁gardens -10189 +▁fulfill -10190 +ITH -10191 +▁Discover -10192 +▁waves -10193 +▁oldest -10194 +▁conservation -10195 +▁circuit -10196 +Please -10197 +▁fantasy -10198 +▁adoption -10199 +▁surfaces -10200 +▁Dra -10201 +▁Gil -10202 +▁unve -10203 +lend -10204 +▁memorable -10205 +table -10206 +ishop -10207 +▁stem -10208 +▁boots -10209 +▁poster -10210 +▁supplier -10211 +▁presidential -10212 +pur -10213 +▁piano -10214 +▁enjoys -10215 +▁annoy -10216 +osystem -10217 +▁paintings -10218 +▁Pink -10219 +poons -10220 +rous -10221 +asting -10222 +▁documentary -10223 +uros -10224 +▁associate -10225 +aki -10226 +ping -10227 +▁pleasant -10228 +▁calories -10229 +▁cha -10230 +▁permit -10231 +istol -10232 +▁renown -10233 +EX -10234 +▁soldiers -10235 +▁sorts -10236 +Just -10237 +▁trucks -10238 +▁Success -10239 +▁behaviour -10240 +▁theatre -10241 +▁Indones -10242 +open -10243 +ishers -10244 +▁Reserved -10245 +▁Jer -10246 +▁Feel -10247 +▁titled -10248 +▁efficiently -10249 +▁dozen -10250 +▁Andre -10251 +itations -10252 +ANT -10253 +▁Pier -10254 +▁grey -10255 +▁Fish -10256 +▁Block -10257 +iy -10258 +▁Within -10259 +▁universe -10260 +▁Gree -10261 +En -10262 +▁Speaking -10263 +▁Images -10264 +▁legit -10265 +▁clinic -10266 +usal -10267 +▁washing -10268 +jection -10269 +▁eager -10270 +▁Contract -10271 +▁Nations -10272 +▁investigate -10273 +rend -10274 +▁opposed -10275 +▁army -10276 +▁GP -10277 
+veland -10278 +after -10279 +▁tune -10280 +▁references -10281 +▁VA -10282 +▁agenda -10283 +ilation -10284 +▁Record -10285 +▁Insp -10286 +▁examine -10287 +▁Ow -10288 +▁tries -10289 +▁Mexican -10290 +▁conj -10291 +▁practition -10292 +▁elsewhere -10293 +post -10294 +▁que -10295 +▁knife -10296 +iva -10297 +enz -10298 +▁McK -10299 +▁drove -10300 +▁mining -10301 +▁frequency -10302 +▁wishes -10303 +▁gang -10304 +▁competitors -10305 +▁angle -10306 +▁mechanical -10307 +▁Movie -10308 +▁Cape -10309 +▁declared -10310 +▁cig -10311 +▁endless -10312 +OUN -10313 +▁explo -10314 +▁funded -10315 +urches -10316 +disc -10317 +phan -10318 +▁terrible -10319 +▁Amb -10320 +▁occasions -10321 +▁Wik -10322 +▁Medic -10323 +▁Analysis -10324 +▁prest -10325 +▁barrel -10326 +▁lbs -10327 +▁Vent -10328 +TML -10329 +isure -10330 +▁sensor -10331 +▁hired -10332 +▁BL -10333 +▁relaxed -10334 +▁nervous -10335 +VP -10336 +▁Nurs -10337 +olph -10338 +▁prizes -10339 +▁Trib -10340 +▁defeat -10341 +▁assured -10342 +Why -10343 +▁Audio -10344 +▁Whit -10345 +▁Restaurant -10346 +▁Third -10347 +▁Nash -10348 +▁DIY -10349 +▁fed -10350 +plates -10351 +▁departments -10352 +▁examination -10353 +▁promised -10354 +▁profiles -10355 +ourse -10356 +▁overwhelming -10357 +uum -10358 +▁Apply -10359 +▁Resort -10360 +▁Chamber -10361 +▁promises -10362 +aved -10363 +▁kiss -10364 +def -10365 +ressed -10366 +▁API -10367 +▁Occ -10368 +▁dash -10369 +▁candy -10370 +▁affiliate -10371 +▁bite -10372 +▁cruise -10373 +▁Industrial -10374 +▁dipl -10375 +▁Write -10376 +▁Fant -10377 +▁mouse -10378 +▁disappointed -10379 +▁transformation -10380 +▁poverty -10381 +▁nationwide -10382 +▁fluid -10383 +NY -10384 +imore -10385 +▁gently -10386 +▁glasses -10387 +backs -10388 +▁nuts -10389 +▁retain -10390 +ultane -10391 +▁timely -10392 +▁placing -10393 +▁colon -10394 +only -10395 +▁Fine -10396 +▁keyboard -10397 +▁unem -10398 +▁emphasis -10399 +▁contacts -10400 +▁Pitts -10401 +▁GB -10402 +▁Heritage -10403 +▁neighbors -10404 +plant -10405 +▁colorful -10406 
+hr -10407 +rec -10408 +▁configuration -10409 +▁Row -10410 +▁Krist -10411 +Or -10412 +▁footage -10413 +▁Anyway -10414 +▁Joy -10415 +▁administrative -10416 +erator -10417 +▁Hop -10418 +Here -10419 +▁beloved -10420 +▁Kings -10421 +▁Rome -10422 +▁aggressive -10423 +▁alleg -10424 +commend -10425 +bage -10426 +▁Pharm -10427 +ummy -10428 +secutive -10429 +istent -10430 +▁involvement -10431 +afe -10432 +▁Import -10433 +uther -10434 +▁font -10435 +▁cookie -10436 +▁fleet -10437 +▁fiscal -10438 +▁audiences -10439 +▁expecting -10440 +fly -10441 +▁advised -10442 +▁acknowled -10443 +▁True -10444 +▁CV -10445 +▁Lux -10446 +▁Belg -10447 +▁Dry -10448 +km -10449 +▁priced -10450 +▁treats -10451 +▁referr -10452 +▁attorneys -10453 +aceutical -10454 +immer -10455 +▁Buck -10456 +▁Bull -10457 +▁Say -10458 +▁Fle -10459 +▁possession -10460 +aa -10461 +▁Until -10462 +gie -10463 +▁browse -10464 +▁marijuana -10465 +▁representation -10466 +zens -10467 +▁Tea -10468 +▁physically -10469 +▁cloth -10470 +▁ships -10471 +▁les -10472 +▁vocal -10473 +▁publications -10474 +▁Intel -10475 +▁immune -10476 +▁Li -10477 +ropri -10478 +▁sustainability -10479 +▁tire -10480 +▁relating -10481 +pled -10482 +urrent -10483 +▁disability -10484 +Im -10485 +▁situated -10486 +▁Pain -10487 +▁Osc -10488 +▁Bright -10489 +▁beaches -10490 +▁toss -10491 +▁stomach -10492 +▁Xbox -10493 +▁Bear -10494 +▁profits -10495 +▁soap -10496 +mic -10497 +▁quantity -10498 +▁Foreign -10499 +inates -10500 +▁somebody -10501 +▁unlikely -10502 +▁scholarship -10503 +▁Could -10504 +▁pics -10505 +elfare -10506 +▁exit -10507 +▁spare -10508 +▁Agricult -10509 +▁Blood -10510 +▁Incre -10511 +▁combines -10512 +▁lately -10513 +▁unlock -10514 +▁substance -10515 +▁operator -10516 +▁backed -10517 +▁makers -10518 +▁occasionally -10519 +▁nations -10520 +▁tourism -10521 +▁Mall -10522 +utors -10523 +mother -10524 +▁Portug -10525 +TP -10526 +▁advise -10527 +▁Enterprise -10528 +▁maker -10529 +bus -10530 +▁productive -10531 +▁exhibit -10532 +▁Dick -10533 +even 
-10534 +▁thrilled -10535 +azed -10536 +▁trails -10537 +▁Bott -10538 +▁gene -10539 +▁Sorry -10540 +acter -10541 +▁Metro -10542 +▁cosmet -10543 +▁curious -10544 +▁reveals -10545 +▁Element -10546 +▁constructed -10547 +▁contractor -10548 +▁solely -10549 +▁Activ -10550 +roduct -10551 +▁wines -10552 +▁weapon -10553 +▁molec -10554 +▁prominent -10555 +▁steam -10556 +sl -10557 +▁Gord -10558 +▁ordinary -10559 +▁Castle -10560 +reach -10561 +▁shoe -10562 +▁directory -10563 +▁CM -10564 +▁Screen -10565 +▁Eye -10566 +▁foam -10567 +inking -10568 +cled -10569 +Star -10570 +▁odds -10571 +▁loop -10572 +▁rounds -10573 +pha -10574 +▁PhD -10575 +▁Falls -10576 +▁honored -10577 +▁Had -10578 +▁bulk -10579 +gments -10580 +▁Palest -10581 +▁sits -10582 +▁Reserve -10583 +opt -10584 +UI -10585 +▁Wat -10586 +gra -10587 +prises -10588 +▁upset -10589 +ilitation -10590 +▁ME -10591 +wer -10592 +▁gran -10593 +iden -10594 +▁Consider -10595 +▁grocery -10596 +▁subtle -10597 +ORT -10598 +▁tatt -10599 +olk -10600 +vation -10601 +▁Terms -10602 +▁merely -10603 +▁Cleveland -10604 +▁Liter -10605 +▁bearing -10606 +▁IL -10607 +mind -10608 +▁ruling -10609 +▁waist -10610 +▁Jonathan -10611 +▁loaded -10612 +.- -10613 +ishment -10614 +▁uncle -10615 +▁grants -10616 +▁flooring -10617 +▁Urban -10618 +▁migr -10619 +icked -10620 +▁Lisa -10621 +kee -10622 +leans -10623 +▁bout -10624 +Now -10625 +asket -10626 +akh -10627 +▁grat -10628 +▁Located -10629 +▁Related -10630 +▁renowned -10631 +▁facilitate -10632 +▁digest -10633 +Co -10634 +▁oh -10635 +omatic -10636 +bally -10637 +▁donate -10638 +omic -10639 +▁batteries -10640 +▁allegedly -10641 +oning -10642 +▁broker -10643 +▁bund -10644 +▁immigration -10645 +▁Pod -10646 +▁spacious -10647 +▁magical -10648 +▁Deal -10649 +▁versus -10650 +▁distingu -10651 +▁presentations -10652 +▁Walker -10653 +▁GR -10654 +▁counts -10655 +▁workforce -10656 +▁Cole -10657 +▁Alexander -10658 +▁immigr -10659 +▁Companies -10660 +▁carries -10661 +▁artificial -10662 +▁movements -10663 +▁Photography -10664 
+CL -10665 +cal -10666 +kind -10667 +▁supplied -10668 +▁drag -10669 +ocracy -10670 +▁Lot -10671 +▁regulation -10672 +enger -10673 +▁slip -10674 +▁overseas -10675 +▁rac -10676 +▁Antonio -10677 +aughters -10678 +▁lovers -10679 +▁Coord -10680 +▁Hyd -10681 +▁shelter -10682 +▁consent -10683 +▁urg -10684 +▁carrier -10685 +▁width -10686 +▁enorm -10687 +▁consecutive -10688 +▁sust -10689 +▁sweat -10690 +heat -10691 +▁manufactured -10692 +▁disorder -10693 +▁Stars -10694 +▁annually -10695 +▁Premium -10696 +▁Later -10697 +▁Berlin -10698 +▁stolen -10699 +html -10700 +▁paste -10701 +▁jazz -10702 +itched -10703 +ospel -10704 +▁refused -10705 +▁Queens -10706 +ounced -10707 +▁Charlotte -10708 +▁pipe -10709 +▁Spot -10710 +▁Purch -10711 +▁Rat -10712 +▁Der -10713 +ften -10714 +▁thro -10715 +▁striking -10716 +▁survival -10717 +▁romance -10718 +▁oblig -10719 +▁Bruce -10720 +who -10721 +▁Roberts -10722 +▁Crit -10723 +▁careers -10724 +▁Nether -10725 +▁Mississ -10726 +▁superb -10727 +▁stopping -10728 +▁settled -10729 +▁Leadership -10730 +aud -10731 +▁Maine -10732 +isp -10733 +OVE -10734 +emet -10735 +▁vegetable -10736 +flix -10737 +perform -10738 +Today -10739 +▁Alan -10740 +FP -10741 +▁compassion -10742 +inge -10743 +▁flags -10744 +▁reverse -10745 +▁filing -10746 +▁festiv -10747 +▁bio -10748 +▁downloaded -10749 +▁Wayne -10750 +▁passenger -10751 +oops -10752 +▁Nort -10753 +issa -10754 +isan -10755 +▁begun -10756 +ugg -10757 +▁handful -10758 +▁achievement -10759 +▁rings -10760 +ARD -10761 +▁Got -10762 +▁Amy -10763 +▁royal -10764 +▁Mun -10765 +▁directors -10766 +orious -10767 +▁wrapped -10768 +bes -10769 +▁Roc -10770 +▁IV -10771 +▁Dubai -10772 +▁dialogue -10773 +▁Region -10774 +▁donated -10775 +▁genre -10776 +▁disturb -10777 +closure -10778 +▁appliances -10779 +▁Edward -10780 +heart -10781 +▁assemb -10782 +▁hadn -10783 +hou -10784 +▁slots -10785 +writer -10786 +▁settle -10787 +rena -10788 +▁barely -10789 +▁lease -10790 +▁poetry -10791 +fund -10792 +ampa -10793 +▁scream -10794 +▁Bond -10795 
+▁fancy -10796 +▁accomplished -10797 +▁horm -10798 +▁Homes -10799 +▁inex -10800 +Tw -10801 +▁pasta -10802 +▁Jen -10803 +▁Hub -10804 +▁cub -10805 +▁veterans -10806 +▁BY -10807 +▁secured -10808 +▁Target -10809 +xygen -10810 +isted -10811 +▁towns -10812 +wick -10813 +ateur -10814 +▁allerg -10815 +▁Nap -10816 +▁teens -10817 +▁fragr -10818 +dam -10819 +▁Battle -10820 +▁pione -10821 +▁participated -10822 +▁attendance -10823 +▁Pil -10824 +▁incorporate -10825 +▁Suite -10826 +▁Too -10827 +▁preced -10828 +▁suits -10829 +asses -10830 +▁rival -10831 +▁iT -10832 +woman -10833 +▁hybrid -10834 +SL -10835 +▁Animal -10836 +▁Som -10837 +▁Oxford -10838 +▁achieving -10839 +▁composition -10840 +forward -10841 +▁Orleans -10842 +▁Beauty -10843 +▁Graph -10844 +▁deleg -10845 +▁Focus -10846 +▁demonstrated -10847 +▁governor -10848 +▁Susan -10849 +▁sq -10850 +▁wherever -10851 +▁impacts -10852 +▁intellectual -10853 +stick -10854 +innamon -10855 +omin -10856 +▁tailored -10857 +shine -10858 +▁affects -10859 +▁aver -10860 +ifferent -10861 +▁Anna -10862 +▁convey -10863 +▁receip -10864 +IO -10865 +▁Hamilton -10866 +▁Metal -10867 +▁Compl -10868 +▁bicy -10869 +theless -10870 +bird -10871 +▁collective -10872 +hill -10873 +▁tablespoons -10874 +irates -10875 +▁poker -10876 +cure -10877 +itative -10878 +▁Touch -10879 +▁innings -10880 +▁ax -10881 +▁THIS -10882 +▁Boo -10883 +otta -10884 +▁Fig -10885 +eled -10886 +▁inters -10887 +▁shine -10888 +▁preserve -10889 +▁labels -10890 +▁confront -10891 +▁Emergency -10892 +▁routes -10893 +▁contacted -10894 +▁laundry -10895 +▁portable -10896 +▁cord -10897 +quet -10898 +tage -10899 +▁stays -10900 +▁hood -10901 +▁Whatever -10902 +▁dealer -10903 +comed -10904 +Pal -10905 +▁specialty -10906 +▁complaints -10907 +▁Corporate -10908 +apse -10909 +ader -10910 +▁shaped -10911 +▁NYC -10912 +IB -10913 +essed -10914 +Am -10915 +▁bot -10916 +▁Charlie -10917 +guard -10918 +.; -10919 +▁accred -10920 +▁Gov -10921 +▁accompanied -10922 +holder -10923 +▁trials -10924 +▁Qual -10925 
+▁Alaska -10926 +▁glow -10927 +▁optional -10928 +▁accidents -10929 +atching -10930 +▁patio -10931 +linary -10932 +▁Apart -10933 +▁measured -10934 +best -10935 +olitan -10936 +Gu -10937 +Int -10938 +▁sisters -10939 +dem -10940 +FS -10941 +fight -10942 +ippi -10943 +lando -10944 +HD -10945 +▁enabling -10946 +OB -10947 +▁simultane -10948 +adesh -10949 +▁mathem -10950 +▁pockets -10951 +▁Lad -10952 +▁loads -10953 +▁Yellow -10954 +▁formation -10955 +▁disabled -10956 +▁ears -10957 +▁tags -10958 +▁Cou -10959 +▁Maria -10960 +owered -10961 +▁Bitcoin -10962 +▁anch -10963 +▁VP -10964 +▁Russell -10965 +▁dressing -10966 +KS -10967 +reedom -10968 +▁Hind -10969 +acco -10970 +▁encount -10971 +gins -10972 +▁spectrum -10973 +▁dirty -10974 +▁capability -10975 +▁testim -10976 +ussy -10977 +▁homeless -10978 +kers -10979 +▁Solar -10980 +ailed -10981 +▁** -10982 +▁Gate -10983 +▁reduces -10984 +▁purple -10985 +▁mit -10986 +▁edited -10987 +▁intrig -10988 +▁Skin -10989 +▁Vietnam -10990 +▁fitting -10991 +Donald -10992 +▁rolled -10993 +▁realistic -10994 +▁specifications -10995 +ushes -10996 +ACT -10997 +▁proport -10998 +abis -10999 +▁Storage -11000 +inance -11001 +▁translation -11002 +▁snack -11003 +▁tear -11004 +esty -11005 +▁dedication -11006 +▁consultant -11007 +▁scrap -11008 +▁wellness -11009 +▁NJ -11010 +▁Luck -11011 +▁Ky -11012 +▁survived -11013 +UB -11014 +RP -11015 +▁adventures -11016 +▁Fix -11017 +▁AG -11018 +Ste -11019 +▁Prize -11020 +▁mush -11021 +▁epic -11022 +adows -11023 +▁tale -11024 +▁Commerce -11025 +▁Aw -11026 +▁enabled -11027 +▁Hawaii -11028 +▁territory -11029 +▁fitted -11030 +▁pant -11031 +emetery -11032 +▁UV -11033 +▁optimal -11034 +▁Pra -11035 +plus -11036 +▁plates -11037 +▁tempt -11038 +edded -11039 +iffs -11040 +▁Ho -11041 +▁Gulf -11042 +▁Athlet -11043 +▁Beck -11044 +▁Doll -11045 +anship -11046 +▁essays -11047 +ML -11048 +fortable -11049 +▁spa -11050 +Work -11051 +▁disorders -11052 +▁conditioning -11053 +▁jur -11054 +▁blame -11055 +stop -11056 +▁Analy -11057 +▁Nigeria 
-11058 +▁accum -11059 +▁specified -11060 +eld -11061 +▁fallen -11062 +▁attempted -11063 +icut -11064 +▁contractors -11065 +▁frames -11066 +▁rally -11067 +▁designated -11068 +▁Grace -11069 +▁Israeli -11070 +▁thrown -11071 +azines -11072 +▁CBS -11073 +▁ecosystem -11074 +▁Stewart -11075 +▁Furn -11076 +▁Dental -11077 +angel -11078 +▁Wonder -11079 +▁MC -11080 +▁provision -11081 +▁Baltimore -11082 +▁Liverpool -11083 +▁imagination -11084 +▁% -11085 +▁scenario -11086 +clusive -11087 +▁horror -11088 +▁sized -11089 +aco -11090 +▁reflects -11091 +▁Scholars -11092 +▁dresses -11093 +▁crimes -11094 +▁neutral -11095 +annah -11096 +first -11097 +▁Luke -11098 +▁devast -11099 +▁Nev -11100 +spec -11101 +▁Cars -11102 +▁Friend -11103 +▁Posts -11104 +▁Organization -11105 +▁Value -11106 +▁indicates -11107 +ceive -11108 +▁tasty -11109 +▁HTML -11110 +▁installing -11111 +▁Si -11112 +▁Chrome -11113 +ao -11114 +▁coupon -11115 +▁Palm -11116 +▁Sad -11117 +robe -11118 +▁tourist -11119 +▁attractions -11120 +odium -11121 +▁manip -11122 +▁formats -11123 +▁pine -11124 +rapeut -11125 +▁premier -11126 +Well -11127 +asures -11128 +▁Ec -11129 +inciple -11130 +▁courage -11131 +▁Begin -11132 +▁enemy -11133 +▁speaks -11134 +▁booth -11135 +kg -11136 +▁(“ -11137 +lan -11138 +▁distract -11139 +▁Culture -11140 +lay -11141 +▁dose -11142 +▁satellite -11143 +▁lineup -11144 +▁Given -11145 +▁Latest -11146 +▁spoken -11147 +▁cheer -11148 +▁cous -11149 +▁Ground -11150 +bian -11151 +▁rhyth -11152 +▁Fro -11153 +▁Wolf -11154 +▁entrepreneur -11155 +▁Jobs -11156 +ocated -11157 +▁landing -11158 +OUR -11159 +▁Individual -11160 +▁FC -11161 +▁dressed -11162 +ruptcy -11163 +▁Netherlands -11164 +gate -11165 +▁wing -11166 +▁tremendous -11167 +▁collecting -11168 +▁Imag -11169 +▁GA -11170 +▁colored -11171 +▁Dak -11172 +▁Transportation -11173 +ailand -11174 +shot -11175 +nered -11176 +▁Thompson -11177 +▁nicely -11178 +▁Virtual -11179 +▁tears -11180 +▁wax -11181 +▁venues -11182 +PT -11183 +▁Laura -11184 +ctic -11185 +▁Rd -11186 +rac 
-11187 +▁tong -11188 +▁Lem -11189 +▁hardly -11190 +isers -11191 +▁rush -11192 +▁diagnosed -11193 +▁dump -11194 +▁printer -11195 +▁warned -11196 +▁Dise -11197 +▁Cold -11198 +▁tuned -11199 +ipper -11200 +wind -11201 +▁Users -11202 +▁province -11203 +▁electronics -11204 +▁Christopher -11205 +▁girlfriend -11206 +▁Hours -11207 +▁merchand -11208 +▁Scottish -11209 +▁marine -11210 +▁destinations -11211 +▁expense -11212 +▁selecting -11213 +▁seating -11214 +▁relation -11215 +▁ye -11216 +▁throwing -11217 +▁apartments -11218 +▁ties -11219 +▁punch -11220 +▁Valentine -11221 +▁embrace -11222 +▁legendary -11223 +▁Butter -11224 +andal -11225 +die -11226 +▁confirmation -11227 +uman -11228 +CAA -11229 +▁Chelsea -11230 +enny -11231 +▁crop -11232 +▁deput -11233 +▁irrit -11234 +▁stakeholders -11235 +▁Rot -11236 +▁Ukraine -11237 +▁Recently -11238 +▁destroyed -11239 +▁Benefits -11240 +▁Chicken -11241 +▁Regard -11242 +▁Chocolate -11243 +▁Carib -11244 +PE -11245 +▁Upon -11246 +▁explanation -11247 +▁nonprofit -11248 +asm -11249 +▁tension -11250 +▁damages -11251 +cca -11252 +▁seeks -11253 +▁Vin -11254 +Che -11255 +there -11256 +▁seasonal -11257 +▁Inside -11258 +▁Trip -11259 +▁declined -11260 +▁breathing -11261 +▁AW -11262 +▁calc -11263 +▁mg -11264 +health -11265 +▁Cann -11266 +▁intimate -11267 +▁aqu -11268 +▁Summit -11269 +▁Adobe -11270 +▁GT -11271 +ounces -11272 +URE -11273 +▁Lib -11274 +▁resistant -11275 +▁foster -11276 +▁nest -11277 +▁Mars -11278 +▁locked -11279 +▁handed -11280 +▁lobby -11281 +▁stupid -11282 +▁peaceful -11283 +▁vertical -11284 +conf -11285 +ribution -11286 +▁welcomed -11287 +▁steady -11288 +▁grain -11289 +▁cyber -11290 +▁vitamin -11291 +owship -11292 +▁figured -11293 +gun -11294 +▁unlimited -11295 +▁hill -11296 +▁healthier -11297 +ni -11298 +▁Han -11299 +▁gel -11300 +▁engineer -11301 +VC -11302 +auc -11303 +▁handled -11304 +▁insect -11305 +Oh -11306 +▁nobody -11307 +▁Sean -11308 +lar -11309 +▁internationally -11310 +▁cents -11311 +▁verify -11312 +▁gains -11313 +ixel -11314 
+▁exploration -11315 +▁appet -11316 +▁Track -11317 +father -11318 +▁PO -11319 +▁Nich -11320 +▁anybody -11321 +▁timing -11322 +▁pork -11323 +▁publicly -11324 +▁feeding -11325 +Date -11326 +▁amid -11327 +▁GPS -11328 +vol -11329 +izations -11330 +BN -11331 +▁Extra -11332 +tends -11333 +speed -11334 +▁colleges -11335 +▁travelling -11336 +▁tires -11337 +▁gross -11338 +News -11339 +▁Housing -11340 +bet -11341 +▁Active -11342 +▁iTunes -11343 +▁ordering -11344 +▁Netflix -11345 +▁Strateg -11346 +ricane -11347 +▁differently -11348 +▁publisher -11349 +▁competing -11350 +▁touchdown -11351 +▁branches -11352 +▁voices -11353 +▁spo -11354 +▁Ready -11355 +▁reads -11356 +▁blessed -11357 +Point -11358 +uisine -11359 +writing -11360 +▁Lawrence -11361 +▁tent -11362 +▁locate -11363 +▁collaborative -11364 +ELL -11365 +▁Emb -11366 +commerce -11367 +▁placement -11368 +aro -11369 +cat -11370 +erals -11371 +▁bits -11372 +▁immers -11373 +▁earning -11374 +▁Om -11375 +▁ought -11376 +▁semester -11377 +▁petition -11378 +▁mixing -11379 +▁assignment -11380 +▁implementing -11381 +ascular -11382 +party -11383 +▁Bloom -11384 +▁dont -11385 +▁cultiv -11386 +▁pulling -11387 +▁phrase -11388 +▁Associate -11389 +▁Official -11390 +allel -11391 +▁Investment -11392 +▁ISO -11393 +▁Coun -11394 +▁medications -11395 +▁corpor -11396 +▁leak -11397 +▁Technologies -11398 +▁dozens -11399 +▁Wol -11400 +▁measurements -11401 +▁onions -11402 +isode -11403 +▁humor -11404 +▁effectiveness -11405 +▁facial -11406 +▁grid -11407 +▁automated -11408 +▁guns -11409 +▁shades -11410 +▁Pittsburgh -11411 +▁proposals -11412 +shore -11413 +▁discussing -11414 +▁cultures -11415 +▁adopt -11416 +▁lyrics -11417 +▁cum -11418 +▁duration -11419 +▁mailing -11420 +▁minim -11421 +▁trigger -11422 +▁toxic -11423 +UE -11424 +▁treating -11425 +ividually -11426 +▁Lif -11427 +▁Ott -11428 +empl -11429 +▁rack -11430 +▁pond -11431 +▁nurse -11432 +▁bay -11433 +▁difficulties -11434 +▁burg -11435 +▁diameter -11436 +▁Strong -11437 +lishing -11438 +▁Chi -11439 
+necess -11440 +▁painful -11441 +▁receives -11442 +gic -11443 +▁reminder -11444 +▁synd -11445 +▁GO -11446 +▁submission -11447 +▁WITH -11448 +▁countless -11449 +▁armed -11450 +acent -11451 +▁Yoga -11452 +▁addiction -11453 +▁Assess -11454 +▁Ring -11455 +▁applicants -11456 +▁RS -11457 +▁Includes -11458 +quer -11459 +▁Starting -11460 +▁Ga -11461 +▁mechanism -11462 +▁vinyl -11463 +atherine -11464 +MB -11465 +▁WA -11466 +▁Ali -11467 +▁fool -11468 +chestra -11469 +▁Ka -11470 +▁pron -11471 +▁Hunter -11472 +▁Barbara -11473 +▁tablets -11474 +▁overs -11475 +▁launching -11476 +unct -11477 +▁Sout -11478 +▁femin -11479 +▁fence -11480 +▁aging -11481 +umble -11482 +▁suspended -11483 +▁Parliament -11484 +▁Repair -11485 +▁bun -11486 +▁Captain -11487 +cend -11488 +▁Category -11489 +▁subsequent -11490 +▁hiking -11491 +▁Designed -11492 +▁desert -11493 +▁rewards -11494 +▁Nelson -11495 +▁zip -11496 +▁Bor -11497 +▁CBD -11498 +▁uniform -11499 +▁Adult -11500 +abs -11501 +▁ESP -11502 +▁fu -11503 +▁Intellig -11504 +▁Essential -11505 +▁suspension -11506 +▁HE -11507 +Some -11508 +iches -11509 +▁hack -11510 +▁yarn -11511 +▁Marg -11512 +▁Pas -11513 +▁absence -11514 +▁Gib -11515 +Ph -11516 +▁Afghanistan -11517 +▁narrative -11518 +▁judges -11519 +▁concluded -11520 +▁Chen -11521 +▁angry -11522 +▁checkout -11523 +ictions -11524 +vet -11525 +▁innoc -11526 +▁restoration -11527 +▁jam -11528 +▁automotive -11529 +▁Innovation -11530 +▁robot -11531 +▁Earlier -11532 +▁NBC -11533 +▁delicate -11534 +▁stroke -11535 +▁boyfriend -11536 +▁motivated -11537 +▁Funeral -11538 +sk -11539 +DAY -11540 +▁ridic -11541 +▁Partner -11542 +▁prescription -11543 +▁Ridge -11544 +ju -11545 +▁reliability -11546 +▁Drug -11547 +▁Orlando -11548 +iper -11549 +▁Connecticut -11550 +▁Morning -11551 +▁Former -11552 +▁Kay -11553 +breaking -11554 +DR -11555 +▁Something -11556 +itivity -11557 +▁Malaysia -11558 +▁Math -11559 +amily -11560 +▁Half -11561 +▁conservative -11562 +▁Bachelor -11563 +▁seniors -11564 +▁ML -11565 +▁brace -11566 +▁Na 
-11567 +▁marketplace -11568 +▁Mississippi -11569 +▁Adams -11570 +▁Survey -11571 +▁Protect -11572 +▁tang -11573 +▁Feature -11574 +▁anticipated -11575 +▁forgotten -11576 +▁Used -11577 +▁motivation -11578 +▁homeowners -11579 +▁Bou -11580 +poon -11581 +▁albums -11582 +good -11583 +▁climbing -11584 +chain -11585 +har -11586 +▁Wash -11587 +▁welcoming -11588 +▁Length -11589 +▁buck -11590 +▁prospects -11591 +izable -11592 +▁prestigious -11593 +▁Independent -11594 +▁luxurious -11595 +▁winds -11596 +▁outer -11597 +cribed -11598 +▁troops -11599 +▁Info -11600 +▁Tow -11601 +▁buzz -11602 +▁ende -11603 +▁Taking -11604 +▁cush -11605 +▁owns -11606 +▁Domin -11607 +▁picks -11608 +count -11609 +▁Publ -11610 +▁Harvard -11611 +▁Artist -11612 +▁inject -11613 +▁Organic -11614 +▁Advert -11615 +▁Convention -11616 +▁preferences -11617 +▁scal -11618 +IGHT -11619 +▁theft -11620 +▁Sweden -11621 +▁manif -11622 +▁accordance -11623 +▁Draw -11624 +▁Michelle -11625 +around -11626 +▁funeral -11627 +▁Vac -11628 +▁Multi -11629 +▁concentration -11630 +▁Islands -11631 +▁churches -11632 +eneath -11633 +▁Ay -11634 +▁neuro -11635 +oard -11636 +▁DR -11637 +▁celebrity -11638 +▁Gordon -11639 +▁Thailand -11640 +Sub -11641 +lington -11642 +olesale -11643 +▁analytics -11644 +▁guided -11645 +thon -11646 +uru -11647 +▁measurement -11648 +▁shelf -11649 +▁Islam -11650 +▁Kal -11651 +arrass -11652 +▁Effect -11653 +▁Recommend -11654 +called -11655 +▁abstract -11656 +etooth -11657 +▁violent -11658 +▁Hey -11659 +▁Aaron -11660 +▁Grey -11661 +▁demanding -11662 +▁Sheriff -11663 +▁identification -11664 +▁brick -11665 +▁cancel -11666 +Yes -11667 +▁Treatment -11668 +▁console -11669 +▁confidential -11670 +ogs -11671 +▁Volunte -11672 +▁captain -11673 +▁lips -11674 +▁Question -11675 +▁Temple -11676 +▁Hello -11677 +▁Notes -11678 +▁casinos -11679 +▁Voice -11680 +▁genetic -11681 +depth -11682 +▁laboratory -11683 +▁harmful -11684 +▁Therapy -11685 +▁retreat -11686 +▁Louisiana -11687 +▁Madison -11688 +icity -11689 +▁regards -11690 
+▁sponsor -11691 +OME -11692 +uy -11693 +IE -11694 +▁intelligent -11695 +ictionary -11696 +iar -11697 +▁apolog -11698 +▁friendship -11699 +▁appreciation -11700 +▁vel -11701 +▁jeans -11702 +▁enormous -11703 +▁Sens -11704 +eddings -11705 +▁RA -11706 +▁Manh -11707 +school -11708 +▁Rab -11709 +▁folk -11710 +▁Rent -11711 +▁closure -11712 +▁Francis -11713 +▁seventh -11714 +▁kilomet -11715 +▁boil -11716 +▁protective -11717 +▁Toyota -11718 +▁Islamic -11719 +▁canvas -11720 +NC -11721 +▁threw -11722 +atar -11723 +paren -11724 +▁crystal -11725 +idity -11726 +▁Fly -11727 +GS -11728 +▁Nice -11729 +▁Door -11730 +▁devoted -11731 +▁forums -11732 +▁fridge -11733 +▁tourists -11734 +▁valued -11735 +thur -11736 +olas -11737 +kyo -11738 +uity -11739 +▁airline -11740 +▁Hug -11741 +code -11742 +▁athletic -11743 +▁separately -11744 +▁alternatives -11745 +▁Sleep -11746 +▁Philippines -11747 +▁grows -11748 +▁Range -11749 +▁convin -11750 +asion -11751 +▁requiring -11752 +▁Cambridge -11753 +author -11754 +▁dimensions -11755 +▁shorter -11756 +▁ranking -11757 +onde -11758 +▁Boot -11759 +Are -11760 +ilarly -11761 +▁Desk -11762 +test -11763 +▁Paint -11764 +ching -11765 +encil -11766 +▁disabilities -11767 +▁portal -11768 +▁crafts -11769 +▁execution -11770 +▁Weather -11771 +▁noon -11772 +▁agricultural -11773 +▁neat -11774 +▁camping -11775 +▁reminded -11776 +▁Chef -11777 +▁sons -11778 +edu -11779 +▁varied -11780 +▁vegan -11781 +▁pad -11782 +▁fundraising -11783 +▁Cass -11784 +▁executives -11785 +▁copper -11786 +▁Khan -11787 +▁Jessica -11788 +▁Tag -11789 +inery -11790 +▁confused -11791 +ento -11792 +▁Kennedy -11793 +▁Item -11794 +▁Ian -11795 +ceptions -11796 +onge -11797 +▁Options -11798 +▁detection -11799 +▁accompany -11800 +▁Rachel -11801 +▁toll -11802 +▁adjustable -11803 +▁refreshing -11804 +▁navigation -11805 +rowave -11806 +▁inflamm -11807 +▁wrest -11808 +▁cer -11809 +▁Perry -11810 +Net -11811 +▁recommendation -11812 +zo -11813 +▁filters -11814 +cha -11815 +▁attendees -11816 +▁troubles -11817 
+▁custody -11818 +▁Spa -11819 +▁ethnic -11820 +▁heal -11821 +▁partnerships -11822 +") -11823 +icop -11824 +▁Crim -11825 +▁homemade -11826 +▁Christians -11827 +oria -11828 +▁attempting -11829 +▁jury -11830 +▁qualities -11831 +itarian -11832 +▁Fu -11833 +▁Bapt -11834 +▁stere -11835 +▁boundaries -11836 +oven -11837 +▁transparent -11838 +▁tomato -11839 +▁Clar -11840 +▁breed -11841 +▁Fan -11842 +whe -11843 +ancers -11844 +▁replacing -11845 +▁Caribbean -11846 +▁Dad -11847 +▁demo -11848 +ylon -11849 +▁blogging -11850 +ante -11851 +▁handles -11852 +▁organize -11853 +Tra -11854 +▁Yan -11855 +▁Path -11856 +apters -11857 +▁Tools -11858 +▁instructor -11859 +▁dessert -11860 +▁Bah -11861 +▁burst -11862 +▁Production -11863 +▁unfortunately -11864 +▁Medicare -11865 +▁Century -11866 +▁identifying -11867 +▁crafted -11868 +▁Retail -11869 +▁lat -11870 +▁Greece -11871 +▁districts -11872 +ipedia -11873 +ethe -11874 +▁Surg -11875 +▁tower -11876 +▁ster -11877 +▁hesitate -11878 +▁headquarters -11879 +▁establishment -11880 +blo -11881 +▁speeds -11882 +▁Sett -11883 +▁Dragon -11884 +!). 
-11885 +▁Tower -11886 +▁sacrif -11887 +▁Wright -11888 +cs -11889 +▁boring -11890 +▁Pool -11891 +▁magazines -11892 +Go -11893 +▁Leader -11894 +SD -11895 +chair -11896 +▁adequate -11897 +track -11898 +▁Sustain -11899 +▁grip -11900 +▁supportive -11901 +▁Obs -11902 +ostic -11903 +▁arrangement -11904 +▁judgment -11905 +▁cogn -11906 +▁divers -11907 +▁propri -11908 +▁Bio -11909 +▁hint -11910 +▁Vision -11911 +▁Java -11912 +▁Todd -11913 +▁inev -11914 +▁reporter -11915 +▁lap -11916 +arma -11917 +pres -11918 +ingle -11919 +iev -11920 +▁rides -11921 +▁Kr -11922 +▁Administr -11923 +▁Visual -11924 +▁Princip -11925 +▁lined -11926 +▁Between -11927 +▁Really -11928 +eals -11929 +▁Fill -11930 +▁weakness -11931 +▁indu -11932 +▁fears -11933 +▁medic -11934 +▁utilizing -11935 +▁saves -11936 +▁Campus -11937 +▁Edge -11938 +▁rocks -11939 +▁hem -11940 +IES -11941 +▁Hem -11942 +▁Baker -11943 +▁closet -11944 +loyd -11945 +▁Grade -11946 +▁audit -11947 +master -11948 +▁Graham -11949 +▁vaccine -11950 +▁boats -11951 +parency -11952 +▁Ful -11953 +ASH -11954 +▁affairs -11955 +▁mounted -11956 +mitting -11957 +▁NASA -11958 +▁finishes -11959 +▁Bart -11960 +hard -11961 +▁shadow -11962 +▁precision -11963 +elta -11964 +▁arranged -11965 +▁teaches -11966 +▁Profile -11967 +▁promising -11968 +▁Dest -11969 +elect -11970 +▁gray -11971 +▁tricks -11972 +▁sporting -11973 +▁LE -11974 +▁Basic -11975 +▁signals -11976 +working -11977 +▁highlighted -11978 +▁lightly -11979 +▁Anim -11980 +▁Excellent -11981 +▁Fal -11982 +▁intervention -11983 +▁lighter -11984 +▁underlying -11985 +▁sequence -11986 +▁contributing -11987 +▁dealers -11988 +▁talents -11989 +▁ruled -11990 +▁Universal -11991 +▁recruitment -11992 +▁Listen -11993 +▁composed -11994 +▁specially -11995 +▁caf -11996 +celona -11997 +▁claiming -11998 +▁Larry -11999 +▁daughters -12000 +▁dup -12001 +▁Born -12002 +▁recipient -12003 +▁logged -12004 +▁tastes -12005 +pling -12006 +itzerland -12007 +will -12008 +▁RO -12009 +▁outf -12010 +▁Ward -12011 +▁oxygen -12012 +▁Yahoo 
-12013 +▁peer -12014 +▁Fem -12015 +▁Argent -12016 +▁snacks -12017 +▁Swiss -12018 +anth -12019 +See -12020 +▁integrate -12021 +anut -12022 +they -12023 +▁.. -12024 +▁complimentary -12025 +▁suicide -12026 +▁structural -12027 +▁eastern -12028 +amins -12029 +▁suited -12030 +▁Prices -12031 +▁advocate -12032 +▁ignore -12033 +▁quarterback -12034 +oft -12035 +rary -12036 +▁bin -12037 +▁weekends -12038 +▁FDA -12039 +▁dear -12040 +▁ranks -12041 +▁SK -12042 +▁fut -12043 +▁spam -12044 +▁hockey -12045 +▁pollution -12046 +Lear -12047 +▁Greater -12048 +▁costume -12049 +▁bikes -12050 +▁Hend -12051 +mediate -12052 +▁Stories -12053 +▁builds -12054 +held -12055 +▁wore -12056 +▁squee -12057 +▁sponsors -12058 +▁SL -12059 +▁Tokyo -12060 +▁kits -12061 +▁License -12062 +▁individually -12063 +▁Customers -12064 +▁clim -12065 +bled -12066 +▁Duke -12067 +▁Turk -12068 +▁cure -12069 +▁tile -12070 +▁Sugar -12071 +▁Improve -12072 +▁Avoid -12073 +▁adorable -12074 +▁refers -12075 +▁Anti -12076 +▁Woman -12077 +▁Stream -12078 +▁discipline -12079 +▁interactions -12080 +▁twin -12081 +▁surviv -12082 +▁reporters -12083 +▁appointments -12084 +▁Carter -12085 +▁Drop -12086 +green -12087 +▁beliefs -12088 +▁spouse -12089 +▁disag -12090 +ibe -12091 +▁embra -12092 +▁Beth -12093 +▁boasts -12094 +▁Veh -12095 +▁Lex -12096 +▁presenting -12097 +▁Ach -12098 +▁expects -12099 +▁tops -12100 +▁mobility -12101 +▁Tar -12102 +▁Freedom -12103 +▁fabrics -12104 +▁Equipment -12105 +▁rect -12106 +▁infections -12107 +▁Hamp -12108 +▁thankful -12109 +▁principle -12110 +▁Jerry -12111 +▁Ultra -12112 +▁moral -12113 +assador -12114 +▁Parker -12115 +irmingham -12116 +ernand -12117 +▁Pros -12118 +▁compelling -12119 +Time -12120 +▁ox -12121 +herent -12122 +▁appearances -12123 +▁agriculture -12124 +glas -12125 +While -12126 +▁Robinson -12127 +▁argue -12128 +▁floral -12129 +▁parameters -12130 +▁counties -12131 +▁moderate -12132 +ieces -12133 +onia -12134 +▁stitch -12135 +▁Mas -12136 +▁trademark -12137 +Let -12138 +mel -12139 +▁MI -12140 
+▁analyze -12141 +▁agreements -12142 +uz -12143 +quest -12144 +▁Career -12145 +▁unnecess -12146 +▁Instruct -12147 +▁hungry -12148 +▁booked -12149 +▁Tig -12150 +▁Ale -12151 +▁libraries -12152 +▁quit -12153 +▁Condition -12154 +PI -12155 +▁Stir -12156 +▁thy -12157 +▁dentist -12158 +▁failing -12159 +▁Manhattan -12160 +▁addressing -12161 +▁Flex -12162 +ottage -12163 +▁Rice -12164 +▁Syria -12165 +▁Hud -12166 +▁Originally -12167 +▁margin -12168 +IDS -12169 +▁Sel -12170 +▁Associated -12171 +illance -12172 +▁fer -12173 +▁Campbell -12174 +▁od -12175 +effective -12176 +▁charts -12177 +urers -12178 +▁horn -12179 +arel -12180 +▁Weekly -12181 +▁Slo -12182 +▁cleans -12183 +▁promotional -12184 +▁bones -12185 +▁gut -12186 +▁sunny -12187 +▁Emily -12188 +▁strain -12189 +▁Others -12190 +▁seemingly -12191 +▁BMW -12192 +▁Broadway -12193 +▁Frame -12194 +▁Native -12195 +▁thesis -12196 +▁wheat -12197 +▁Jazz -12198 +▁embarrass -12199 +▁Alb -12200 +▁Fer -12201 +▁Nevada -12202 +▁remod -12203 +▁Tampa -12204 +▁appealing -12205 +acht -12206 +▁delete -12207 +▁spotted -12208 +▁beating -12209 +▁worthy -12210 +▁satisfy -12211 +abad -12212 +▁Imm -12213 +▁backyard -12214 +▁jumped -12215 +▁Usually -12216 +▁Ger -12217 +▁patent -12218 +▁stadium -12219 +▁Singh -12220 +resist -12221 +BR -12222 +▁computing -12223 +▁ginger -12224 +▁Hopefully -12225 +▁pays -12226 +▁harsh -12227 +▁transferred -12228 +nell -12229 +▁dying -12230 +▁chill -12231 +▁grill -12232 +▁Feed -12233 +aus -12234 +▁Boys -12235 +umbai -12236 +▁tribute -12237 +▁Rating -12238 +▁Brun -12239 +▁charming -12240 +▁Abs -12241 +▁Fell -12242 +▁tropical -12243 +▁substitute -12244 +▁multip -12245 +achers -12246 +▁Collins -12247 +▁Floor -12248 +▁glory -12249 +size -12250 +▁converted -12251 +▁Morris -12252 +neath -12253 +▁Links -12254 +▁globally -12255 +▁recycling -12256 +▁containers -12257 +marks -12258 +▁pear -12259 +▁Soul -12260 +▁secretary -12261 +▁MORE -12262 +▁Studios -12263 +▁portray -12264 +Phot -12265 +▁Franklin -12266 +▁Gay -12267 +▁honestly 
-12268 +▁crisp -12269 +▁beta -12270 +▁bron -12271 +itchell -12272 +▁encourages -12273 +▁scroll -12274 +▁closest -12275 +▁hike -12276 +▁conferences -12277 +▁accepting -12278 +▁technological -12279 +▁Warri -12280 +▁Match -12281 +anne -12282 +▁varieties -12283 +▁freel -12284 +▁Senator -12285 +▁basement -12286 +▁soy -12287 +▁convinced -12288 +erald -12289 +▁inbox -12290 +▁wings -12291 +▁SEC -12292 +▁duo -12293 +▁Communication -12294 +▁Lay -12295 +▁Primary -12296 +Car -12297 +service -12298 +▁vendor -12299 +▁peers -12300 +oline -12301 +uh -12302 +▁intim -12303 +▁Nob -12304 +▁cleaned -12305 +▁sliced -12306 +▁measuring -12307 +▁receiver -12308 +SM -12309 +▁grades -12310 +▁variations -12311 +▁Karen -12312 +▁authorized -12313 +▁animation -12314 +▁lying -12315 +▁Verm -12316 +▁deter -12317 +▁Soph -12318 +▁Beyond -12319 +CR -12320 +▁sail -12321 +▁Consumer -12322 +▁educate -12323 +▁investigating -12324 +▁modules -12325 +▁universal -12326 +▁parad -12327 +▁Transfer -12328 +ogy -12329 +leading -12330 +▁outlets -12331 +▁prospective -12332 +▁knowledgeable -12333 +▁thumb -12334 +▁Regular -12335 +▁invested -12336 +▁ber -12337 +orient -12338 +otype -12339 +▁herbs -12340 +▁investor -12341 +▁stones -12342 +▁controversial -12343 +▁whereas -12344 +▁Managing -12345 +western -12346 +rons -12347 +▁Fisher -12348 +▁stepped -12349 +▁Sri -12350 +▁hills -12351 +▁awa -12352 +▁criticism -12353 +▁clever -12354 +lette -12355 +mental -12356 +▁gaining -12357 +▁liver -12358 +▁consolid -12359 +▁Od -12360 +▁jet -12361 +▁syrup -12362 +▁Honda -12363 +▁Respons -12364 +▁neighbour -12365 +▁dive -12366 +▁elderly -12367 +▁prosecut -12368 +sub -12369 +▁traditions -12370 +rogen -12371 +▁motorcycle -12372 +▁EST -12373 +encer -12374 +▁Payment -12375 +ventions -12376 +▁Kid -12377 +▁patience -12378 +▁Gray -12379 +Eng -12380 +▁startup -12381 +▁lover -12382 +struct -12383 +▁rolls -12384 +▁refresh -12385 +▁dropping -12386 +ele -12387 +▁Syn -12388 +▁praise -12389 +▁communic -12390 +▁excessive -12391 +Sm -12392 +▁riders 
-12393 +▁lounge -12394 +▁Guy -12395 +▁Built -12396 +▁visitor -12397 +▁roster -12398 +▁distinctive -12399 +▁highway -12400 +▁Linked -12401 +azy -12402 +▁Hit -12403 +▁packing -12404 +mons -12405 +▁Olympics -12406 +▁corners -12407 +▁cere -12408 +▁shame -12409 +arer -12410 +▁Oscar -12411 +▁visibility -12412 +user -12413 +▁Worth -12414 +▁Ora -12415 +Aut -12416 +▁bride -12417 +▁independently -12418 +▁Commissioner -12419 +▁alumni -12420 +▁clips -12421 +isting -12422 +lig -12423 +▁Guest -12424 +▁durability -12425 +▁analysts -12426 +▁Wheel -12427 +▁Indonesia -12428 +▁bother -12429 +▁Thai -12430 +esc -12431 +mg -12432 +Home -12433 +▁Palestin -12434 +idges -12435 +▁Jimmy -12436 +▁flip -12437 +▁Warren -12438 +▁overlook -12439 +uv -12440 +▁Opport -12441 +Bay -12442 +▁reflection -12443 +▁Crystal -12444 +▁merchandise -12445 +▁Pul -12446 +▁Saudi -12447 +TO -12448 +▁Players -12449 +▁departure -12450 +▁Murray -12451 +▁nick -12452 +▁relate -12453 +▁triple -12454 +▁NCAA -12455 +▁customize -12456 +▁envelop -12457 +▁fought -12458 +▁Expert -12459 +owing -12460 +▁exempt -12461 +ielder -12462 +appro -12463 +intendo -12464 +▁coins -12465 +▁Vi -12466 +auge -12467 +▁Switzerland -12468 +▁Proble -12469 +eks -12470 +▁Programs -12471 +▁Tyler -12472 +▁graduation -12473 +▁Deputy -12474 +igenous -12475 +quire -12476 +▁cocktail -12477 +lon -12478 +▁Barcelona -12479 +▁maximize -12480 +▁inflation -12481 +▁Hung -12482 +▁potato -12483 +▁Rather -12484 +▁Pad -12485 +▁trash -12486 +▁dependent -12487 +▁invitation -12488 +▁secrets -12489 +▁rab -12490 +▁Conserv -12491 +▁subscribers -12492 +mentation -12493 +▁Panel -12494 +oin -12495 +▁burden -12496 +”) -12497 +Jan -12498 +rors -12499 +ndant -12500 +kle -12501 +▁wage -12502 +▁Hans -12503 +▁wool -12504 +▁beneath -12505 +▁automation -12506 +▁worlds -12507 +▁drawings -12508 +GBT -12509 +▁jar -12510 +▁mattress -12511 +▁intake -12512 +erver -12513 +▁priorities -12514 +▁comply -12515 +▁continuously -12516 +▁bacon -12517 +▁naked -12518 +▁RAM -12519 +▁Fra -12520 
+▁Favor -12521 +▁technicians -12522 +▁Operations -12523 +▁Close -12524 +▁momentum -12525 +▁draws -12526 +▁Cit -12527 +▁programmes -12528 +▁warehouse -12529 +▁Often -12530 +senal -12531 +First -12532 +isy -12533 +▁acceptable -12534 +▁Fitness -12535 +worthy -12536 +▁drew -12537 +▁Schedule -12538 +▁baked -12539 +▁citizen -12540 +ols -12541 +▁Choice -12542 +▁ARE -12543 +▁Document -12544 +▁Evans -12545 +▁batch -12546 +▁NOW -12547 +▁renewable -12548 +▁Racing -12549 +▁Breakfast -12550 +▁Elementary -12551 +▁Birmingham -12552 +▁Pret -12553 +▁companion -12554 +▁gluten -12555 +TON -12556 +▁centuries -12557 +▁blocked -12558 +▁scary -12559 +▁preventing -12560 +▁Kel -12561 +Click -12562 +▁Applications -12563 +cr -12564 +▁Egg -12565 +▁Joh -12566 +aws -12567 +▁Pictures -12568 +▁hass -12569 +▁Hell -12570 +▁dairy -12571 +istical -12572 +▁pumpkin -12573 +arat -12574 +▁safer -12575 +▁Hat -12576 +▁shit -12577 +▁simultaneously -12578 +idding -12579 +▁Clinic -12580 +GO -12581 +▁cycling -12582 +▁Dal -12583 +stairs -12584 +▁reun -12585 +▁Kath -12586 +▁notification -12587 +▁considerable -12588 +▁basics -12589 +▁Rene -12590 +▁Resource -12591 +▁nutrients -12592 +▁glue -12593 +▁magnific -12594 +▁Added -12595 +▁Directors -12596 +▁rejected -12597 +▁almond -12598 +▁Clinical -12599 +▁psychological -12600 +▁shore -12601 +▁welfare -12602 +▁coronavirus -12603 +▁acknowledge -12604 +▁Sin -12605 +▁aesthetic -12606 +wed -12607 +DI -12608 +▁cree -12609 +written -12610 +▁surgical -12611 +bin -12612 +▁endorse -12613 +▁Taiwan -12614 +▁Sau -12615 +▁vig -12616 +ivic -12617 +▁islands -12618 +▁Citiz -12619 +▁FO -12620 +▁disk -12621 +▁shake -12622 +▁graduates -12623 +▁horiz -12624 +bn -12625 +check -12626 +Pub -12627 +▁undergraduate -12628 +vd -12629 +▁Vide -12630 +▁Certificate -12631 +▁Dakota -12632 +othy -12633 +▁Swe -12634 +ropolitan -12635 +▁progressive -12636 +▁smartphones -12637 +Tech -12638 +▁starter -12639 +▁commented -12640 +▁Fel -12641 +fulness -12642 +specific -12643 +eta -12644 +▁Lower -12645 
+▁Healthcare -12646 +▁VIP -12647 +▁chick -12648 +▁drunk -12649 +fi -12650 +hist -12651 +ometer -12652 +borough -12653 +olo -12654 +▁deserves -12655 +rylic -12656 +▁Cards -12657 +driven -12658 +▁carb -12659 +▁Display -12660 +▁neglect -12661 +▁bonds -12662 +▁Rele -12663 +irable -12664 +About -12665 +roph -12666 +▁leisure -12667 +▁vacuum -12668 +smith -12669 +▁backing -12670 +▁Rank -12671 +▁anger -12672 +▁passage -12673 +▁metab -12674 +Mon -12675 +▁Risk -12676 +▁iPod -12677 +▁killer -12678 +▁vinegar -12679 +agu -12680 +ONE -12681 +▁Patri -12682 +▁Exhib -12683 +oub -12684 +opes -12685 +ificates -12686 +▁Terry -12687 +▁Around -12688 +▁Gardens -12689 +▁Alt -12690 +▁opponents -12691 +▁Lov -12692 +▁cited -12693 +▁outlet -12694 +▁steep -12695 +▁corporations -12696 +▁mothers -12697 +▁rede -12698 +▁politicians -12699 +Oct -12700 +isd -12701 +▁Mitchell -12702 +aments -12703 +▁mainstream -12704 +▁easiest -12705 +asive -12706 +uitive -12707 +▁premiere -12708 +▁classical -12709 +igs -12710 +▁Masters -12711 +▁resil -12712 +▁heated -12713 +▁dip -12714 +ORD -12715 +FR -12716 +attered -12717 +dated -12718 +▁ghost -12719 +▁hence -12720 +▁cinnamon -12721 +▁Parks -12722 +▁independence -12723 +▁handmade -12724 +▁accent -12725 +▁Costa -12726 +▁emerged -12727 +▁bree -12728 +▁Serve -12729 +▁enemies -12730 +▁Commons -12731 +rapeutic -12732 +▁pose -12733 +▁Reports -12734 +▁Cleaning -12735 +▁delet -12736 +▁longest -12737 +Day -12738 +▁creatures -12739 +▁packs -12740 +▁cuisine -12741 +pin -12742 +▁conducting -12743 +▁encry -12744 +▁Browse -12745 +▁tongue -12746 +John -12747 +▁Dimensions -12748 +▁grandchildren -12749 +▁poem -12750 +▁templates -12751 +▁Snap -12752 +▁Theater -12753 +▁Diet -12754 +▁fare -12755 +▁opponent -12756 +▁Dublin -12757 +▁deciding -12758 +▁LOVE -12759 +▁lid -12760 +▁Palace -12761 +▁reaches -12762 +▁*** -12763 +▁unemployment -12764 +chi -12765 +▁Clay -12766 +▁supplements -12767 +▁EM -12768 +▁Buffalo -12769 +▁torn -12770 +▁Marvel -12771 +▁Arkansas -12772 +▁Marshall -12773 
+▁memorial -12774 +▁conce -12775 +▁cleaner -12776 +▁Guid -12777 +▁crust -12778 +▁Written -12779 +▁retailer -12780 +▁Gran -12781 +wy -12782 +▁silent -12783 +ropriate -12784 +▁Outdoor -12785 +▁soda -12786 +▁affecting -12787 +▁Impact -12788 +▁holder -12789 +▁Ori -12790 +▁ESPN -12791 +▁slice -12792 +▁transit -12793 +▁variable -12794 +▁Stage -12795 +ASS -12796 +▁superm -12797 +▁negotiations -12798 +▁Robin -12799 +omore -12800 +▁Physical -12801 +isor -12802 +▁gad -12803 +▁achievements -12804 +Art -12805 +▁travels -12806 +▁Hunt -12807 +▁quoted -12808 +▁hazard -12809 +▁observe -12810 +▁consistency -12811 +Med -12812 +▁Mason -12813 +erto -12814 +▁ramp -12815 +▁permitted -12816 +▁blast -12817 +mi -12818 +▁Sold -12819 +▁Seven -12820 +▁trainer -12821 +rowing -12822 +▁Pap -12823 +▁Yeah -12824 +▁Sent -12825 +▁digit -12826 +▁bake -12827 +▁Soon -12828 +▁hoped -12829 +▁underground -12830 +▁travelers -12831 +▁Agree -12832 +▁meditation -12833 +▁inning -12834 +produ -12835 +ocket -12836 +▁Faculty -12837 +▁Volume -12838 +▁mysterious -12839 +▁recycled -12840 +▁RV -12841 +▁Campaign -12842 +▁ingredient -12843 +▁Honey -12844 +▁pert -12845 +▁Fight -12846 +▁bullet -12847 +▁Left -12848 +▁Request -12849 +▁thirty -12850 +oting -12851 +aria -12852 +▁.... 
-12853 +Apr -12854 +▁Douglas -12855 +▁Keith -12856 +▁compound -12857 +▁shoulders -12858 +izard -12859 +▁limitations -12860 +▁pedest -12861 +▁eco -12862 +▁municipal -12863 +▁improves -12864 +▁slides -12865 +▁bail -12866 +hh -12867 +▁costly -12868 +▁CAN -12869 +▁Fit -12870 +▁restricted -12871 +rosc -12872 +▁Ideas -12873 +▁bugs -12874 +▁bump -12875 +▁groom -12876 +▁gem -12877 +▁allegations -12878 +▁paths -12879 +▁incorporated -12880 +iplinary -12881 +liminary -12882 +cies -12883 +agraph -12884 +▁Federation -12885 +drop -12886 +▁loading -12887 +enery -12888 +▁Associates -12889 +▁whisk -12890 +▁architectural -12891 +▁threatened -12892 +▁underneath -12893 +▁journalist -12894 +▁Empire -12895 +▁Tob -12896 +ardo -12897 +▁Revolution -12898 +▁UP -12899 +▁archive -12900 +▁Storm -12901 +▁litigation -12902 +▁Military -12903 +▁retro -12904 +▁Shan -12905 +hop -12906 +▁Zone -12907 +etheless -12908 +▁tempor -12909 +▁barg -12910 +mented -12911 +stage -12912 +▁Platform -12913 +▁tablespoon -12914 +laimed -12915 +opic -12916 +IAL -12917 +alia -12918 +▁mutual -12919 +aul -12920 +▁Interior -12921 +▁Episode -12922 +▁Roger -12923 +▁Cameron -12924 +▁respected -12925 +▁ic -12926 +▁lining -12927 +RO -12928 +▁Hus -12929 +▁terminal -12930 +▁remembered -12931 +▁lamp -12932 +▁creator -12933 +▁releasing -12934 +▁understands -12935 +▁Bake -12936 +lla -12937 +inction -12938 +▁ups -12939 +iasm -12940 +▁Horn -12941 +▁connectivity -12942 +bec -12943 +▁radical -12944 +▁vegg -12945 +arently -12946 +ateral -12947 +trans -12948 +▁incom -12949 +▁influenced -12950 +▁bankruptcy -12951 +▁inval -12952 +Reg -12953 +▁acceptance -12954 +afood -12955 +eps -12956 +pectives -12957 +▁shelves -12958 +▁Rate -12959 +▁sensors -12960 +▁Crown -12961 +▁visa -12962 +▁skirt -12963 +ui -12964 +▁proceeds -12965 +ooting -12966 +▁Ranch -12967 +▁revenues -12968 +ulator -12969 +▁Cable -12970 +▁maj -12971 +WA -12972 +ACE -12973 +▁Initi -12974 +▁Tip -12975 +▁Aid -12976 +remy -12977 +▁Gill -12978 +▁embedded -12979 +▁clarity -12980 
+ritional -12981 +▁gradually -12982 +▁attraction -12983 +▁essence -12984 +girl -12985 +▁Bluetooth -12986 +▁Legisl -12987 +▁shout -12988 +▁Linda -12989 +▁ate -12990 +outube -12991 +▁inaug -12992 +▁Columbus -12993 +rison -12994 +▁specializes -12995 +▁Blues -12996 +click -12997 +▁physicians -12998 +ergy -12999 +▁struggled -13000 +▁abandoned -13001 +aired -13002 +▁Depending -13003 +▁appearing -13004 +▁vessel -13005 +▁Materials -13006 +▁ranges -13007 +▁incidents -13008 +illo -13009 +▁browsing -13010 +▁nominated -13011 +▁sketch -13012 +ango -13013 +▁defeated -13014 +▁Lakes -13015 +ummer -13016 +ped -13017 +▁sewing -13018 +Over -13019 +▁inclusion -13020 +▁Horse -13021 +▁Shopping -13022 +▁symbols -13023 +zech -13024 +bat -13025 +bey -13026 +apor -13027 +▁discrimination -13028 +oded -13029 +▁stressed -13030 +igue -13031 +▁unp -13032 +▁BA -13033 +▁implications -13034 +▁trains -13035 +▁Ple -13036 +di -13037 +▁Princess -13038 +▁notable -13039 +umper -13040 +▁manufacture -13041 +▁Sit -13042 +▁wides -13043 +▁struggles -13044 +▁stroll -13045 +'' -13046 +assy -13047 +dig -13048 +▁Ing -13049 +arness -13050 +People -13051 +▁journalists -13052 +▁sells -13053 +▁Rivers -13054 +ean -13055 +▁heels -13056 +▁tar -13057 +▁Griff -13058 +▁Liberty -13059 +agne -13060 +▁plugin -13061 +▁regime -13062 +▁fract -13063 +▁Parents -13064 +esis -13065 +▁Nashville -13066 +▁Mumbai -13067 +verages -13068 +aft -13069 +▁strengths -13070 +▁Treas -13071 +▁attracted -13072 +▁Prob -13073 +▁lone -13074 +▁gloss -13075 +▁continually -13076 +▁Knight -13077 +▁fails -13078 +▁Eat -13079 +obs -13080 +▁Spons -13081 +▁minimize -13082 +▁Fri -13083 +▁solving -13084 +pers -13085 +▁introducing -13086 +▁stap -13087 +cover -13088 +Dr -13089 +▁cabinets -13090 +▁reminds -13091 +Down -13092 +UK -13093 +▁Wireless -13094 +▁determination -13095 +▁Eagles -13096 +▁Pages -13097 +▁hospitality -13098 +amas -13099 +▁joke -13100 +▁Pic -13101 +▁decorated -13102 +▁editorial -13103 +▁conjunction -13104 +itcher -13105 +▁complexity -13106 
+atility -13107 +▁Madrid -13108 +▁Lag -13109 +▁Nintendo -13110 +lu -13111 +▁chemistry -13112 +▁Manual -13113 +▁topped -13114 +▁mayor -13115 +▁Strategy -13116 +▁Delta -13117 +▁revers -13118 +▁classified -13119 +▁pairs -13120 +▁Baptist -13121 +▁shield -13122 +▁permits -13123 +umbers -13124 +ocks -13125 +Per -13126 +▁nationally -13127 +▁Cotton -13128 +ivan -13129 +▁Aer -13130 +▁Letter -13131 +edar -13132 +▁notified -13133 +▁humanity -13134 +Back -13135 +▁mart -13136 +ensed -13137 +such -13138 +▁Knowledge -13139 +▁intensity -13140 +▁TN -13141 +arms -13142 +▁Different -13143 +▁meantime -13144 +pread -13145 +plete -13146 +▁practicing -13147 +▁brut -13148 +▁barriers -13149 +▁fel -13150 +orted -13151 +▁nails -13152 +▁Brothers -13153 +ocur -13154 +▁elite -13155 +▁FM -13156 +▁tasting -13157 +▁deemed -13158 +ontal -13159 +▁slices -13160 +▁calculated -13161 +▁woods -13162 +▁Offer -13163 +▁locals -13164 +▁Merc -13165 +▁manages -13166 +▁coastal -13167 +▁Teacher -13168 +▁BBQ -13169 +▁ho -13170 +▁Disease -13171 +imens -13172 +Su -13173 +▁Throughout -13174 +▁Indeed -13175 +▁cannabis -13176 +anean -13177 +▁Height -13178 +aho -13179 +▁Guarant -13180 +▁Woods -13181 +▁Faith -13182 +▁Glen -13183 +▁unnecessary -13184 +LP -13185 +boro -13186 +VI -13187 +gary -13188 +▁traveled -13189 +ulu -13190 +rock -13191 +pired -13192 +▁runners -13193 +ACH -13194 +▁plumbing -13195 +rates -13196 +▁sticks -13197 +▁tiles -13198 +▁Tes -13199 +▁Spect -13200 +▁rebuild -13201 +▁pension -13202 +▁nearest -13203 +▁Kyle -13204 +▁Monte -13205 +compass -13206 +dered -13207 +▁Package -13208 +▁Imagine -13209 +inks -13210 +▁Stri -13211 +▁approached -13212 +▁LGBT -13213 +▁None -13214 +▁Airl -13215 +▁TA -13216 +news -13217 +▁crossed -13218 +arrings -13219 +▁inclusive -13220 +oustic -13221 +alling -13222 +▁Arena -13223 +▁landed -13224 +ropract -13225 +▁Pinterest -13226 +▁coating -13227 +▁Adjust -13228 +ocial -13229 +▁turb -13230 +▁joins -13231 +▁volunt -13232 +ORK -13233 +▁novels -13234 +cedes -13235 +▁bathrooms -13236 
+▁Popular -13237 +▁straightforward -13238 +▁Lost -13239 +▁ports -13240 +thod -13241 +▁Coal -13242 +▁puzzle -13243 +▁instances -13244 +▁establishing -13245 +ographer -13246 +▁recruiting -13247 +▁Crime -13248 +▁rewarding -13249 +▁runner -13250 +▁touched -13251 +▁CNN -13252 +▁defence -13253 +connect -13254 +▁strap -13255 +▁py -13256 +Best -13257 +▁Fif -13258 +▁junk -13259 +▁convention -13260 +omer -13261 +▁DM -13262 +▁Points -13263 +▁paired -13264 +▁Jamie -13265 +▁scared -13266 +quisite -13267 +▁REAL -13268 +▁todd -13269 +▁Flat -13270 +itual -13271 +▁Murphy -13272 +▁Wor -13273 +some -13274 +▁Billy -13275 +Black -13276 +▁tbsp -13277 +umm -13278 +▁alerts -13279 +ocate -13280 +rella -13281 +atinum -13282 +▁targeting -13283 +▁Ster -13284 +▁receipt -13285 +▁Electronic -13286 +▁lane -13287 +▁Northwest -13288 +▁quilt -13289 +ijing -13290 +▁trauma -13291 +▁Marie -13292 +▁controller -13293 +▁harass -13294 +HC -13295 +▁warming -13296 +▁Spl -13297 +▁coin -13298 +elli -13299 +izers -13300 +▁dispute -13301 +▁sooner -13302 +▁lenses -13303 +▁fatal -13304 +anka -13305 +▁delayed -13306 +▁electro -13307 +▁combining -13308 +lett -13309 +uming -13310 +▁identical -13311 +▁Romney -13312 +▁Chand -13313 +▁abortion -13314 +▁explaining -13315 +▁shar -13316 +▁Rio -13317 +▁wardrobe -13318 +▁promotions -13319 +ITE -13320 +▁Driver -13321 +▁cooler -13322 +▁Vista -13323 +▁Barry -13324 +▁fireplace -13325 +pg -13326 +function -13327 +▁experimental -13328 +ogurt -13329 +▁analys -13330 +▁Poland -13331 +bris -13332 +▁chains -13333 +roe -13334 +uffy -13335 +▁Carm -13336 +attery -13337 +▁Emma -13338 +▁vessels -13339 +▁rebounds -13340 +may -13341 +▁ethical -13342 +OUT -13343 +▁floating -13344 +▁Medium -13345 +▁loyalty -13346 +ede -13347 +▁creamy -13348 +▁volumes -13349 +XX -13350 +cos -13351 +▁retrie -13352 +Look -13353 +rise -13354 +▁transformed -13355 +▁homework -13356 +ixtures -13357 +▁advisor -13358 +pace -13359 +▁shred -13360 +mal -13361 +▁Thor -13362 +▁Mob -13363 +▁tob -13364 +▁Graduate -13365 +▁cant 
-13366 +commun -13367 +▁reactions -13368 +▁Pump -13369 +▁Vill -13370 +GM -13371 +▁explores -13372 +▁searches -13373 +▁occasional -13374 +uated -13375 +▁Capit -13376 +▁Ens -13377 +▁warmth -13378 +▁density -13379 +▁eighth -13380 +strong -13381 +dis -13382 +▁incorrect -13383 +▁gest -13384 +▁Oracle -13385 +▁separated -13386 +▁derived -13387 +▁relatives -13388 +▁serial -13389 +itudes -13390 +▁MLS -13391 +▁Progress -13392 +▁McDonald -13393 +▁restored -13394 +forced -13395 +▁Agent -13396 +▁Bot -13397 +aneous -13398 +▁enthusiasm -13399 +▁'' -13400 +▁Birthday -13401 +▁transparency -13402 +▁nude -13403 +ometric -13404 +▁backgrounds -13405 +IME -13406 +▁parallel -13407 +Out -13408 +▁Brandon -13409 +▁Spark -13410 +▁Mosc -13411 +▁Chat -13412 +▁literary -13413 +▁Teen -13414 +▁chic -13415 +▁Mine -13416 +▁utilized -13417 +▁Barack -13418 +▁socks -13419 +▁Budget -13420 +▁kicked -13421 +▁Conditions -13422 +▁sustained -13423 +▁roller -13424 +▁Counsel -13425 +▁freely -13426 +▁reservation -13427 +▁penet -13428 +▁Richards -13429 +▁Ry -13430 +▁uncomfortable -13431 +▁excuse -13432 +▁medal -13433 +▁arrives -13434 +position -13435 +▁Dent -13436 +sor -13437 +ques -13438 +▁Cav -13439 +▁recovered -13440 +Aug -13441 +▁Furniture -13442 +▁unpre -13443 +▁Going -13444 +iov -13445 +▁inviting -13446 +▁populations -13447 +▁oak -13448 +▁coupons -13449 +▁founding -13450 +▁NA -13451 +▁holders -13452 +▁Alice -13453 +▁Basket -13454 +World -13455 +▁grandmother -13456 +▁Excel -13457 +▁biological -13458 +▁Healthy -13459 +▁Prop -13460 +wl -13461 +SH -13462 +▁Route -13463 +ancies -13464 +▁Tun -13465 +▁destruction -13466 +▁Nancy -13467 +▁rod -13468 +END -13469 +▁thermal -13470 +imedia -13471 +▁professionally -13472 +arios -13473 +bul -13474 +Friday -13475 +▁comed -13476 +▁jumping -13477 +▁Lang -13478 +▁Nat -13479 +▁inhib -13480 +▁challenged -13481 +rah -13482 +aukee -13483 +▁upgrades -13484 +▁Grove -13485 +▁Economics -13486 +▁Pine -13487 +▁Doug -13488 +Good -13489 +▁Lots -13490 +▁ampl -13491 +▁FAQ -13492 
+▁broader -13493 +▁preference -13494 +▁profound -13495 +oni -13496 +▁Eagle -13497 +▁Hampshire -13498 +▁texts -13499 +idel -13500 +▁attacked -13501 +type -13502 +▁surprisingly -13503 +▁Circle -13504 +▁deadly -13505 +mir -13506 +▁silk -13507 +▁Lions -13508 +▁accurately -13509 +▁portrait -13510 +▁Candid -13511 +▁grams -13512 +▁barrier -13513 +▁convicted -13514 +▁Items -13515 +base -13516 +▁Designer -13517 +▁musician -13518 +▁reprodu -13519 +▁dub -13520 +▁Hotels -13521 +▁fortunate -13522 +onz -13523 +▁Priv -13524 +▁Ru -13525 +▁adapted -13526 +▁IBM -13527 +▁Tol -13528 +▁apples -13529 +▁Multiple -13530 +▁Typ -13531 +▁suppose -13532 +▁lands -13533 +▁adjusted -13534 +▁Johnny -13535 +▁Peters -13536 +▁Viol -13537 +▁ug -13538 +▁fier -13539 +▁evident -13540 +▁delays -13541 +hole -13542 +▁turkey -13543 +rek -13544 +▁mandatory -13545 +▁Workshop -13546 +▁pending -13547 +iners -13548 +phony -13549 +▁Vitamin -13550 +▁Ashley -13551 +▁Luther -13552 +Life -13553 +▁Hudson -13554 +umph -13555 +▁ribbon -13556 +▁Fortunately -13557 +pensive -13558 +pal -13559 +▁stimul -13560 +▁referring -13561 +▁fork -13562 +▁nurses -13563 +cam -13564 +▁Fo -13565 +Part -13566 +▁Defin -13567 +SO -13568 +▁frost -13569 +▁yeah -13570 +SI -13571 +urd -13572 +▁Ni -13573 +▁farming -13574 +▁PT -13575 +▁blade -13576 +anas -13577 +▁pressing -13578 +▁cakes -13579 +▁repeatedly -13580 +▁sellers -13581 +aning -13582 +▁buried -13583 +▁Mechan -13584 +urdy -13585 +hess -13586 +▁contend -13587 +▁panc -13588 +▁Supply -13589 +▁MY -13590 +oooo -13591 +watch -13592 +▁argued -13593 +▁deliber -13594 +▁TM -13595 +▁Neil -13596 +▁exams -13597 +▁prosec -13598 +eded -13599 +▁NHL -13600 +▁Swedish -13601 +Orig -13602 +rupted -13603 +▁Status -13604 +▁newer -13605 +▁entity -13606 +▁salmon -13607 +▁premises -13608 +▁FBI -13609 +▁organised -13610 +▁twelve -13611 +▁catching -13612 +ingo -13613 +▁planted -13614 +▁pickup -13615 +adays -13616 +▁Tab -13617 +ponse -13618 +▁violation -13619 +artered -13620 +▁leverage -13621 +adi -13622 +▁silly 
-13623 +▁cozy -13624 +▁relaxation -13625 +ski -13626 +▁smoothly -13627 +▁assisted -13628 +your -13629 +▁chapters -13630 +▁Covid -13631 +▁promotes -13632 +▁orientation -13633 +▁Standards -13634 +eli -13635 +▁Myst -13636 +▁Warner -13637 +due -13638 +Cal -13639 +▁interpretation -13640 +▁provisions -13641 +▁traged -13642 +▁dramatically -13643 +▁(" -13644 +▁Jeremy -13645 +▁midnight -13646 +closed -13647 +▁Opera -13648 +▁needing -13649 +▁Climate -13650 +▁PRE -13651 +▁Recovery -13652 +▁SUV -13653 +▁Surv -13654 +▁flavour -13655 +▁SER -13656 +▁Remov -13657 +▁strips -13658 +atson -13659 +▁AF -13660 +founder -13661 +▁steal -13662 +▁thrive -13663 +otto -13664 +▁balcon -13665 +▁liberal -13666 +▁matched -13667 +▁Richmond -13668 +▁Circuit -13669 +▁slim -13670 +color -13671 +▁logistics -13672 +▁centres -13673 +▁Decor -13674 +sal -13675 +▁Parking -13676 +▁wrist -13677 +▁intensive -13678 +▁significance -13679 +▁Agriculture -13680 +▁silence -13681 +omas -13682 +▁camps -13683 +▁overt -13684 +▁revol -13685 +▁cycl -13686 +▁Pent -13687 +▁dict -13688 +▁prayers -13689 +▁arc -13690 +arium -13691 +▁spice -13692 +ournals -13693 +okia -13694 +▁freshman -13695 +▁scent -13696 +▁integral -13697 +▁avo -13698 +▁Skills -13699 +▁Len -13700 +▁glam -13701 +▁Isa -13702 +▁Arc -13703 +NG -13704 +▁mos -13705 +▁counseling -13706 +▁Almost -13707 +▁reflected -13708 +hero -13709 +▁strikes -13710 +▁moistur -13711 +▁magnificent -13712 +▁exped -13713 +▁tattoo -13714 +▁urged -13715 +▁Kick -13716 +▁responsive -13717 +nor -13718 +▁pipeline -13719 +▁Naz -13720 +ptic -13721 +▁WiFi -13722 +▁precisely -13723 +▁animated -13724 +▁Pour -13725 +▁skip -13726 +▁aggreg -13727 +iterr -13728 +▁Pure -13729 +▁banned -13730 +▁nurt -13731 +▁Parts -13732 +▁// -13733 +▁Arthur -13734 +▁concerts -13735 +▁streak -13736 +normal -13737 +▁lime -13738 +▁cargo -13739 +▁kg -13740 +▁besides -13741 +▁shorts -13742 +▁shy -13743 +▁Hab -13744 +▁newspapers -13745 +▁Sr -13746 +▁Mos -13747 +▁enthusiasts -13748 +▁differ -13749 +▁Airlines -13750 +▁hott 
-13751 +igo -13752 +eston -13753 +▁Rules -13754 +▁affair -13755 +▁Moving -13756 +group -13757 +▁Haz -13758 +▁resc -13759 +▁Kl -13760 +osph -13761 +ioned -13762 +.: -13763 +▁protects -13764 +▁backs -13765 +▁brass -13766 +▁Interview -13767 +amous -13768 +ucker -13769 +ellery -13770 +▁imaging -13771 +start -13772 +hester -13773 +▁subt -13774 +▁unders -13775 +▁urge -13776 +ais -13777 +▁Either -13778 +▁melt -13779 +▁ancest -13780 +▁decorative -13781 +▁manifest -13782 +▁sandwich -13783 +▁heroes -13784 +mus -13785 +▁temple -13786 +▁stabil -13787 +reation -13788 +▁Upper -13789 +▁confusion -13790 +ulent -13791 +▁Answer -13792 +▁hilar -13793 +▁Leather -13794 +▁Dear -13795 +▁widespread -13796 +▁Script -13797 +rimp -13798 +waukee -13799 +▁towel -13800 +arten -13801 +▁pursuing -13802 +ographical -13803 +▁OH -13804 +▁modes -13805 +▁Average -13806 +Who -13807 +▁Picture -13808 +▁cosmetic -13809 +▁crossing -13810 +olester -13811 +▁Especially -13812 +ammed -13813 +▁branding -13814 +bitious -13815 +▁suggestion -13816 +▁spreading -13817 +▁Veterans -13818 +▁Camera -13819 +isons -13820 +▁intend -13821 +▁Issue -13822 +▁indul -13823 +▁Specific -13824 +Posted -13825 +hesive -13826 +▁Switch -13827 +▁desires -13828 +▁pile -13829 +▁styl -13830 +▁Stanley -13831 +▁Beijing -13832 +▁festivals -13833 +▁additionally -13834 +▁forgot -13835 +▁cleared -13836 +▁Solid -13837 +hyd -13838 +▁surroundings -13839 +▁outlook -13840 +▁fires -13841 +▁downloads -13842 +▁mesh -13843 +▁organizing -13844 +........ 
-13845 +▁bases -13846 +▁radiation -13847 +▁shirts -13848 +▁Actually -13849 +incinn -13850 +▁XP -13851 +▁maintains -13852 +▁surveillance -13853 +named -13854 +▁credential -13855 +▁blogger -13856 +▁Archives -13857 +hu -13858 +▁Tap -13859 +▁hassle -13860 +fox -13861 +▁immense -13862 +▁autumn -13863 +▁vocals -13864 +▁inhab -13865 +▁poison -13866 +Station -13867 +▁Competition -13868 +▁clay -13869 +thal -13870 +▁stiff -13871 +▁sung -13872 +▁Kenya -13873 +▁spoon -13874 +▁wallet -13875 +▁Degree -13876 +▁coordinator -13877 +▁Tags -13878 +▁Mes -13879 +▁coinc -13880 +▁treasure -13881 +▁Publishing -13882 +aman -13883 +▁Ble -13884 +lie -13885 +▁Running -13886 +▁promoted -13887 +▁reson -13888 +heimer -13889 +▁surveys -13890 +rolled -13891 +▁curve -13892 +▁Subject -13893 +oriented -13894 +allic -13895 +building -13896 +▁tuition -13897 +▁Danny -13898 +▁obligation -13899 +▁fond -13900 +▁suspected -13901 +▁Tigers -13902 +ellar -13903 +▁prosper -13904 +▁gloves -13905 +▁Prevention -13906 +▁Late -13907 +▁competitions -13908 +▁Amazing -13909 +▁bored -13910 +▁Ye -13911 +TY -13912 +astics -13913 +jan -13914 +▁Hack -13915 +▁generating -13916 +▁Intelligence -13917 +▁elementary -13918 +umbled -13919 +▁tends -13920 +ritis -13921 +▁predicted -13922 +▁develops -13923 +▁mineral -13924 +▁Curt -13925 +agg -13926 +▁SQL -13927 +▁cyl -13928 +▁vou -13929 +▁arise -13930 +▁Jake -13931 +incinnati -13932 +▁cit -13933 +▁Alf -13934 +mad -13935 +▁magnetic -13936 +▁honors -13937 +▁Academic -13938 +▁NOTE -13939 +▁Lion -13940 +▁approaching -13941 +▁arriving -13942 +minded -13943 +▁Bass -13944 +▁modify -13945 +pert -13946 +▁Norm -13947 +▁Labour -13948 +▁TC -13949 +▁stats -13950 +lesh -13951 +▁monu -13952 +ountain -13953 +OOK -13954 +▁Advisory -13955 +▁schedules -13956 +▁legitimate -13957 +▁mast -13958 +advant -13959 +tic -13960 +pher -13961 +▁Czech -13962 +▁governance -13963 +bery -13964 +▁anonymous -13965 +▁lb -13966 +▁jurisd -13967 +iterranean -13968 +ref -13969 +omi -13970 +▁Kap -13971 +▁fried -13972 
+▁watches -13973 +▁Wildlife -13974 +▁weddings -13975 +▁traded -13976 +▁optimization -13977 +▁Regardless -13978 +je -13979 +▁PD -13980 +▁fucking -13981 +▁clouds -13982 +lift -13983 +▁Ultimate -13984 +▁advocacy -13985 +▁rh -13986 +▁inher -13987 +eing -13988 +addle -13989 +▁Mt -13990 +▁Abd -13991 +atched -13992 +▁radi -13993 +PH -13994 +product -13995 +▁helicop -13996 +▁ACC -13997 +▁beaten -13998 +▁dia -13999 +▁entities -14000 +▁Cad -14001 +▁lol -14002 +▁IF -14003 +▁profitable -14004 +▁Cant -14005 +▁households -14006 +.|| -14007 +▁Command -14008 +brew -14009 +▁domin -14010 +▁Downtown -14011 +▁Adventure -14012 +icular -14013 +▁Julie -14014 +▁earthqu -14015 +Sec -14016 +castle -14017 +▁prox -14018 +▁Historical -14019 +IZ -14020 +▁qualifying -14021 +cipe -14022 +▁na -14023 +▁Steam -14024 +▁Championships -14025 +anium -14026 +▁Moscow -14027 +▁footprint -14028 +▁Nord -14029 +▁valve -14030 +▁flows -14031 +rots -14032 +▁wallpaper -14033 +▁lace -14034 +▁goodness -14035 +▁Specialist -14036 +▁Lauren -14037 +▁Finding -14038 +white -14039 +▁collar -14040 +cient -14041 +▁lear -14042 +▁corruption -14043 +▁perception -14044 +▁licensing -14045 +ifle -14046 +▁arguments -14047 +▁touches -14048 +urring -14049 +▁Cop -14050 +▁addict -14051 +flu -14052 +▁celebrities -14053 +▁Cafe -14054 +▁arbit -14055 +▁Speaker -14056 +▁concentrate -14057 +▁disappear -14058 +Fl -14059 +ERY -14060 +▁carriers -14061 +▁debris -14062 +▁Purchase -14063 +giene -14064 +pring -14065 +▁Ted -14066 +▁Fame -14067 +▁masc -14068 +▁Enc -14069 +▁Nep -14070 +▁pest -14071 +▁nit -14072 +▁Stick -14073 +▁vitamins -14074 +▁privilege -14075 +▁Pope -14076 +▁GOP -14077 +NET -14078 +▁Foods -14079 +▁forming -14080 +▁Constitution -14081 +▁commentary -14082 +▁Wikipedia -14083 +▁mint -14084 +▁uncertainty -14085 +▁congr -14086 +▁EL -14087 +▁Init -14088 +▁parliament -14089 +▁accessed -14090 +PO -14091 +▁Neigh -14092 +▁farms -14093 +tails -14094 +amine -14095 +kward -14096 +▁Pun -14097 +large -14098 +▁recreational -14099 +▁adjustments 
-14100 +▁ambitious -14101 +ME -14102 +▁legislative -14103 +Off -14104 +▁paragraph -14105 +▁Norway -14106 +▁Coming -14107 +matic -14108 +▁badly -14109 +▁MAR -14110 +▁Ship -14111 +Gener -14112 +▁exotic -14113 +▁connects -14114 +▁tray -14115 +▁Corps -14116 +▁Turkish -14117 +▁Listing -14118 +▁functioning -14119 +▁queries -14120 +▁Montreal -14121 +▁Recipe -14122 +▁adjacent -14123 +▁microwave -14124 +▁zones -14125 +▁Pred -14126 +▁synthetic -14127 +keeper -14128 +▁meter -14129 +chant -14130 +▁sake -14131 +▁Katie -14132 +▁RT -14133 +▁deficit -14134 +▁evolved -14135 +▁Lie -14136 +▁PayPal -14137 +olesterol -14138 +▁borders -14139 +▁provin -14140 +▁Suz -14141 +▁Surgery -14142 +▁disagree -14143 +fil -14144 +gence -14145 +▁mins -14146 +▁chamber -14147 +▁sore -14148 +▁persu -14149 +Christ -14150 +▁UA -14151 +hedral -14152 +iw -14153 +▁pussy -14154 +IRE -14155 +▁OFF -14156 +▁marry -14157 +▁damp -14158 +ourt -14159 +▁Edin -14160 +▁manually -14161 +▁LG -14162 +▁counting -14163 +▁SN -14164 +▁athlete -14165 +▁Plans -14166 +▁democracy -14167 +▁melted -14168 +ratulations -14169 +▁Sandy -14170 +▁Montana -14171 +▁RC -14172 +rose -14173 +▁buses -14174 +▁BO -14175 +fare -14176 +▁verified -14177 +▁educators -14178 +▁partial -14179 +▁Roof -14180 +▁infected -14181 +▁pharmaceutical -14182 +ioxid -14183 +▁practitioners -14184 +▁sacrifice -14185 +PER -14186 +▁Apparently -14187 +otherapy -14188 +▁consume -14189 +▁facebook -14190 +▁experiments -14191 +▁legally -14192 +▁upgraded -14193 +stit -14194 +▁SF -14195 +▁pel -14196 +▁congress -14197 +▁physics -14198 +▁reservations -14199 +▁Bishop -14200 +▁Belgium -14201 +▁trace -14202 +▁affiliated -14203 +▁maxim -14204 +▁Dow -14205 +▁Wel -14206 +▁Higher -14207 +▁Sud -14208 +imm -14209 +▁thri -14210 +▁obtaining -14211 +▁constra -14212 +▁brave -14213 +▁Villa -14214 +ierra -14215 +▁breakdown -14216 +▁reset -14217 +▁satisfying -14218 +▁pars -14219 +▁Harbor -14220 +▁Kirk -14221 +annon -14222 +▁query -14223 +oca -14224 +▁editors -14225 +▁pupp -14226 +udge -14227 
+▁streams -14228 +▁Provide -14229 +▁Nursing -14230 +▁elastic -14231 +appropriate -14232 +IONS -14233 +istance -14234 +▁Pel -14235 +▁institutional -14236 +illet -14237 +▁toug -14238 +▁discusses -14239 +▁workflow -14240 +corn -14241 +▁annoying -14242 +▁ONE -14243 +half -14244 +▁Heavy -14245 +fold -14246 +▁Joint -14247 +▁carp -14248 +ido -14249 +▁Bos -14250 +▁remed -14251 +▁psychology -14252 +▁Potter -14253 +▁rankings -14254 +▁sofa -14255 +▁pools -14256 +▁contam -14257 +▁deployment -14258 +▁circles -14259 +▁astron -14260 +▁bubble -14261 +ippers -14262 +▁tobacco -14263 +▁charter -14264 +▁defic -14265 +inee -14266 +QU -14267 +▁viral -14268 +▁mentor -14269 +▁vo -14270 +▁lifting -14271 +▁distribute -14272 +▁Weeke -14273 +▁avoiding -14274 +▁LP -14275 +▁Gear -14276 +borne -14277 +▁submitting -14278 +▁NHS -14279 +▁Cany -14280 +▁mounting -14281 +▁Vintage -14282 +bling -14283 +▁relie -14284 +ancell -14285 +▁Amend -14286 +▁HV -14287 +▁photographers -14288 +▁planting -14289 +▁RES -14290 +▁Previous -14291 +▁enrolled -14292 +▁Shore -14293 +▁Oakland -14294 +▁unveiled -14295 +ometry -14296 +▁Nathan -14297 +rele -14298 +▁Cake -14299 +▁LinkedIn -14300 +chet -14301 +▁Discount -14302 +▁logic -14303 +▁sometime -14304 +▁innocent -14305 +▁landl -14306 +▁Mario -14307 +▁readily -14308 +▁regarded -14309 +▁Mercedes -14310 +▁regret -14311 +▁refrigerator -14312 +▁Ku -14313 +▁Podcast -14314 +▁mall -14315 +▁missions -14316 +▁assert -14317 +▁modeling -14318 +acies -14319 +▁Lodge -14320 +▁crops -14321 +▁Teaching -14322 +▁informative -14323 +▁Patt -14324 +▁Replace -14325 +GR -14326 +▁Wear -14327 +▁shocked -14328 +▁Dating -14329 +▁deputy -14330 +▁critics -14331 +oric -14332 +▁steering -14333 +▁theore -14334 +▁NT -14335 +▁storyt -14336 +elong -14337 +▁pole -14338 +▁tel -14339 +▁scenarios -14340 +▁Rug -14341 +▁structured -14342 +▁sends -14343 +▁Growth -14344 +abul -14345 +olved -14346 +▁sealed -14347 +▁darkness -14348 +▁caution -14349 +grad -14350 +▁posters -14351 +rik -14352 +▁administrator -14353 
+▁suggesting -14354 +SN -14355 +▁AA -14356 +aph -14357 +cedented -14358 +▁Dennis -14359 +▁diesel -14360 +▁Thunder -14361 +▁reserves -14362 +asa -14363 +▁determining -14364 +▁arena -14365 +▁Magn -14366 +unge -14367 +▁updating -14368 +▁Sha -14369 +▁Centers -14370 +▁greens -14371 +/. -14372 +▁compiled -14373 +▁assignments -14374 +▁prevents -14375 +erse -14376 +▁Words -14377 +▁voltage -14378 +Feb -14379 +▁complain -14380 +OWN -14381 +▁priorit -14382 +▁Ferr -14383 +ounding -14384 +▁amateur -14385 +▁lenders -14386 +iop -14387 +▁perpet -14388 +▁Cincinnati -14389 +▁diving -14390 +▁Champion -14391 +posed -14392 +▁Allow -14393 +▁Videos -14394 +▁absorb -14395 +▁Tournament -14396 +▁advances -14397 +▁cognitive -14398 +▁Baseball -14399 +▁metres -14400 +▁IRS -14401 +▁wre -14402 +▁strictly -14403 +▁Cruz -14404 +istle -14405 +▁pupils -14406 +▁terrorist -14407 +ourage -14408 +▁uploaded -14409 +▁blues -14410 +▁blanket -14411 +▁couch -14412 +haw -14413 +▁lecture -14414 +▁Attend -14415 +▁freeze -14416 +▁racial -14417 +▁dys -14418 +▁Dinner -14419 +▁Walt -14420 +thening -14421 +▁Excellence -14422 +▁pads -14423 +▁candle -14424 +▁Branch -14425 +▁anth -14426 +▁migration -14427 +▁Nokia -14428 +▁Investig -14429 +▁explicit -14430 +▁damn -14431 +▁summit -14432 +▁IR -14433 +▁inspir -14434 +-) -14435 +stan -14436 +▁unwanted -14437 +▁Jews -14438 +▁Tiger -14439 +▁coloring -14440 +▁adjustment -14441 +▁Scar -14442 +▁influences -14443 +▁Explorer -14444 +▁inquiry -14445 +▁revel -14446 +gends -14447 +▁Maintenance -14448 +acre -14449 +▁fertil -14450 +▁fame -14451 +▁firmly -14452 +▁hiding -14453 +ector -14454 +▁palm -14455 +▁counterpart -14456 +▁Reuters -14457 +▁dimension -14458 +▁Kam -14459 +fy -14460 +▁starring -14461 +lad -14462 +▁Ax -14463 +▁Rangers -14464 +Thank -14465 +▁dealt -14466 +▁Someone -14467 +▁examined -14468 +▁spicy -14469 +▁corresponding -14470 +▁Mack -14471 +▁disposal -14472 +▁sleeves -14473 +▁supermarket -14474 +emor -14475 +▁Client -14476 +▁cherry -14477 +▁reass -14478 +▁attributes 
-14479 +▁waterproof -14480 +▁Nutrition -14481 +▁Sty -14482 +▁optical -14483 +ellers -14484 +GH -14485 +▁gum -14486 +▁wand -14487 +inis -14488 +▁dilig -14489 +▁downloading -14490 +have -14491 +bye -14492 +▁journalism -14493 +▁joints -14494 +▁Chase -14495 +▁styling -14496 +▁commod -14497 +▁evaluated -14498 +mends -14499 +▁Ghost -14500 +▁demonstrates -14501 +▁Statistics -14502 +reated -14503 +▁Pos -14504 +Ty -14505 +▁monster -14506 +▁participant -14507 +▁spokesperson -14508 +▁mechanisms -14509 +▁TW -14510 +▁Opening -14511 +▁Measure -14512 +ository -14513 +▁reviewing -14514 +▁Benn -14515 +▁VAT -14516 +▁bicycle -14517 +▁Estab -14518 +▁Hour -14519 +▁Alexand -14520 +▁fisher -14521 +▁Honor -14522 +▁Articles -14523 +▁Mediterranean -14524 +▁Transl -14525 +rez -14526 +▁seamless -14527 +listed -14528 +comm -14529 +Ge -14530 +▁Cultural -14531 +▁outline -14532 +irs -14533 +▁deer -14534 +▁Require -14535 +▁Mi -14536 +▁revised -14537 +▁queen -14538 +▁Cinem -14539 +▁isolated -14540 +▁wondered -14541 +▁Nike -14542 +▁Perm -14543 +▁variation -14544 +▁euro -14545 +Hi -14546 +▁Kindle -14547 +camp -14548 +▁spirits -14549 +▁cryptocur -14550 +cap -14551 +rine -14552 +apple -14553 +▁playground -14554 +▁Chan -14555 +▁duplic -14556 +▁Object -14557 +▁Philip -14558 +▁beverage -14559 +▁complications -14560 +▁performs -14561 +▁keywords -14562 +gered -14563 +▁spices -14564 +▁proceedings -14565 +bank -14566 +▁Gard -14567 +opard -14568 +▁bloggers -14569 +▁pretend -14570 +▁renovation -14571 +▁RBI -14572 +engu -14573 +▁execute -14574 +anners -14575 +▁Motors -14576 +▁segments -14577 +▁march -14578 +▁Leaf -14579 +▁obstacles -14580 +▁Juan -14581 +▁dat -14582 +jamin -14583 +SF -14584 +▁tones -14585 +▁Loan -14586 +direct -14587 +▁alternate -14588 +ername -14589 +▁Yesterday -14590 +▁enterprises -14591 +hesis -14592 +▁valley -14593 +▁Wow -14594 +▁Truth -14595 +▁intuitive -14596 +pop -14597 +▁freshly -14598 +▁executed -14599 +keeping -14600 +▁Jefferson -14601 +▁Bristol -14602 +lower -14603 +▁roofing -14604 
+▁Lip -14605 +▁parade -14606 +▁messaging -14607 +▁Guests -14608 +uki -14609 +▁sheer -14610 +COM -14611 +ggy -14612 +ANCE -14613 +amboo -14614 +▁detected -14615 +shirts -14616 +▁BT -14617 +▁modest -14618 +▁sleeve -14619 +▁sleek -14620 +▁knit -14621 +chron -14622 +▁pipes -14623 +▁rehabilitation -14624 +▁desperate -14625 +ometown -14626 +▁perspectives -14627 +orus -14628 +story -14629 +▁insulation -14630 +▁Optim -14631 +▁Giants -14632 +▁sciences -14633 +▁minerals -14634 +▁illustrations -14635 +▁unfor -14636 +▁Planet -14637 +BER -14638 +▁Shaw -14639 +▁consultants -14640 +braska -14641 +▁inexpensive -14642 +▁enhancing -14643 +▁icons -14644 +▁resolved -14645 +▁attachment -14646 +▁stirring -14647 +▁Niger -14648 +boys -14649 +▁replied -14650 +▁Lanc -14651 +▁proudly -14652 +▁SMS -14653 +▁Holland -14654 +▁fate -14655 +▁recreation -14656 +IX -14657 +▁syndrome -14658 +rams -14659 +▁Instit -14660 +▁twitter -14661 +▁mock -14662 +▁Idaho -14663 +▁deployed -14664 +▁compromise -14665 +▁lawmakers -14666 +▁sunlight -14667 +▁taxi -14668 +▁Happ -14669 +oodles -14670 +DER -14671 +▁Enh -14672 +▁preliminary -14673 +▁descriptions -14674 +▁cinema -14675 +▁bod -14676 +▁therapeutic -14677 +▁Plaza -14678 +paid -14679 +imity -14680 +▁cancelled -14681 +▁gown -14682 +▁Watson -14683 +nan -14684 +▁Quant -14685 +oples -14686 +▁hobby -14687 +▁nerve -14688 +▁ankle -14689 +▁gameplay -14690 +second -14691 +▁finances -14692 +▁responding -14693 +▁Ts -14694 +allow -14695 +▁economics -14696 +▁recognised -14697 +▁Edinburgh -14698 +▁perfection -14699 +struction -14700 +▁elabor -14701 +▁epid -14702 +▁Canon -14703 +▁Root -14704 +▁rape -14705 +▁violations -14706 +▁Turner -14707 +poke -14708 +▁tactics -14709 +▁spine -14710 +▁ritual -14711 +▁adverse -14712 +▁Assessment -14713 +aternity -14714 +▁bears -14715 +▁MT -14716 +mill -14717 +▁belongs -14718 +▁breathe -14719 +▁outreach -14720 +▁Brazilian -14721 +two -14722 +owski -14723 +▁advisory -14724 +sterdam -14725 +▁Oliver -14726 +▁masks -14727 +▁trades -14728 
+▁performers -14729 +▁inflammation -14730 +▁stressful -14731 +▁Kum -14732 +uez -14733 +▁Fans -14734 +▁Flight -14735 +▁Reader -14736 +▁touching -14737 +▁villages -14738 +▁drill -14739 +▁refuse -14740 +▁Score -14741 +▁Rogers -14742 +▁cricket -14743 +▁Milwaukee -14744 +▁decides -14745 +▁Wes -14746 +▁Shar -14747 +▁puzz -14748 +uri -14749 +▁installations -14750 +▁barn -14751 +idy -14752 +shaped -14753 +▁urgent -14754 +▁Coordinator -14755 +▁horrible -14756 +▁alien -14757 +tees -14758 +▁investigations -14759 +available -14760 +▁brake -14761 +▁washed -14762 +▁organizational -14763 +ISH -14764 +powered -14765 +▁Swim -14766 +lasting -14767 +▁penalties -14768 +▁Bike -14769 +▁raises -14770 +urred -14771 +▁bitter -14772 +▁shoppers -14773 +▁encompass -14774 +osity -14775 +esity -14776 +▁cables -14777 +▁lever -14778 +▁comprised -14779 +▁Approx -14780 +▁Required -14781 +▁pp -14782 +▁depict -14783 +▁tan -14784 +▁median -14785 +▁Chev -14786 +▁rentals -14787 +▁tanks -14788 +ools -14789 +▁Beng -14790 +▁Wells -14791 +▁Colon -14792 +▁Internal -14793 +▁nan -14794 +▁educated -14795 +▁Obviously -14796 +▁Cemetery -14797 +▁mud -14798 +atical -14799 +▁pale -14800 +▁Similar -14801 +▁binding -14802 +▁locks -14803 +▁Warrant -14804 +Mod -14805 +▁varying -14806 +▁mi -14807 +▁Canyon -14808 +Most -14809 +▁activists -14810 +Ke -14811 +▁Cheese -14812 +ulin -14813 +▁Pizza -14814 +▁consuming -14815 +▁abundance -14816 +▁neighborhoods -14817 +▁galleries -14818 +▁decreased -14819 +▁beings -14820 +▁throat -14821 +▁bonuses -14822 +▁mushrooms -14823 +▁Creating -14824 +▁Quarter -14825 +▁Ald -14826 +▁Harrison -14827 +bone -14828 +▁Luckily -14829 +nings -14830 +▁Bryan -14831 +uate -14832 +▁Pete -14833 +▁generic -14834 +▁deleted -14835 +▁ounces -14836 +▁circulation -14837 +▁Sacram -14838 +▁Arsenal -14839 +▁wages -14840 +▁Wide -14841 +▁Factory -14842 +▁acknowledged -14843 +agger -14844 +▁immigrants -14845 +El -14846 +▁gri -14847 +▁intr -14848 +▁bou -14849 +▁Firm -14850 +▁afterwards -14851 +▁Eb -14852 +▁corporation 
-14853 +▁donors -14854 +▁Amanda -14855 +▁stove -14856 +leton -14857 +arse -14858 +▁delightful -14859 +▁Membership -14860 +▁Arn -14861 +▁Edwards -14862 +arity -14863 +arnish -14864 +▁witnessed -14865 +million -14866 +▁flowing -14867 +▁Notice -14868 +aky -14869 +ettes -14870 +▁Mis -14871 +▁associations -14872 +▁niche -14873 +▁casting -14874 +than -14875 +▁belly -14876 +▁Vermont -14877 +▁unprecedented -14878 +▁Conservation -14879 +reath -14880 +▁Twin -14881 +icone -14882 +▁Harm -14883 +▁google -14884 +bane -14885 +▁Okay -14886 +▁Margaret -14887 +▁Pretty -14888 +▁spat -14889 +▁Beer -14890 +▁quil -14891 +▁Hood -14892 +▁assure -14893 +▁Ja -14894 +orie -14895 +USA -14896 +itants -14897 +ampagne -14898 +▁rivers -14899 +▁Pearl -14900 +▁rook -14901 +uscript -14902 +▁nutritional -14903 +▁Coup -14904 +ourmet -14905 +▁collaborate -14906 +Top -14907 +▁lending -14908 +▁ES -14909 +BL -14910 +MM -14911 +▁condem -14912 +jack -14913 +anga -14914 +▁dated -14915 +▁DA -14916 +cost -14917 +▁RF -14918 +filled -14919 +▁offline -14920 +▁remarks -14921 +▁frustrating -14922 +umatic -14923 +▁combinations -14924 +▁ceramic -14925 +▁Et -14926 +ivest -14927 +▁Poll -14928 +▁Ze -14929 +▁Heights -14930 +▁forests -14931 +▁Frequ -14932 +▁Abb -14933 +▁Cyber -14934 +impse -14935 +▁Against -14936 +▁assisting -14937 +rating -14938 +retion -14939 +▁Educational -14940 +▁ACT -14941 +▁pointing -14942 +▁scholarships -14943 +▁Blake -14944 +▁christ -14945 +uba -14946 +▁intersection -14947 +▁schemes -14948 +▁happily -14949 +▁awkward -14950 +High -14951 +▁nowhere -14952 +uxe -14953 +▁rhythm -14954 +▁demonstration -14955 +urable -14956 +gow -14957 +▁Favorite -14958 +loe -14959 +▁thickness -14960 +▁festive -14961 +▁proprietary -14962 +▁headlines -14963 +▁invites -14964 +▁opener -14965 +plan -14966 +▁Raw -14967 +▁catering -14968 +▁minority -14969 +ussels -14970 +▁wholesale -14971 +patient -14972 +occ -14973 +▁Plastic -14974 +▁behaviors -14975 +▁Chron -14976 +Any -14977 +uda -14978 +▁switching -14979 +▁convince -14980 
+▁muff -14981 +▁Featuring -14982 +ymph -14983 +▁expend -14984 +▁yogurt -14985 +▁banner -14986 +Web -14987 +▁Train -14988 +▁labour -14989 +▁Reed -14990 +▁Draft -14991 +▁Heaven -14992 +▁seminar -14993 +OK -14994 +▁prohibited -14995 +▁phenomenon -14996 +comfort -14997 +yon -14998 +FOR -14999 +▁ballot -15000 +▁infant -15001 +▁Accept -15002 +▁sophomore -15003 +▁VR -15004 +▁nic -15005 +▁culinary -15006 +▁cartoon -15007 +▁ugly -15008 +▁particles -15009 +fu -15010 +▁stairs -15011 +▁pillow -15012 +▁scheduling -15013 +▁monetary -15014 +▁decorating -15015 +▁suspects -15016 +▁Wait -15017 +▁burned -15018 +▁subsequently -15019 +▁ridiculous -15020 +▁disrupt -15021 +▁thereby -15022 +▁noting -15023 +▁Owner -15024 +▁hottest -15025 +▁sturdy -15026 +▁Ticket -15027 +▁worries -15028 +▁controlling -15029 +▁projected -15030 +▁acute -15031 +Red -15032 +▁accordingly -15033 +▁toe -15034 +▁Fantasy -15035 +▁investigators -15036 +▁protests -15037 +▁antique -15038 +▁Maj -15039 +▁unh -15040 +▁Projects -15041 +▁Thread -15042 +oped -15043 +▁influential -15044 +▁energ -15045 +▁resemb -15046 +UP -15047 +kn -15048 +▁utter -15049 +iosity -15050 +▁consisting -15051 +▁Architecture -15052 +▁dynamics -15053 +▁partially -15054 +▁Political -15055 +▁GL -15056 +▁Lan -15057 +▁embark -15058 +▁Township -15059 +flower -15060 +▁impacted -15061 +▁environmentally -15062 +March -15063 +▁transfers -15064 +▁Understanding -15065 +▁kicks -15066 +▁Fabric -15067 +▁defending -15068 +mega -15069 +logy -15070 +▁Stephan -15071 +sa -15072 +▁playoffs -15073 +▁Employment -15074 +▁Colomb -15075 +▁lam -15076 +rapes -15077 +psy -15078 +rene -15079 +▁congrat -15080 +▁Million -15081 +▁underway -15082 +▁needle -15083 +▁Mental -15084 +▁Fold -15085 +▁fever -15086 +▁Initiative -15087 +▁recommends -15088 +▁Occup -15089 +block -15090 +▁proteins -15091 +▁bom -15092 +▁resign -15093 +▁webinar -15094 +▁qualifications -15095 +aways -15096 +ipher -15097 +▁congreg -15098 +▁Nag -15099 +▁Neck -15100 +▁supervision -15101 +▁polish -15102 +..." 
-15103 +▁Orders -15104 +▁eBay -15105 +▁Session -15106 +▁accredited -15107 +▁GRE -15108 +▁UFC -15109 +▁Analytics -15110 +wealth -15111 +▁CF -15112 +becca -15113 +▁Rou -15114 +▁harness -15115 +▁Milan -15116 +third -15117 +▁certificates -15118 +rant -15119 +▁RSS -15120 +▁Ric -15121 +▁recordings -15122 +ainted -15123 +▁Memory -15124 +▁tragedy -15125 +▁sheep -15126 +▁Phillips -15127 +▁Unless -15128 +mile -15129 +▁Democrat -15130 +▁Tu -15131 +▁sentenced -15132 +▁notifications -15133 +Health -15134 +▁sensitivity -15135 +▁sentiment -15136 +▁indication -15137 +▁ignored -15138 +▁Fest -15139 +▁Celt -15140 +▁confusing -15141 +▁Apps -15142 +▁therapist -15143 +ontin -15144 +▁Truck -15145 +▁Helen -15146 +ilant -15147 +century -15148 +▁PRO -15149 +▁awful -15150 +▁celebrations -15151 +▁Historic -15152 +▁databases -15153 +▁rotation -15154 +▁Scholarship -15155 +▁Calendar -15156 +▁Bonus -15157 +usalem -15158 +▁recept -15159 +▁lottery -15160 +▁foil -15161 +▁hed -15162 +▁verification -15163 +▁Portugal -15164 +chnology -15165 +getic -15166 +▁laughing -15167 +▁timeless -15168 +▁blessing -15169 +April -15170 +▁interviewed -15171 +scape -15172 +▁puppy -15173 +▁Traffic -15174 +▁Certain -15175 +▁aug -15176 +▁Sox -15177 +▁freezer -15178 +▁eaten -15179 +ARS -15180 +June -15181 +▁Earn -15182 +▁cave -15183 +▁lug -15184 +▁Datab -15185 +Spe -15186 +▁Rescue -15187 +▁Makes -15188 +claimer -15189 +▁Nebraska -15190 +bag -15191 +osures -15192 +utt -15193 +▁hilarious -15194 +▁Walter -15195 +▁Southeast -15196 +▁advocates -15197 +▁Phase -15198 +▁picnic -15199 +▁acids -15200 +fighters -15201 +▁lender -15202 +aque -15203 +▁offset -15204 +▁Nevertheless -15205 +▁financially -15206 +▁Kle -15207 +▁Chemical -15208 +Acc -15209 +▁enrich -15210 +▁promo -15211 +arre -15212 +??? 
-15213 +▁LCD -15214 +▁terrific -15215 +▁Experts -15216 +▁rounded -15217 +▁Inj -15218 +▁museums -15219 +▁Benjamin -15220 +▁researcher -15221 +▁assessments -15222 +▁ethics -15223 +La -15224 +▁indicator -15225 +▁Vik -15226 +▁sequel -15227 +▁evenly -15228 +▁frustrated -15229 +▁bang -15230 +▁publishers -15231 +changing -15232 +▁glimpse -15233 +▁Nicole -15234 +▁Views -15235 +▁Delaware -15236 +▁briefly -15237 +▁guarantees -15238 +▁tin -15239 +▁careg -15240 +▁Massage -15241 +▁roasted -15242 +MO -15243 +tops -15244 +Two -15245 +▁beats -15246 +▁battles -15247 +▁Biden -15248 +icaid -15249 +▁tricky -15250 +igma -15251 +▁Programme -15252 +▁pursuit -15253 +erie -15254 +etta -15255 +▁evolving -15256 +hett -15257 +▁exchanges -15258 +▁crunch -15259 +▁Stu -15260 +▁playoff -15261 +▁Dell -15262 +▁quad -15263 +▁Puerto -15264 +▁costumes -15265 +etics -15266 +▁rumors -15267 +▁disadvant -15268 +▁ric -15269 +▁innovations -15270 +▁polished -15271 +▁coupled -15272 +▁intric -15273 +▁Bun -15274 +▁sights -15275 +brook -15276 +▁Mountains -15277 +▁trademarks -15278 +▁downt -15279 +▁comics -15280 +pered -15281 +▁nap -15282 +▁themed -15283 +▁Oz -15284 +▁Glas -15285 +▁formerly -15286 +requ -15287 +▁dign -15288 +▁Teachers -15289 +▁bucket -15290 +▁Ko -15291 +▁EVER -15292 +West -15293 +avan -15294 +▁Defe -15295 +▁Garc -15296 +Sept -15297 +▁Alberta -15298 +▁Walking -15299 +umbs -15300 +vance -15301 +NE -15302 +▁dominated -15303 +▁Haven -15304 +▁assumed -15305 +eu -15306 +Bus -15307 +vine -15308 +▁Dual -15309 +MD -15310 +▁Response -15311 +▁Ug -15312 +▁AU -15313 +▁finely -15314 +▁Expect -15315 +storm -15316 +▁Extract -15317 +Care -15318 +▁Publisher -15319 +wiches -15320 +iquid -15321 +▁hind -15322 +▁sou -15323 +▁stating -15324 +▁Guardian -15325 +▁Agreement -15326 +OLD -15327 +▁dense -15328 +▁prompted -15329 +▁Gabri -15330 +▁specialize -15331 +▁Traditional -15332 +▁Il -15333 +▁holy -15334 +▁Abu -15335 +parents -15336 +▁necklace -15337 +▁Testing -15338 +▁Amsterdam -15339 +insula -15340 +▁machinery -15341 
+▁positioned -15342 +hma -15343 +▁retention -15344 +food -15345 +prom -15346 +▁Bund -15347 +▁Flore -15348 +▁dominant -15349 +▁Engineer -15350 +Stud -15351 +Will -15352 +▁whis -15353 +▁fragrance -15354 +usters -15355 +▁Mortgage -15356 +efficient -15357 +▁sailing -15358 +▁perf -15359 +▁Otherwise -15360 +ivia -15361 +▁prol -15362 +▁grabbed -15363 +▁Dogs -15364 +also -15365 +▁dot -15366 +▁nour -15367 +dr -15368 +▁pumps -15369 +▁securely -15370 +▁Visa -15371 +▁gardening -15372 +▁antioxid -15373 +▁ample -15374 +▁interf -15375 +▁dece -15376 +▁saus -15377 +rencies -15378 +▁investigated -15379 +▁Kan -15380 +▁mph -15381 +role -15382 +▁Jama -15383 +▁asleep -15384 +unks -15385 +▁polls -15386 +▁Crew -15387 +▁peanut -15388 +▁jaw -15389 +▁habitat -15390 +▁Jet -15391 +▁contacting -15392 +▁Stanford -15393 +inkles -15394 +inse -15395 +ammar -15396 +Pre -15397 +▁cav -15398 +▁gauge -15399 +▁Kil -15400 +▁sew -15401 +iblings -15402 +▁Chapel -15403 +▁Neg -15404 +▁NW -15405 +▁Vel -15406 +aba -15407 +▁Same -15408 +▁Consulting -15409 +▁happier -15410 +▁extends -15411 +▁rational -15412 +▁knees -15413 +gans -15414 +▁biology -15415 +▁cope -15416 +▁Mode -15417 +▁waited -15418 +cond -15419 +▁bronze -15420 +▁gamers -15421 +▁diver -15422 +▁Victorian -15423 +▁tens -15424 +▁circular -15425 +▁Message -15426 +▁NV -15427 +rien -15428 +arett -15429 +▁Hero -15430 +washer -15431 +▁Rodrig -15432 +crafted -15433 +keys -15434 +▁PlayStation -15435 +▁Eval -15436 +▁Outside -15437 +▁Polish -15438 +asha -15439 +▁Secure -15440 +▁midst -15441 +▁Asked -15442 +▁quicker -15443 +▁kidney -15444 +▁reck -15445 +▁Whole -15446 +.), -15447 +▁HS -15448 +▁vet -15449 +▁Superv -15450 +mount -15451 +▁notion -15452 +▁Recruit -15453 +ctuary -15454 +▁bachelor -15455 +ocratic -15456 +▁unclear -15457 +irk -15458 +oodle -15459 +▁earl -15460 +▁cupc -15461 +▁threatening -15462 +▁devastating -15463 +▁preval -15464 +▁pdf -15465 +▁SAP -15466 +attering -15467 +▁quart -15468 +▁reign -15469 +▁Banks -15470 +▁ours -15471 +▁shiny -15472 +▁poems 
-15473 +▁CE -15474 +▁lod -15475 +▁umb -15476 +gger -15477 +▁gaps -15478 +▁Raven -15479 +▁Dust -15480 +AVE -15481 +▁panic -15482 +payers -15483 +▁EUR -15484 +▁Vine -15485 +Min -15486 +▁Milk -15487 +ere -15488 +▁crowds -15489 +▁Ub -15490 +▁Resear -15491 +▁Television -15492 +▁fixtures -15493 +akespe -15494 +▁equality -15495 +▁decorations -15496 +▁scoop -15497 +eem -15498 +▁Founder -15499 +▁Claim -15500 +▁partnered -15501 +Cor -15502 +▁lengthy -15503 +ieval -15504 +▁sincere -15505 +▁cel -15506 +inspired -15507 +▁Exc -15508 +▁fatigue -15509 +▁substances -15510 +ania -15511 +ghai -15512 +▁NSW -15513 +▁FE -15514 +▁youngest -15515 +▁beads -15516 +ennial -15517 +▁teenager -15518 +▁auton -15519 +▁kinda -15520 +▁moder -15521 +▁EPA -15522 +▁Lucas -15523 +▁translated -15524 +oulder -15525 +▁Dol -15526 +▁Carbon -15527 +▁Publication -15528 +ema -15529 +▁Aub -15530 +▁WHAT -15531 +▁tweets -15532 +▁gratitude -15533 +▁proportion -15534 +▁Samuel -15535 +▁clinics -15536 +▁listened -15537 +▁Karl -15538 +▁administrators -15539 +▁optimize -15540 +▁hats -15541 +▁lifted -15542 +▁crying -15543 +▁separation -15544 +▁deposits -15545 +▁herb -15546 +ibles -15547 +avender -15548 +leen -15549 +▁angel -15550 +▁surgeon -15551 +▁learners -15552 +alg -15553 +▁Cel -15554 +glass -15555 +▁touring -15556 +▁Jerusalem -15557 +▁Leban -15558 +▁Parad -15559 +▁traders -15560 +▁acclaimed -15561 +▁lakes -15562 +oubted -15563 +▁Battery -15564 +erness -15565 +asis -15566 +▁Ana -15567 +▁extensions -15568 +▁refined -15569 +▁seafood -15570 +▁tv -15571 +▁occupied -15572 +▁ip -15573 +ucked -15574 +phis -15575 +▁Collabor -15576 +▁Elite -15577 +▁Founded -15578 +▁chaos -15579 +▁automobile -15580 +▁describing -15581 +▁metals -15582 +▁approve -15583 +▁disciplines -15584 +unted -15585 +▁trio -15586 +▁deduct -15587 +▁feather -15588 +▁Bangl -15589 +▁practically -15590 +▁Trading -15591 +▁Zero -15592 +▁Solution -15593 +▁pap -15594 +▁obligations -15595 +Wed -15596 +ested -15597 +▁grasp -15598 +▁Pric -15599 +▁Writer -15600 +OCK 
-15601 +▁linear -15602 +▁dimin -15603 +▁seasoned -15604 +▁XL -15605 +eton -15606 +Wind -15607 +▁Ot -15608 +▁cares -15609 +▁notify -15610 +▁Gem -15611 +▁sunshine -15612 +▁Tourism -15613 +▁Bears -15614 +▁pencil -15615 +aton -15616 +▁Shah -15617 +▁cryst -15618 +▁Eco -15619 +▁Reference -15620 +BE -15621 +▁honour -15622 +▁Kend -15623 +.). -15624 +Hey -15625 +▁lively -15626 +Monday -15627 +▁Designs -15628 +▁MAN -15629 +▁temporarily -15630 +GC -15631 +▁neglig -15632 +▁Denmark -15633 +▁spotlight -15634 +resa -15635 +▁Rule -15636 +hal -15637 +▁tenants -15638 +formed -15639 +▁bik -15640 +▁cousin -15641 +▁submissions -15642 +▁Summary -15643 +▁commands -15644 +▁meth -15645 +▁quarters -15646 +▁OP -15647 +▁nomination -15648 +▁pix -15649 +otional -15650 +ohyd -15651 +intendent -15652 +▁fet -15653 +imated -15654 +▁mum -15655 +▁accessory -15656 +width -15657 +▁mamm -15658 +▁sentences -15659 +ois -15660 +ahl -15661 +▁Complex -15662 +▁Indians -15663 +▁aspir -15664 +when -15665 +▁Edit -15666 +alone -15667 +▁Participants -15668 +▁BU -15669 +▁Medicaid -15670 +▁Cu -15671 +▁testimony -15672 +▁enthusiastic -15673 +▁teenage -15674 +▁instrumental -15675 +▁variables -15676 +▁selections -15677 +▁algorithm -15678 +▁tweeted -15679 +somet -15680 +▁coordination -15681 +▁assembled -15682 +▁flooding -15683 +▁Southwest -15684 +▁lasts -15685 +zel -15686 +▁lazy -15687 +▁Electrical -15688 +think -15689 +▁terrain -15690 +phant -15691 +▁Vehicle -15692 +▁Weekend -15693 +ampton -15694 +▁palette -15695 +▁Hook -15696 +▁explored -15697 +▁Jar -15698 +▁Discovery -15699 +▁Moz -15700 +▁Industries -15701 +degree -15702 +▁beside -15703 +▁enrollment -15704 +▁sculpture -15705 +▁peppers -15706 +Cent -15707 +akespeare -15708 +heastern -15709 +▁comparable -15710 +jud -15711 +say -15712 +houses -15713 +▁pier -15714 +▁encountered -15715 +▁ninth -15716 +▁UT -15717 +▁Saints -15718 +quare -15719 +try -15720 +HR -15721 +▁terrace -15722 +▁Bobby -15723 +▁frustration -15724 +▁Dian -15725 +▁Ruth -15726 +▁accessibility -15727 
+keley -15728 +▁Hence -15729 +▁zoom -15730 +-------- -15731 +▁Alz -15732 +▁Liz -15733 +▁threads -15734 +▁scandal -15735 +▁Caf -15736 +show -15737 +lit -15738 +▁Orchestra -15739 +acts -15740 +▁Newcastle -15741 +▁decoration -15742 +weg -15743 +▁disclosure -15744 +▁Keeping -15745 +▁HVAC -15746 +▁redes -15747 +▁Carlos -15748 +▁scientist -15749 +▁pressed -15750 +▁Created -15751 +▁Nom -15752 +▁assessed -15753 +fix -15754 +Play -15755 +GL -15756 +▁unde -15757 +▁giveaway -15758 +IDE -15759 +▁elevated -15760 +▁Chile -15761 +▁fights -15762 +▁Queensland -15763 +Link -15764 +▁Pattern -15765 +▁beverages -15766 +▁Above -15767 +gettable -15768 +▁coated -15769 +version -15770 +artha -15771 +lasses -15772 +▁pinch -15773 +▁Vincent -15774 +▁creations -15775 +arded -15776 +▁Exerc -15777 +ogg -15778 +issan -15779 +▁Installation -15780 +pdf -15781 +▁laying -15782 +▁Interested -15783 +▁Spread -15784 +▁controversy -15785 +▁Carn -15786 +orig -15787 +▁collapse -15788 +▁Garage -15789 +▁zomb -15790 +iaries -15791 +▁harmon -15792 +issance -15793 +iovascular -15794 +▁Fried -15795 +Ass -15796 +▁renewed -15797 +oubtedly -15798 +▁xxx -15799 +HP -15800 +▁Task -15801 +▁wellbeing -15802 +▁castle -15803 +▁foss -15804 +▁Behav -15805 +▁immun -15806 +▁showers -15807 +ulously -15808 +▁Laboratory -15809 +▁bundle -15810 +▁ONLY -15811 +▁throws -15812 +▁rope -15813 +piring -15814 +▁injection -15815 +▁FROM -15816 +▁journals -15817 +▁deny -15818 +▁undergo -15819 +▁shareholders -15820 +enez -15821 +▁Zoom -15822 +▁Austria -15823 +▁permanently -15824 +▁Julia -15825 +▁Sierra -15826 +▁kills -15827 +▁Cham -15828 +itime -15829 +▁Wend -15830 +▁sunset -15831 +▁Operating -15832 +Trans -15833 +atches -15834 +ATED -15835 +▁partly -15836 +ourcing -15837 +▁unions -15838 +▁repay -15839 +atories -15840 +▁endeav -15841 +▁outfits -15842 +▁Partnership -15843 +▁Bent -15844 +▁towels -15845 +▁incentives -15846 +ampoo -15847 +▁Argentina -15848 +aturated -15849 +▁mes -15850 +uced -15851 +▁memor -15852 +▁additions -15853 +▁comfortably 
-15854 +▁Male -15855 +▁chore -15856 +Big -15857 +▁Rebecca -15858 +isha -15859 +▁veggies -15860 +▁fundament -15861 +▁lung -15862 +▁enquir -15863 +▁static -15864 +yr -15865 +▁flesh -15866 +▁Photosh -15867 +istically -15868 +▁Alpha -15869 +▁dismissed -15870 +▁constitu -15871 +kh -15872 +▁Privacy -15873 +▁phrases -15874 +▁Chuck -15875 +▁SR -15876 +▁Dale -15877 +▁fortune -15878 +▁customs -15879 +station -15880 +▁Palestinian -15881 +path -15882 +cra -15883 +▁answering -15884 +▁Patients -15885 +▁Recreation -15886 +alph -15887 +stown -15888 +▁Brooks -15889 +▁Blvd -15890 +▁QU -15891 +▁Statement -15892 +fficial -15893 +▁chase -15894 +▁Criminal -15895 +▁Flower -15896 +mare -15897 +▁crushed -15898 +akery -15899 +▁diagnostic -15900 +▁shifts -15901 +▁scenery -15902 +Where -15903 +Many -15904 +▁Eight -15905 +▁rivals -15906 +▁Zoo -15907 +▁backpack -15908 +raul -15909 +▁enjoyment -15910 +WE -15911 +urf -15912 +▁earliest -15913 +▁boutique -15914 +▁hardest -15915 +web -15916 +▁outlined -15917 +▁correction -15918 +▁Profession -15919 +▁CPU -15920 +▁adaptation -15921 +▁Assistance -15922 +ullivan -15923 +▁Foster -15924 +five -15925 +▁TD -15926 +▁offshore -15927 +▁attain -15928 +▁inquiries -15929 +▁grape -15930 +ikh -15931 +▁metrics -15932 +▁beers -15933 +AME -15934 +itle -15935 +▁flagship -15936 +▁wonders -15937 +Act -15938 +▁Miles -15939 +▁discomfort -15940 +Max -15941 +▁landscapes -15942 +▁breach -15943 +▁Capitol -15944 +▁Ideal -15945 +▁Bradley -15946 +▁succ -15947 +uffs -15948 +Pres -15949 +▁balcony -15950 +isle -15951 +▁Cities -15952 +▁Gonz -15953 +▁Tall -15954 +▁perceived -15955 +▁Scout -15956 +▁extrem -15957 +▁varies -15958 +▁UI -15959 +▁Sterling -15960 +▁Warm -15961 +▁MBA -15962 +▁Bald -15963 +▁paperwork -15964 +emen -15965 +entry -15966 +▁sliding -15967 +▁documented -15968 +Hello -15969 +▁extending -15970 +▁jewellery -15971 +▁RM -15972 +▁vibe -15973 +frame -15974 +▁payroll -15975 +▁teenagers -15976 +cience -15977 +curement -15978 +▁Bloomberg -15979 +▁Soccer -15980 +▁Sor -15981 
+▁Wake -15982 +▁highlighting -15983 +▁Rib -15984 +▁doubles -15985 +▁observations -15986 +▁fairy -15987 +▁consumed -15988 +AK -15989 +▁Ghana -15990 +seeing -15991 +▁earrings -15992 +▁Lynn -15993 +▁tunnel -15994 +▁Unique -15995 +▁Warranty -15996 +▁literacy -15997 +Have -15998 +▁porch -15999 +focus -16000 +▁boom -16001 +▁thoughtful -16002 +▁Contemporary -16003 +▁revealing -16004 +▁Lights -16005 +▁cholesterol -16006 +xual -16007 +▁acrylic -16008 +▁overhead -16009 +▁Cer -16010 +▁scrub -16011 +▁grief -16012 +pad -16013 +asma -16014 +▁traditionally -16015 +▁marathon -16016 +▁clearance -16017 +Love -16018 +▁blocking -16019 +phew -16020 +)( -16021 +▁Lemon -16022 +▁Entry -16023 +▁Prest -16024 +neapolis -16025 +▁viable -16026 +▁drying -16027 +▁females -16028 +▁Submit -16029 +▁billing -16030 +▁hygiene -16031 +▁eligibility -16032 +ifi -16033 +▁dread -16034 +map -16035 +eking -16036 +▁Laser -16037 +▁searched -16038 +▁ -16039 +e -16040 +t -16041 +a -16042 +o -16043 +i -16044 +n -16045 +s -16046 +r -16047 +h -16048 +l -16049 +d -16050 +c -16051 +u -16052 +m -16053 +p -16054 +g -16055 +f -16056 +y -16057 +w -16058 +b -16059 +. -16060 +v -16061 +, -16062 +k -16063 +T -16064 +S -16065 +I -16066 +A -16067 +- -16068 +C -16069 +0 -16070 +M -16071 +1 -16072 +P -16073 +x -16074 +B -16075 +2 -16076 +W -16077 +D -16078 +R -16079 +E -16080 +H -16081 +F -16082 +’ -16083 +L -16084 +N -16085 +O -16086 +: -16087 +' -16088 +G -16089 +j -16090 +) -16091 +( -16092 +z -16093 +3 -16094 +5 -16095 +q -16096 +4 -16097 +U -16098 +" -16099 +9 -16100 +J -16101 +8 -16102 +6 -16103 +V -16104 +Y -16105 +K -16106 +| -16107 +7 -16108 +! -16109 +/ -16110 +“ -16111 +” -16112 +? 
"Quinary training script for OpenAI's Parameter Golf Challenge. Based on ternary submission by Ciprian-Florin Ifrim (24 March 2026). Quinary: {-2,-1,0,+1,+2} with base-5 packing (3 quins/byte = 2.667 bpw vs ternary 1.6 bpw)."

import copy
import glob
import io
import math
import os
import random
import subprocess
import sys
import tempfile
import time
import lzma
from pathlib import Path

import numpy as np
import sentencepiece as spm
import torch
import torch.distributed as dist
import torch.nn.functional as F
from torch import Tensor, nn
from torch.nn.parallel import DistributedDataParallel as DDP
from flash_attn_interface import flash_attn_func


def _e(k, d, t=str):
    """Read env var `k` with default `d`, coerced to type `t`.

    The default is round-tripped through `str` so env-provided and default
    values go through the same parser. Booleans are encoded as "0"/"1" in
    the environment, hence the `int` hop — a plain `bool("0")` is truthy.
    `t is bool` (identity) is used rather than `==`: `t` is a type object.
    """
    v = os.environ.get(k, str(d))
    if t is bool:
        return bool(int(v))
    return t(v)


class Hyperparameters:
    # Defaults below match the canonical SP16384 quinary submission config —
    # i.e. exactly what `run.sh` passes through to torchrun. A bare
    # `torchrun --standalone --nproc_per_node=8 train_gpt.py` (no env vars)
    # therefore reproduces the submission. `run.sh` is still the
    # documentation-of-record for the canonical config; these defaults are
    # mirrored from it so the two never silently disagree.
    data_path = _e("DATA_PATH", "./data/canonical/datasets/fineweb10B_sp16384")
    # [0-9] prefix matches only NNNNNN-suffixed shard files, ignoring any
    # sibling parallel-array files that might share the prefix.
    train_files = os.path.join(data_path, "fineweb_train_[0-9]*.bin")
    val_files = os.path.join(data_path, "fineweb_val_[0-9]*.bin")
    tokenizer_path = _e("TOKENIZER_PATH", "./data/canonical/tokenizers/fineweb_16384_bpe.model")
    run_id = os.environ.get("RUN_ID", f"run_{int(time.time())}")
    seed = _e("SEED", 42, int)
    compile_mode = _e("COMPILE_MODE", "default")
    # --- evaluation / logging cadence ---
    val_batch_size = _e("VAL_BATCH_SIZE", 524288, int)
    val_loss_every = _e("VAL_LOSS_EVERY", 0, int)  # 0 = no in-training val
    train_log_every = _e("TRAIN_LOG_EVERY", 1000, int)
    # --- schedule ---
    iterations = _e("ITERATIONS", 10000, int)
    warmdown_fraction = _e("WARMDOWN_FRACTION", 0.2, float)
    min_lr = _e("MIN_LR", 0.0, float)  # floor on the LR multiplier (fraction of base LR)
    warmup_steps = _e("WARMUP_STEPS", 5, int)
    train_batch_tokens = _e("TRAIN_BATCH_TOKENS", 524288, int)
    train_seq_len = _e("TRAIN_SEQ_LEN", 1024, int)
    max_wallclock_seconds = _e("MAX_WALLCLOCK_SECONDS", 599.0, float)
    # --- model topology ---
    vocab_size = _e("VOCAB_SIZE", 16384, int)
    num_layers = _e("NUM_LAYERS", 10, int)
    num_kv_heads = _e("NUM_KV_HEADS", 3, int)
    model_dim = _e("MODEL_DIM", 576, int)
    num_heads = _e("NUM_HEADS", 6, int)
    mlp_mult = _e("MLP_MULT", 4, int)
    tie_embeddings = _e("TIE_EMBEDDINGS", 1, int)
    rope_base = _e("ROPE_BASE", 5000.0, float)
    rope_type = _e("ROPE_TYPE", "yarn")
    yarn_max_len = _e("YARN_MAX_LEN", 2048, int)
    logit_softcap = _e("LOGIT_SOFTCAP", 10.0, float)
    softcap_type = _e("SOFTCAP_TYPE", "poly")
    tied_embed_init_std = _e("TIED_EMBED_INIT_STD", 0.005, float)
    qk_gain_init = _e("QK_GAIN_INIT", 5.0, float)
    activation_type = _e("ACTIVATION", "relu2")
    embed_dim = _e("EMBED_DIM", 380, int)
    # --- per-parameter-group learning rates ---
    embed_lr = _e("EMBED_LR", 0.6, float)
    head_lr = _e("HEAD_LR", 0.02, float)
    adam_lr = _e("ADAM_LR", 0.05, float)
    adam_wd = _e("ADAM_WD", 0.05, float)
    tied_embed_lr = _e("TIED_EMBED_LR", 0.02, float)
    # --- sequence-length / batch-size ramp schedules ---
    seq_len_start = _e("SEQ_LEN_START", 0, int)
    seq_schedule_fraction = _e("SEQ_SCHEDULE_FRACTION", 0.0, float)
    batch_tokens_start = _e("BATCH_TOKENS_START", 0, int)
    batch_schedule_fraction = _e("BATCH_SCHEDULE_FRACTION", 0.33, float)
    churn_log_every = _e("CHURN_LOG_EVERY", 0, int)
    # --- matrix optimizer (Muon) ---
    matrix_lr = _e("MATRIX_LR", 0.035, float)
    scalar_lr = _e("SCALAR_LR", 0.02, float)
    muon_momentum = _e("MUON_MOMENTUM", 0.95, float)
    muon_backend_steps = _e("MUON_BACKEND_STEPS", 3, int)
    muon_wd = _e("MUON_WD", 0.0, float)
    matrix_optimizer = _e("MATRIX_OPTIMIZER", "muon")
    muon_momentum_warmup_start = _e("MUON_MOMENTUM_WARMUP_START", 0.85, float)
    muon_momentum_warmup_steps = _e("MUON_MOMENTUM_WARMUP_STEPS", 500, int)
    # --- Adam (non-matrix params) ---
    beta1 = _e("BETA1", 0.9, float)
    beta2 = _e("BETA2", 0.95, float)
    adam_eps = _e("ADAM_EPS", 1e-8, float)
    grad_clip_norm = _e("GRAD_CLIP_NORM", 0.0, float)
    # --- quantization / test-time training ---
    bitnet_group_size = _e("BITNET_GROUP_SIZE", 192, int)
    ttt_steps = _e("TTT_STEPS", 3, int)
    ttt_lr = _e("TTT_LR", 0.005, float)
    ttt_tokens = _e("TTT_TOKENS", 32768, int)
    skip_training = _e("SKIP_TRAINING", 0, bool)
    scale_quant_bits = _e("SCALE_QUANT_BITS", 5, int)  # 5-bit log-delta scale quantization
    fp_storage = True  # FP8 storage for non-quinary (small) tensors

# Calibration / fp16-stored scalar+vector params adapted by score-first TTT.
# `scale_correction` was previously listed here but its gradient is blocked by
# the STE detach in QuinaryLinear.forward, so including it as a TTT target was
# a no-op. Excluded from the CTP tuple so the TTT optimizer reflects what
# actually receives gradients (~42k params, not 232k). The parameter still
# exists in the state-dict and is stored fp16 in the artifact (via the ndim<2
# bucket); it is just not selected for TTT adaptation.
CTP = ("attn_scale","mlp_scale","resid_mix","q_gain","skip_weights","vocab_bias")

def pack_quinary(q: Tensor):
    """Encode quinary symbols {-2..2} as base-5 digits, three per byte.

    The stream is zero-padded to a multiple of three symbols; the true
    symbol count travels alongside. Returns (packed_bytes, n_symbols).
    """
    shifted = (q.reshape(-1).to(torch.int8) + 2).numpy()  # {-2..2} -> {0..4}
    n_symbols = len(shifted)
    tail = (3 - n_symbols % 3) % 3
    if tail:
        shifted = np.concatenate([shifted, np.zeros(tail, dtype=np.int8)])
    triples = shifted.reshape(-1, 3).astype(np.uint8)
    # Per-byte value is at most 4 + 4*5 + 4*25 = 124, safely inside uint8.
    packed = triples[:, 0] + triples[:, 1] * 5 + triples[:, 2] * 25
    return packed.tobytes(), n_symbols

def unpack_quinary(data: bytes, n: int) -> Tensor:
    """Inverse of pack_quinary: recover `n` symbols from base-5 bytes."""
    codes = np.frombuffer(data, dtype=np.uint8).astype(np.int16)
    digits = np.zeros((len(codes), 3), dtype=np.int8)
    for pos in range(3):
        digits[:, pos] = codes % 5
        codes = codes // 5
    return torch.from_numpy(digits.reshape(-1)[:n].astype(np.int8) - 2)


def pack_quinary_bitmask(q) -> tuple[bytes, int, int]:
    """Pack quinary symbols ∈ {-2..2} as three concatenated bit-planes.

    Layout (big-endian within bytes via numpy.packbits):
      [zero_mask: ceil(n_total / 8) bytes   — 1 bit/symbol,  1=zero]
      [sign_bits: ceil(n_nonzero / 8) bytes — 1 bit/nonzero, 1=negative]
      [mag2_bits: ceil(n_nonzero / 8) bytes — 1 bit/nonzero, 1=|symbol|==2]

    Returns: (concat_bytes, n_total, n_nonzero).
    Keeping each plane's bit statistics homogeneous lets the downstream
    compressor model them independently instead of fighting a multimodal
    mixture.
    """
    if isinstance(q, torch.Tensor):
        symbols = q.reshape(-1).to(torch.int8).numpy()
    else:
        symbols = np.asarray(q, dtype=np.int8).reshape(-1)
    n_total = symbols.size
    nonzero = symbols != 0
    n_nonzero = int(nonzero.sum())
    active = symbols[nonzero]
    zero_plane = np.packbits((~nonzero).astype(np.uint8))       # 1 bit means "this symbol is zero"
    sign_plane = np.packbits((active < 0).astype(np.uint8))     # 1 = negative
    mag2_plane = np.packbits((np.abs(active) == 2).astype(np.uint8))
    blob = zero_plane.tobytes() + sign_plane.tobytes() + mag2_plane.tobytes()
    return blob, n_total, n_nonzero


def unpack_quinary_bitmask(data: bytes, n_total: int, n_nonzero: int) -> Tensor:
    """Inverse of pack_quinary_bitmask. Returns int8 tensor of length n_total."""
    z_len = (n_total + 7) // 8
    s_len = (n_nonzero + 7) // 8
    m_len = (n_nonzero + 7) // 8
    expected = z_len + s_len + m_len
    if len(data) != expected:
        raise ValueError(f"bitmask data size {len(data)} != expected {expected} "
                         f"(n_total={n_total}, n_nonzero={n_nonzero})")
    planes = np.frombuffer(data, dtype=np.uint8)
    is_zero = np.unpackbits(planes[:z_len])[:n_total].astype(bool)
    negative = np.unpackbits(planes[z_len:z_len + s_len])[:n_nonzero].astype(bool)
    is_two = np.unpackbits(planes[z_len + s_len:])[:n_nonzero].astype(bool)
    magnitude = np.where(is_two, 2, 1).astype(np.int8)
    signed = np.where(negative, -magnitude, magnitude)
    restored = np.zeros(n_total, dtype=np.int8)
    restored[~is_zero] = signed
    return torch.from_numpy(restored)


# Artifact archive: layout-aware per-stream v2 (header byte 0x03).
#
# - For each quinary tensor: screen the 4 layouts {base5, base5_T,
#   bitmask, bitmask_T} by LZMA9-compressed size, then run LZMA9 vs
#   lrzip-zpaq -L9 only on the winning layout. Bounded heuristic
#   with an LZMA floor — *not* an exhaustive 4×2 search.
+# - For c_qkv.weight: split rows into Q / K / V sub-payloads each chosen +# independently (Q, K, V have different trained distributions). +# - For other bulk fields (FP8 embeddings, large fp16 tensors): torch.save +# the value and compress with min(lzma, lrzip). +# - Robust to the seed-dependent lrzip cliff observed on full-blob +# compression: even if lrzip ZPAQ underperforms on one tensor's bytes, +# lzma takes over for that tensor specifically. +_COMPRESSOR_LZMA = 0 +_COMPRESSOR_LRZIP_ZPAQ = 1 +_COMPRESSOR_PER_STREAM_V2 = 3 + +# Threshold for treating a value as "bulk" (compressed independently). +_PER_STREAM_BULK_BYTES = 64 * 1024 + +# v2 layout IDs — what kind of body is stored in a bulk payload. +_LAYOUT_RAW = 0 # opaque bytes (torch.save output for non-quinary fields) +_LAYOUT_Q_BASE5 = 1 # base-5 packed quinary symbols, canonical row-major order +_LAYOUT_Q_BASE5_T = 2 # base-5 packed quinary symbols, transposed (column-major) +_LAYOUT_Q_BITMASK = 3 # bitmask packed (zero|sign|mag2), canonical +_LAYOUT_Q_BITMASK_T = 4 # bitmask packed, transposed +_LAYOUT_NAMES = { + _LAYOUT_RAW: "raw", + _LAYOUT_Q_BASE5: "base5", + _LAYOUT_Q_BASE5_T: "base5_T", + _LAYOUT_Q_BITMASK: "bitmask", + _LAYOUT_Q_BITMASK_T: "bitmask_T", +} + +# v2 sentinel for metadata refs (distinct string from v1 to avoid version confusion). +_BULK_SENTINEL_V2 = "__BULK_REF_V2__" + + +def _lrzip_compress_bytes(data: bytes, level: int = 9) -> bytes: + """Compress raw bytes via `lrzip -z` (ZPAQ). 
Returns body only (no header).""" + with tempfile.NamedTemporaryFile(delete=False) as f: + f.write(data); in_path = f.name + out_path = in_path + ".lrz" + try: + try: + subprocess.run(["lrzip", "-z", "-L", str(level), "-q", "-f", in_path], + check=True, capture_output=True) + except FileNotFoundError as e: + raise RuntimeError("lrzip binary not found; run `apt-get install lrzip`") from e + with open(out_path, "rb") as f: + return f.read() + finally: + for p in (in_path, out_path): + try: os.unlink(p) + except FileNotFoundError: pass + + +def _lrzip_decompress_bytes(body: bytes) -> bytes: + """Decompress an lrzip ZPAQ body (no header).""" + with tempfile.NamedTemporaryFile(delete=False, suffix=".lrz") as f: + f.write(body); in_path = f.name + out_path = in_path[:-4] + try: + try: + subprocess.run(["lrzip", "-d", "-q", "-f", in_path], + check=True, capture_output=True) + except FileNotFoundError as e: + raise RuntimeError("lrzip binary not found; required to load this artifact") from e + with open(out_path, "rb") as f: + return f.read() + finally: + for p in (in_path, out_path): + try: os.unlink(p) + except FileNotFoundError: pass + + +def _pick_best_compressor(data: bytes, level: int = 9, has_lrzip: bool = True) -> tuple[int, bytes]: + """Try lzma + (optionally) lrzip; return (method_id, body) for the smaller. + + This is the per-stream robustness: even if one compressor cliffs on a + given byte distribution (the seed=7 issue), the other usually doesn't. + Any failure (missing binary, runtime error, broken lrzip backend) on + lrzip is silently skipped — lzma is the always-available floor. 
+ """ + candidates: list[tuple[int, bytes]] = [(_COMPRESSOR_LZMA, lzma.compress(data, preset=level))] + if has_lrzip: + try: + candidates.append((_COMPRESSOR_LRZIP_ZPAQ, _lrzip_compress_bytes(data, level))) + except (RuntimeError, subprocess.CalledProcessError, OSError): + pass + return min(candidates, key=lambda c: len(c[1])) + + +def _is_bulk(value) -> bool: + """Decide whether a state_dict field should be peeled off for per-stream compression.""" + if isinstance(value, (bytes, bytearray)): + return len(value) >= _PER_STREAM_BULK_BYTES + if isinstance(value, torch.Tensor): + return value.numel() * value.element_size() >= _PER_STREAM_BULK_BYTES + return False + + +def _load_artifact(blob: bytes) -> dict: + """Load a per-stream v2 archive (header byte 0x03) into a state_dict.""" + return _deserialize_per_stream_v2(blob) + + +# --------------------------------------------------------------------------- +# v2 archive: layout-aware per-stream compression +# --------------------------------------------------------------------------- + +def _gen_quinary_layout_candidates(symbols_2d: np.ndarray) -> list[tuple[int, bytes, dict]]: + """For a 2D int8 symbol matrix in {-2..2}, return a list of + (layout_id, body_bytes, layout_meta) tuples — every supported layout. + + layout_meta carries the per-layout fields needed at deserialize time + (n_quins, n_nonzero, etc.). 
+ """ + rows, cols = symbols_2d.shape + flat = symbols_2d.reshape(-1) + transposed = symbols_2d.T.reshape(-1).copy() # contiguous transpose + + # base-5 canonical + body_b5, n_b5 = pack_quinary(torch.from_numpy(flat)) + # base-5 transposed + body_b5_t, n_b5_t = pack_quinary(torch.from_numpy(transposed)) + # bitmask canonical + body_bm, nt_bm, nz_bm = pack_quinary_bitmask(torch.from_numpy(flat)) + # bitmask transposed + body_bm_t, nt_bm_t, nz_bm_t = pack_quinary_bitmask(torch.from_numpy(transposed)) + + return [ + (_LAYOUT_Q_BASE5, body_b5, {"n_quins": n_b5}), + (_LAYOUT_Q_BASE5_T, body_b5_t, {"n_quins": n_b5_t}), + (_LAYOUT_Q_BITMASK, body_bm, {"n_total": nt_bm, "n_nonzero": nz_bm}), + (_LAYOUT_Q_BITMASK_T, body_bm_t, {"n_total": nt_bm_t, "n_nonzero": nz_bm_t}), + ] + + +def _layout_to_canonical_bytes(layout: int, body: bytes, rows: int, cols: int, + layout_meta: dict) -> bytes: + """Inverse of _gen_quinary_layout_candidates: take a body in one of the + layout encodings and return the canonical base-5-packed bytes that + `deq_sd` expects in entry["packed"].""" + if layout == _LAYOUT_Q_BASE5: + # Already canonical. 
+ return body + if layout == _LAYOUT_Q_BASE5_T: + symbols_t = unpack_quinary(body, layout_meta["n_quins"]).numpy() + symbols = symbols_t.reshape(cols, rows).T.reshape(-1).copy() + out, _ = pack_quinary(torch.from_numpy(symbols)) + return out + if layout == _LAYOUT_Q_BITMASK: + symbols = unpack_quinary_bitmask(body, layout_meta["n_total"], layout_meta["n_nonzero"]).numpy() + out, _ = pack_quinary(torch.from_numpy(symbols)) + return out + if layout == _LAYOUT_Q_BITMASK_T: + symbols_t = unpack_quinary_bitmask(body, layout_meta["n_total"], layout_meta["n_nonzero"]).numpy() + symbols = symbols_t.reshape(cols, rows).T.reshape(-1).copy() + out, _ = pack_quinary(torch.from_numpy(symbols)) + return out + raise ValueError(f"unknown layout id {layout}") + + +def _qkv_split_sizes(name: str, rows: int, cols: int) -> tuple[int, int] | None: + """For a c_qkv weight, derive (q_rows, kv_rows) row-counts. + + Standard transformer convention: q_size = num_heads * head_dim = model_dim + (the square attention assumption). So q_rows == cols. The remaining rows + are split equally between K and V: kv_rows = (rows - cols) // 2. + + Returns None if name doesn't look like c_qkv or the ratio doesn't decompose + cleanly (in which case we don't split, just compress as one tensor). + """ + if not name.endswith("c_qkv.weight"): + return None + if rows <= cols: + return None # not a GQA shape; or single-head case where Q==K==V + extra = rows - cols # 2 * kv_rows + if extra % 2 != 0: + return None + kv_rows = extra // 2 + q_rows = cols + if q_rows + 2 * kv_rows != rows: + return None + return q_rows, kv_rows + + +def _serialize_per_stream_v2(state_dict: dict, level: int = 9) -> tuple[bytes, str]: + """v2 archive: layout-aware per-stream compression. + + For each quinary entry, generate up to 4 layout candidates + (base5, base5_T, bitmask, bitmask_T), screen them by LZMA9-compressed + size, then compress the winning layout with min(LZMA9, lrzip-zpaq). 
def _serialize_per_stream_v2(state_dict: dict, level: int = 9) -> tuple[bytes, str]:
    """v2 archive: layout-aware per-stream compression.

    For each quinary entry, generate up to 4 layout candidates
    (base5, base5_T, bitmask, bitmask_T), screen them by LZMA9-compressed
    size, then compress the winning layout with min(LZMA9, lrzip-zpaq).
    For c_qkv weights, the row-block is split into Q/K/V sub-payloads
    handled independently. All other bulk fields use the v1 raw-bytes path.

    This is a bounded-cost heuristic with an LZMA floor — *not* an
    exhaustive 4×2 search. It can in principle miss a (layout, compressor)
    pair where the LZMA-screen-loser would have won under lrzip; in
    practice this is rare on this stack and the LZMA floor caps the
    worst case at the canonical base5+LZMA encoding.

    Returns (archive_bytes, audit_string) so the caller can route the
    audit lines through the run's logger.
    """
    from shutil import which
    has_lrzip = which("lrzip") is not None

    bulk_bodies: list[tuple[int, bytes]] = []  # (compressor_method, compressed_body)
    metadata: dict = {}
    audit_lines: list[str] = []
    layout_counts = {n: 0 for n in _LAYOUT_NAMES.values()}
    method_counts = {"lzma": 0, "lrzip": 0}
    qkv_split_count = 0

    def _store(layout: int, body: bytes) -> tuple[int, int, int]:
        """Compress and store; return (payload_idx, method_id, compressed_size)."""
        method, comp = _pick_best_compressor(body, level, has_lrzip)
        bulk_bodies.append((method, comp))
        method_counts["lzma" if method == _COMPRESSOR_LZMA else "lrzip"] += 1
        layout_counts[_LAYOUT_NAMES[layout]] += 1
        return len(bulk_bodies) - 1, method, len(comp)

    def _best_quinary_layout(symbols_2d: np.ndarray) -> tuple[int, int, int, dict, int]:
        """Generate all layout candidates, screen with lzma to pick best layout,
        then store the winner via _store (min(lzma, lrzip) for the final body).

        Cheap-screen + refine cuts serialize compute from O(layouts*compressors)
        to O(layouts) lzma + 1 lrzip, keeping serialize time bounded even when
        lrzip is slow on bad streams.

        Returns (chosen_layout, payload_idx, compressed_size, layout_meta, n_candidates_evaluated).
        """
        cands = _gen_quinary_layout_candidates(symbols_2d)
        best_layout, best_body, best_meta = None, None, None
        best_lzma_size = None
        for layout, body, meta in cands:
            lzma_size = len(lzma.compress(body, preset=level))
            if best_lzma_size is None or lzma_size < best_lzma_size:
                best_lzma_size = lzma_size
                best_layout, best_body, best_meta = layout, body, meta
        # FIX: the original duplicated _store's body here (and _store was dead
        # code); routing through _store keeps counts/payload bookkeeping in
        # one place with identical behavior.
        idx, _method, size = _store(best_layout, best_body)
        return best_layout, idx, size, best_meta, len(cands)

    for name, entry in state_dict.items():
        if not isinstance(entry, dict):
            metadata[name] = entry
            continue
        new_entry = dict(entry)  # shallow copy

        if entry.get("type") == "quinary" and "packed" in entry:
            # Reconstruct full symbol matrix from canonical packed bytes.
            rows, cols = entry["shape"][0], entry["padded_cols"]
            symbols_full = unpack_quinary(entry["packed"], entry["n_quins"]).numpy()
            symbols_2d = symbols_full[:rows * cols].reshape(rows, cols)

            split = _qkv_split_sizes(name, rows, cols)
            if split is not None:
                q_rows, kv_rows = split
                qkv_split_count += 1
                refs = []
                for sub_name, sub_2d in (
                    ("Q", symbols_2d[:q_rows]),
                    ("K", symbols_2d[q_rows:q_rows + kv_rows]),
                    ("V", symbols_2d[q_rows + kv_rows:]),
                ):
                    layout, idx, sz, meta, _ = _best_quinary_layout(sub_2d.copy())
                    sub_rows, sub_cols = sub_2d.shape
                    refs.append({
                        "_v2": _BULK_SENTINEL_V2, "idx": idx, "layout": layout,
                        "rows": sub_rows, "cols": sub_cols, "meta": meta,
                    })
                new_entry["packed"] = ("__QKV_SPLIT__", refs)
                audit_lines.append(f" {name}: qkv_split layouts={[_LAYOUT_NAMES[r['layout']] for r in refs]}")
            else:
                layout, idx, sz, meta, _ = _best_quinary_layout(symbols_2d.copy())
                new_entry["packed"] = {
                    "_v2": _BULK_SENTINEL_V2, "idx": idx, "layout": layout,
                    "rows": rows, "cols": cols, "meta": meta,
                }
                audit_lines.append(f" {name}: {_LAYOUT_NAMES[layout]} ({sz/1e6:.3f}MB)")

        # Other bulk fields (fp8 data, fp16 large tensors, scale_delta tensors,
        # etc.) — fall back to v1's torch.save+pick path, also via _store.
        for key, value in list(new_entry.items()):
            if key == "packed":  # already handled
                continue
            if _is_bulk(value):
                buf = io.BytesIO(); torch.save(value, buf)
                idx, _method, _sz = _store(_LAYOUT_RAW, buf.getvalue())
                new_entry[key] = {
                    "_v2": _BULK_SENTINEL_V2, "idx": idx,
                    "layout": _LAYOUT_RAW,
                }
        metadata[name] = new_entry

    # Compress metadata.
    meta_buf = io.BytesIO(); torch.save(metadata, meta_buf)
    meta_compressed = lzma.compress(meta_buf.getvalue(), preset=level)

    # Assemble archive (header 0x03).
    out = bytearray()
    out.append(_COMPRESSOR_PER_STREAM_V2)
    out.extend(len(meta_compressed).to_bytes(4, "little"))
    out.extend(meta_compressed)
    out.extend(len(bulk_bodies).to_bytes(4, "little"))
    for method, body in bulk_bodies:
        out.append(method)
        out.extend(len(body).to_bytes(4, "little"))
        out.extend(body)

    layout_summary = ", ".join(f"{n}={c}" for n, c in layout_counts.items() if c)
    audit = (f"per_stream_v2: meta={len(meta_compressed)/1e6:.2f}MB, "
             f"{len(bulk_bodies)} bulk payloads "
             f"(lzma={method_counts['lzma']}, lrzip={method_counts['lrzip']}; "
             f"{layout_summary}), qkv_splits={qkv_split_count}, "
             f"total={len(out)/1e6:.2f}MB")
    # FIX: audit_lines was collected but never returned (silently dropped);
    # append the per-tensor lines so the caller's logger actually sees them.
    if audit_lines:
        audit = audit + "\n" + "\n".join(audit_lines)
    return bytes(out), audit


def _deserialize_per_stream_v2(blob: bytes) -> dict:
    """Inverse of _serialize_per_stream_v2. Returns a state_dict whose quinary
    entries have canonical entry["packed"] bytes — `deq_sd` is unaware of the
    archive layer.

    Raises ValueError on a wrong header byte or unknown compressor id.
    """
    cursor = 0
    # FIX: was `assert` — input validation must survive `python -O`.
    if blob[cursor] != _COMPRESSOR_PER_STREAM_V2:
        raise ValueError(f"expected v2 header 0x03, got {blob[0]:#x}")
    cursor += 1
    meta_size = int.from_bytes(blob[cursor:cursor + 4], "little"); cursor += 4
    meta_blob = blob[cursor:cursor + meta_size]; cursor += meta_size
    metadata = torch.load(io.BytesIO(lzma.decompress(meta_blob)),
                          map_location="cpu", weights_only=False)
    n_payloads = int.from_bytes(blob[cursor:cursor + 4], "little"); cursor += 4
    decompressed: list[bytes] = []
    for _ in range(n_payloads):
        method = blob[cursor]; cursor += 1
        size = int.from_bytes(blob[cursor:cursor + 4], "little"); cursor += 4
        body = blob[cursor:cursor + size]; cursor += size
        if method == _COMPRESSOR_LZMA:
            decompressed.append(lzma.decompress(body))
        elif method == _COMPRESSOR_LRZIP_ZPAQ:
            decompressed.append(_lrzip_decompress_bytes(body))
        else:
            raise ValueError(f"unknown compressor method {method:#x}")

    def _is_v2_ref(v):
        return isinstance(v, dict) and v.get("_v2") == _BULK_SENTINEL_V2

    for name, entry in metadata.items():
        if not isinstance(entry, dict):
            continue
        for key, value in list(entry.items()):
            if _is_v2_ref(value):
                if value["layout"] == _LAYOUT_RAW:
                    entry[key] = torch.load(io.BytesIO(decompressed[value["idx"]]),
                                            map_location="cpu", weights_only=False)
                else:
                    entry[key] = _layout_to_canonical_bytes(
                        value["layout"], decompressed[value["idx"]],
                        value["rows"], value["cols"], value["meta"])
            elif (isinstance(value, tuple) and len(value) == 2
                  and value[0] == "__QKV_SPLIT__"):
                # Reconstruct each part's symbol matrix, concat along row dim,
                # repack as canonical base-5.
                refs = value[1]
                sub_symbols = []
                for ref in refs:
                    canonical_body = _layout_to_canonical_bytes(
                        ref["layout"], decompressed[ref["idx"]],
                        ref["rows"], ref["cols"], ref["meta"])
                    n = ref["rows"] * ref["cols"]
                    sub_syms = unpack_quinary(canonical_body, n).numpy()[:n]
                    sub_symbols.append(sub_syms.reshape(ref["rows"], ref["cols"]))
                full = np.concatenate(sub_symbols, axis=0).reshape(-1).copy()
                entry[key], _ = pack_quinary(torch.from_numpy(full))
    return metadata
isinstance(entry, dict): + continue + for key, value in list(entry.items()): + if _is_v2_ref(value): + if value["layout"] == _LAYOUT_RAW: + entry[key] = torch.load(io.BytesIO(decompressed[value["idx"]]), + map_location="cpu", weights_only=False) + else: + entry[key] = _layout_to_canonical_bytes( + value["layout"], decompressed[value["idx"]], + value["rows"], value["cols"], value["meta"]) + elif (isinstance(value, tuple) and len(value) == 2 + and value[0] == "__QKV_SPLIT__"): + # Reconstruct each part's symbol matrix, concat along row dim, + # repack as canonical base-5. + refs = value[1] + sub_symbols = [] + for ref in refs: + canonical_body = _layout_to_canonical_bytes( + ref["layout"], decompressed[ref["idx"]], + ref["rows"], ref["cols"], ref["meta"]) + n = ref["rows"] * ref["cols"] + sub_syms = unpack_quinary(canonical_body, n).numpy()[:n] + sub_symbols.append(sub_syms.reshape(ref["rows"], ref["cols"])) + full = np.concatenate(sub_symbols, axis=0).reshape(-1).copy() + entry[key], _ = pack_quinary(torch.from_numpy(full)) + return metadata + + +def _quant_scale(scale_fp16: Tensor, bits: int) -> dict: + """Quantize per-group scales to bits-bit log-deltas. 
Returns dict with + `scale_anchor` (fp16), `scale_step` (fp16), `scale_delta` (packed bytes + or int8 tensor), plus `scale_bits` to dispatch in deq_sd.""" + scale_f = scale_fp16.float().reshape(-1) + n_groups = scale_f.numel() + log_scale = torch.log2(scale_f.clamp(min=1e-12)) + lo, hi = float(log_scale.min().item()), float(log_scale.max().item()) + anchor = (lo + hi) / 2.0 + spread = hi - lo + levels = 1 << bits + half = levels // 2 + step = max(spread / max(levels - 1, 1), 1e-9) + delta = ((log_scale - anchor) / step).round().clamp(-half, half - 1).to(torch.int32) + if bits == 4: + d = (delta + half).to(torch.uint8).numpy() + if len(d) % 2: + d = np.concatenate([d, np.zeros(1, dtype=np.uint8)]) + packed = (d[::2] | (d[1::2] << 4)).astype(np.uint8) + delta_stored = torch.from_numpy(packed.copy()) + elif bits == 5: + delta_stored = (delta + half).to(torch.uint8) + else: # 8 + delta_stored = delta.to(torch.int8) + return { + "scale_anchor": torch.tensor(anchor, dtype=torch.float16), + "scale_step": torch.tensor(step, dtype=torch.float16), + "scale_delta": delta_stored, + "scale_n_groups": n_groups, + "scale_bits": bits, + } + +def _dequant_scale(entry: dict) -> Tensor: + """Reconstruct fp16 per-group scales from the quantized representation.""" + bits = int(entry["scale_bits"]) + half = (1 << bits) // 2 + n_groups = int(entry["scale_n_groups"]) + if bits == 4: + packed = entry["scale_delta"].to(torch.uint8).numpy() + low = packed & 0x0F + high = (packed >> 4) & 0x0F + d = np.empty(packed.size * 2, dtype=np.uint8) + d[0::2] = low + d[1::2] = high + delta = torch.from_numpy(d[:n_groups].astype(np.int32) - half) + elif bits == 5: + delta = entry["scale_delta"].to(torch.int32) - half + else: # 8 + delta = entry["scale_delta"].to(torch.int32) + anchor = entry["scale_anchor"].float() + step = entry["scale_step"].float() + return (2.0 ** (anchor + delta.float() * step)).to(torch.float16) + + +def q_sd(state_dict: dict, group_size: int = 64, + scale_quant_bits: int = 0) -> 
tuple[dict, dict]: + quantized = {} + stats = {"quinary_params": 0, "quinary_bytes": 0, "fp_params": 0, "fp_bytes": 0} + for name, tensor in state_dict.items(): + t = tensor.detach().cpu().float().contiguous() + t_orig_shape = list(t.shape) + if t.ndim == 3: + t = t.reshape(t.shape[0], -1) + is_quantized_candidate = ( + t.ndim == 2 and t.numel() > 65_536 + and "tok_emb" not in name and "lm_head" not in name and "embed_proj" not in name + ) + if is_quantized_candidate: + pad = (group_size - t.shape[1] % group_size) % group_size + t_padded = F.pad(t, (0, pad)) if pad > 0 else t + t_grouped = t_padded.reshape(-1, group_size) + scale = t_grouped.abs().mean(-1, keepdim=True).clamp(min=1e-8).half().float() + q = (t_grouped / scale).round().clamp(-2, 2).to(torch.int8) + + packed_bytes, n_packed = pack_quinary(q) + stats["quinary_params"] += t.numel() + + entry = { + "type": "quinary", "packed": packed_bytes, + "shape": list(t.shape), "padded_cols": t_padded.shape[1], + "group_size": group_size, "n_quins": n_packed, + "orig_shape": t_orig_shape, + } + if scale_quant_bits and scale_quant_bits in (4, 5, 8): + sq = _quant_scale(scale.half().squeeze(-1), scale_quant_bits) + entry.update(sq) + stats["quinary_bytes"] += len(packed_bytes) + sq["scale_delta"].numel() + 4 # +4 for anchor+step + else: + entry["scale"] = scale.half().squeeze(-1) + stats["quinary_bytes"] += len(packed_bytes) + scale.numel() * 2 + quantized[name] = entry + elif t.ndim == 2: + quantized[name] = {"type": "fp8", "data": t.to(torch.float8_e4m3fn)} + stats["fp_params"] += t.numel() + stats["fp_bytes"] += t.numel() + else: + quantized[name] = {"type": "fp16", "data": t.half()} + stats["fp_params"] += t.numel() + stats["fp_bytes"] += t.numel() * 2 + return quantized, stats + +def deq_sd(quantized: dict, target_dtype=torch.bfloat16): + out = {} + for name, entry in quantized.items(): + if entry["type"] == "quinary": + q = unpack_quinary(entry["packed"], entry["n_quins"]) + q = q.float().reshape(-1, 
entry["group_size"]) + if "scale_bits" in entry: + scale = _dequant_scale(entry).float().unsqueeze(-1) + else: + scale = entry["scale"].float().unsqueeze(-1) + q_absmean = q.abs().mean(-1, keepdim=True).clamp(min=1e-8) + t = (q * (scale / q_absmean)).reshape(-1, entry["padded_cols"]) + shape = entry["shape"] + result = t[:shape[0], :shape[1]].to(target_dtype) + orig = entry.get("orig_shape") + out[name] = result.reshape(orig).contiguous() if orig and orig != shape else result.contiguous() + elif entry["type"] == "fp8": + out[name] = entry["data"].to(torch.float32).to(target_dtype).contiguous() + else: + out[name] = entry["data"].to(target_dtype).contiguous() + return out + +def quin_stats(model: nn.Module, group_size: int = 64): + total = zeros = 0 + with torch.no_grad(): + for name, p in model.named_parameters(): + if p.ndim == 2 and ("weight" in name or "prototypes" in name) and p.shape[0] > 1 and p.numel() % group_size == 0: + w = p.detach().float().reshape(-1, group_size) + scale = w.abs().mean(-1, keepdim=True).clamp(min=1e-8).half().float() + q = (w / scale).round().clamp(-2, 2) + zeros += int((q == 0).sum().item()) + total += int(q.numel()) + return {"zero_frac": zeros / max(total, 1), "total_weights": total} + +_prev_committed: dict = {} + +def churn_fn(model: nn.Module, group_size: int = 64): + global _prev_committed + total = flipped = 0 + with torch.no_grad(): + for name, p in model.named_parameters(): + if p.ndim == 2 and ("weight" in name or "prototypes" in name) and p.shape[0] > 1 and p.numel() % group_size == 0: + w = p.detach().float().reshape(-1, group_size) + scale = w.abs().mean(-1, keepdim=True).clamp(min=1e-8).half().float() + q = (w / scale).round().clamp(-2, 2).cpu().numpy() + if name in _prev_committed: + flipped += int(np.sum(q != _prev_committed[name])) + total += q.size + _prev_committed[name] = q + return flipped / max(total, 1) + +def ns_orth(G: Tensor, steps: int = 10, eps: float = 1e-7) -> Tensor: + a, b, c = (3.4445, -4.7750, 2.0315) 
+ X = G.bfloat16() + X /= X.norm() + eps + transposed = G.size(0) > G.size(1) + if transposed: + X = X.T + for _ in range(steps): + A = X @ X.T + B = b * A + c * A @ A + X = a * X + B @ X + return X.T if transposed else X + +class Muon(torch.optim.Optimizer): + def __init__(self, params, lr: float, momentum: float, backend_steps: int, nesterov: bool = True, wd: float = 0.0): + super().__init__(params, dict(lr=lr, momentum=momentum, backend_steps=backend_steps, nesterov=nesterov, wd=wd)) + + @torch.no_grad() + def step(self, closure=None): + loss = None + if closure is not None: + with torch.enable_grad(): + loss = closure() + distributed = dist.is_available() and dist.is_initialized() + world_size = dist.get_world_size() if distributed else 1 + rank = dist.get_rank() if distributed else 0 + for group in self.param_groups: + params = group["params"] + if not params: + continue + lr, momentum = group["lr"], group["momentum"] + backend_steps, nesterov = group["backend_steps"], group["nesterov"] + total_params = sum(int(p.numel()) for p in params) + updates_flat = torch.zeros(total_params, device=params[0].device, dtype=torch.bfloat16) + curr = 0 + for i, p in enumerate(params): + if i % world_size == rank and p.grad is not None: + g = p.grad + state = self.state[p] + if "momentum_buffer" not in state: + state["momentum_buffer"] = torch.zeros_like(g) + buf = state["momentum_buffer"] + buf.mul_(momentum).add_(g) + if nesterov: + g = g.add(buf, alpha=momentum) + g = F.rms_norm(g.float(), (g.size(-1),)).bfloat16() + g = ns_orth(g, steps=backend_steps) + g *= max(1, g.size(0) / g.size(1)) ** 0.5 + updates_flat[curr:curr + p.numel()] = g.reshape(-1) + curr += p.numel() + if distributed: + dist.all_reduce(updates_flat, op=dist.ReduceOp.SUM) + wd = group.get("wd", 0.0) + curr = 0 + for p in params: + g = updates_flat[curr : curr + p.numel()].view_as(p).to(dtype=p.dtype) + if wd > 0: + p.mul_(1 - lr * wd) + p.add_(g, alpha=-lr) + curr += p.numel() + return loss + +def 
ld_shard(file: Path) -> Tensor: + header_bytes = 256 * np.dtype(" Tensor: + chunks = [] + remaining = n + while remaining > 0: + avail = self.tokens.numel() - self.pos + if avail <= 0: + self._advance_file() + continue + k = min(remaining, avail) + chunks.append(self.tokens[self.pos:self.pos + k]) + self.pos += k + remaining -= k + return chunks[0] if len(chunks) == 1 else torch.cat(chunks) + +class DistributedTokenLoader: + def __init__(self, pattern: str, rank: int, world_size: int, device: torch.device): + self.rank, self.world_size, self.device = rank, world_size, device + self.stream = TokenStream(pattern) + + def next_batch(self, global_tokens: int, seq_len: int, grad_accum_steps: int) -> tuple[Tensor, Tensor]: + local_tokens = global_tokens // (self.world_size * grad_accum_steps) + per_rank_span = local_tokens + 1 + chunk = self.stream.take(per_rank_span * self.world_size) + start = self.rank * per_rank_span + local = chunk[start:start + per_rank_span].pin_memory().to(self.device, non_blocking=True).to(torch.int64) + x = local[:-1].reshape(-1, seq_len) + y = local[1:].reshape(-1, seq_len) + return x, y + +class RMSNorm(nn.Module): + def __init__(self, eps: float | None = None): + super().__init__() + self.eps = eps + + def forward(self, x: Tensor) -> Tensor: + return F.rms_norm(x, (x.size(-1),), eps=self.eps) + +def apply_fp8_ste(w: Tensor) -> Tensor: + w_sim = w.to(torch.float8_e4m3fn).to(w.dtype) + return (w_sim - w).detach() + w + +class QATLinear(nn.Linear): + def forward(self, x: Tensor) -> Tensor: + w_qat = apply_fp8_ste(self.weight) + return F.linear(x, w_qat.to(x.dtype), self.bias.to(x.dtype) if self.bias is not None else None) + +class QATEmbedding(nn.Embedding): + def forward(self, input: Tensor) -> Tensor: + w_qat = apply_fp8_ste(self.weight) + return F.embedding(input, w_qat, self.padding_idx, self.max_norm, + self.norm_type, self.scale_grad_by_freq, self.sparse) + +class QuinaryLinear(nn.Linear): + def __init__(self, in_features, out_features, 
bias=False, group_size=64): + super().__init__(in_features, out_features, bias=bias) + self.group_size = group_size + num_groups = (in_features * out_features) // group_size + # Inert by design: the STE detach below blocks gradients to + # `scale_correction`. Kept as a fp32 buffer at value 1.0 for + # backwards-compatibility with the ternary-base state-dict layout. + # An attempt to fix the STE so this parameter receives gradients + # was tested 2026-05-01 (commit b9c…) and showed a small + # training-time regression with no TTT benefit, so reverted. + self.scale_correction = nn.Parameter(torch.ones(num_groups, dtype=torch.float32)) + + def forward(self, x: Tensor) -> Tensor: + w = self.weight.bfloat16() + g = self.group_size + w_g = w.reshape(-1, g) + scale = w_g.abs().mean(-1, keepdim=True).clamp(min=1e-8) * self.scale_correction.to(w.dtype).unsqueeze(-1) + q = (w_g / scale).round().clamp(-2, 2) + w_quantized = w + ((q * scale).reshape(w.shape) - w).detach() + return F.linear(x, w_quantized, + self.bias.to(x.dtype) if self.bias is not None else None) + + +class NormedQuinaryLinear(QuinaryLinear): + def forward(self, x: Tensor) -> Tensor: + return super().forward(F.rms_norm(x, (x.size(-1),))) + +def restore_low_dim_params_to_fp32(module: nn.Module) -> None: + with torch.no_grad(): + for name, param in module.named_parameters(): + if (param.ndim < 2 or any(p in name for p in CTP)) and param.dtype != torch.float32: + param.data = param.data.float() + +class Rotary(nn.Module): + def __init__(self, dim: int, base: float = 10000.0, no_cache: bool = False, + rope_type: str = "rope", yarn_max_len: int = 4096, train_seq_len: int = 1024): + super().__init__() + self.no_cache = no_cache + inv_freq = 1.0 / (base ** (torch.arange(0, dim, 2, dtype=torch.float32) / dim)) + if rope_type == "yarn": + scale = train_seq_len / yarn_max_len + freq_idx = torch.arange(0, dim, 2, dtype=torch.float32) + ramp = torch.clamp((freq_idx / dim - 0.25) / 0.75, 0.0, 1.0) + inv_freq = inv_freq / 
(ramp * (1.0 / scale - 1.0) + 1.0) + self.register_buffer("inv_freq", inv_freq, persistent=False) + self._seq_len_cached = 0 + self._cos_cached: Tensor | None = None + self._sin_cached: Tensor | None = None + + def forward(self, seq_len, device, dtype): + if self.no_cache: + t = torch.arange(seq_len, device=device, dtype=self.inv_freq.dtype) + freqs = torch.outer(t, self.inv_freq.to(device)) + return freqs.cos()[None, :, None, :].to(dtype=dtype), freqs.sin()[None, :, None, :].to(dtype=dtype) + if ( + self._cos_cached is None + or self._sin_cached is None + or self._seq_len_cached != seq_len + or self._cos_cached.device != device + ): + t = torch.arange(seq_len, device=device, dtype=self.inv_freq.dtype) + freqs = torch.outer(t, self.inv_freq.to(device)) + self._cos_cached = freqs.cos()[None, :, None, :] + self._sin_cached = freqs.sin()[None, :, None, :] + self._seq_len_cached = seq_len + return self._cos_cached.to(dtype=dtype), self._sin_cached.to(dtype=dtype) + +def apply_rotary_emb(x: Tensor, cos: Tensor, sin: Tensor) -> Tensor: + half = x.size(-1) // 2 + x1, x2 = x[..., :half], x[..., half:] + return torch.cat((x1 * cos + x2 * sin, x1 * (-sin) + x2 * cos), dim=-1) + +class CausalSelfAttention(nn.Module): + def __init__(self, dim, num_heads, num_kv_heads, rope_base, qk_gain_init, + group_size=64, no_cache=False, rope_type="rope", + yarn_max_len=4096, train_seq_len=1024): + super().__init__() + self.num_heads, self.num_kv_heads = num_heads, num_kv_heads + self.head_dim = dim // num_heads + self.q_size = self.num_heads * self.head_dim + self.kv_size = self.num_kv_heads * self.head_dim + + self.c_qkv = QuinaryLinear(dim, self.q_size + 2 * self.kv_size, bias=False, group_size=group_size) + self.proj = NormedQuinaryLinear(dim, dim, bias=False, group_size=group_size) + self.proj._zero_init = True + self.q_gain = nn.Parameter(torch.full((num_heads,), qk_gain_init, dtype=torch.float32)) + self.rotary = Rotary(self.head_dim, base=rope_base, no_cache=no_cache, + 
rope_type=rope_type, yarn_max_len=yarn_max_len, + train_seq_len=train_seq_len) + + def forward(self, x: Tensor) -> Tensor: + bsz, seqlen, dim = x.shape + qkv_out = self.c_qkv(x) + q_out, k_out, v_out = qkv_out.split([self.q_size, self.kv_size, self.kv_size], dim=-1) + q = q_out.reshape(bsz, seqlen, self.num_heads, self.head_dim) + k = k_out.reshape(bsz, seqlen, self.num_kv_heads, self.head_dim) + v = v_out.reshape(bsz, seqlen, self.num_kv_heads, self.head_dim) + q, k = F.rms_norm(q, (q.size(-1),)), F.rms_norm(k, (k.size(-1),)) + cos, sin = self.rotary(seqlen, x.device, q.dtype) + q, k = apply_rotary_emb(q, cos, sin), apply_rotary_emb(k, cos, sin) + q = q * self.q_gain.to(dtype=q.dtype)[None, None, :, None] + y = flash_attn_func(q.contiguous(), k.contiguous(), v.contiguous(), causal=True) + y = y.reshape(bsz, seqlen, dim) + return self.proj(y) + +class MLP(nn.Module): + def __init__(self, dim, mlp_mult, group_size=64, activation="swiglu"): + super().__init__() + hidden = mlp_mult * dim + self.activation = activation + if activation == "swiglu": + self.gate_up = QuinaryLinear(dim, hidden * 2, bias=False, group_size=group_size) + else: + self.fc = QuinaryLinear(dim, hidden, bias=False, group_size=group_size) + self.proj = NormedQuinaryLinear(hidden, dim, bias=False, group_size=group_size) + self.proj._zero_init = True + + def forward(self, x: Tensor) -> Tensor: + if self.activation == "swiglu": + gu = self.gate_up(x) + gate, up = gu.chunk(2, dim=-1) + return self.proj(F.silu(gate) * up) + elif self.activation == "relu": + return self.proj(torch.relu(self.fc(x))) + elif self.activation == "leaky_relu": + return self.proj(F.leaky_relu(self.fc(x), negative_slope=0.01)) + elif self.activation == "leaky_relu2": + return self.proj(F.leaky_relu(self.fc(x), negative_slope=0.5).square()) + else: # relu2 + return self.proj(torch.relu(self.fc(x)).square()) + +class Block(nn.Module): + def __init__(self, dim: int, num_heads: int, num_kv_heads: int, mlp_mult: int, + rope_base: 
class Block(nn.Module):
    """Transformer block with learned per-channel residual gains and a learned
    re-injection of the embedding stream x0."""

    def __init__(self, dim: int, num_heads: int, num_kv_heads: int, mlp_mult: int,
                 rope_base: float, qk_gain_init: float, group_size: int = 64,
                 activation: str = "swiglu", no_cache: bool = False,
                 rope_type: str = "rope", yarn_max_len: int = 4096,
                 train_seq_len: int = 1024):
        super().__init__()
        self.attn_norm = RMSNorm()
        self.mlp_norm = RMSNorm()
        self.attn = CausalSelfAttention(dim, num_heads, num_kv_heads, rope_base, qk_gain_init,
                                        group_size, no_cache, rope_type, yarn_max_len, train_seq_len)
        self.mlp = MLP(dim, mlp_mult, group_size, activation)
        # Learned per-channel gains on each residual branch.
        self.attn_scale = nn.Parameter(torch.ones(dim, dtype=torch.float32))
        self.mlp_scale = nn.Parameter(torch.ones(dim, dtype=torch.float32))
        # resid_mix[0] weights the running stream, resid_mix[1] the embedding x0.
        self.resid_mix = nn.Parameter(torch.stack((torch.ones(dim), torch.zeros(dim))).float())

    def forward(self, x: Tensor, x0: Tensor) -> Tensor:
        mix = self.resid_mix.to(dtype=x.dtype)
        blended = mix[0] * x + mix[1] * x0
        attn_out = self.attn(self.attn_norm(blended))
        x = blended + self.attn_scale.to(dtype=blended.dtype) * attn_out
        x = x + self.mlp_scale.to(dtype=x.dtype) * self.mlp(self.mlp_norm(x))
        return x


class GPT(nn.Module):
    """U-Net GPT: encoder half, decoder half with learned skip weights,
    factored (optionally tied) embedding, and soft-capped logits."""

    def __init__(self, vocab_size, num_layers, model_dim, num_heads, num_kv_heads, mlp_mult,
                 tie_embeddings, tied_embed_init_std, logit_softcap, rope_base, qk_gain_init,
                 group_size: int = 64, activation: str = "swiglu",
                 embed_dim: int = 0, softcap_type: str = "poly", no_cache: bool = False,
                 rope_type: str = "rope", yarn_max_len: int = 4096, train_seq_len: int = 1024):
        super().__init__()
        self.tie_embeddings = tie_embeddings
        self.logit_softcap = logit_softcap
        self.softcap_type = softcap_type
        # Factored embedding: tokens live in embed_dim, projected to model_dim.
        self.embed_dim = embed_dim if embed_dim > 0 else model_dim
        self.tok_emb = QATEmbedding(vocab_size, self.embed_dim)
        factored = self.embed_dim != model_dim
        self.embed_proj = QATLinear(self.embed_dim, model_dim, bias=False) if factored else None
        self.embed_proj_rev = QATLinear(model_dim, self.embed_dim, bias=False) if factored else None

        self.blocks = nn.ModuleList([
            Block(model_dim, num_heads, num_kv_heads, mlp_mult, rope_base, qk_gain_init,
                  group_size, activation, no_cache, rope_type, yarn_max_len, train_seq_len)
            for _ in range(num_layers)
        ])

        # U-Net split: first half encodes, second half decodes, each decoder
        # layer adding a learned-weighted skip from its symmetric encoder layer.
        self.num_encoder_layers = num_layers // 2
        self.num_decoder_layers = num_layers - self.num_encoder_layers
        self.num_skip_weights = min(self.num_encoder_layers, self.num_decoder_layers)
        self.skip_weights = nn.Parameter(torch.ones(self.num_skip_weights, model_dim, dtype=torch.float32))

        self.final_norm = RMSNorm()
        self.lm_head = QATLinear(model_dim, vocab_size, bias=False)
        self.lm_head._zero_init = True
        if tie_embeddings:
            # Tied: logits come from tok_emb, so the head stays frozen.
            self.lm_head.weight.requires_grad_(False)

        self.vocab_bias = nn.Parameter(torch.zeros(vocab_size, dtype=torch.float32))
        self._init_weights(tied_embed_init_std)

    def _init_weights(self, tied_embed_init_std: float) -> None:
        if self.tie_embeddings:
            nn.init.normal_(self.tok_emb.weight, mean=0.0, std=tied_embed_init_std)
        for module in self.modules():
            # Branch order matters: QuinaryLinear is an nn.Linear subclass.
            if isinstance(module, QuinaryLinear) and not getattr(module, "_zero_init", False):
                nn.init.normal_(module.weight, mean=0.0, std=0.02)
            elif isinstance(module, nn.Linear) and getattr(module, "_zero_init", False):
                nn.init.zeros_(module.weight)

    def _compute_logits(self, x: Tensor) -> Tensor:
        if self.tie_embeddings:
            proj = self.embed_proj_rev(x) if self.embed_proj_rev is not None else x
            raw = F.linear(proj, self.tok_emb.weight.to(x.dtype))
        else:
            raw = self.lm_head(x)
        return raw + self.vocab_bias.to(x.dtype)

    def _softcap(self, logits: Tensor) -> Tensor:
        s = self.logit_softcap
        if self.softcap_type == "tanh":
            return s * torch.tanh(logits / s)
        # Polynomial tanh approximation, clamped to the same [-s, s] range.
        z = torch.clamp(logits / s, -2.0, 2.0)
        z2 = z * z
        return s * torch.clamp(z * (1.0 - z2 / 3.0 + z2 * z2 / 15.0), -1.0, 1.0)

    def forward(self, input_ids: Tensor, target_ids: Tensor, reduction: str = "mean") -> Tensor:
        x = self.tok_emb(input_ids).float()
        if self.embed_proj is not None:
            x = self.embed_proj(x)
        x = F.rms_norm(x, (x.size(-1),))
        x0 = x

        # U-Net encoder/decoder with learned skip connections.
        skips = []
        for i in range(self.num_encoder_layers):
            x = self.blocks[i](x, x0)
            skips.append(x)
        for i in range(self.num_decoder_layers):
            if i < self.num_skip_weights:
                x = x + self.skip_weights[i].to(dtype=x.dtype) * skips.pop()
            x = self.blocks[self.num_encoder_layers + i](x, x0)

        x_normed = self.final_norm(x)
        flat = x_normed.reshape(-1, x_normed.size(-1))
        targets = target_ids.reshape(-1)
        logits = self._softcap(self._compute_logits(flat))

        if reduction == "none":
            return F.cross_entropy(logits.float(), targets, reduction="none").reshape(input_ids.shape)

        # Fused CE via a single logsumexp; z-loss only while training so eval
        # BPB stays a pure cross-entropy.
        logits_f = logits.float()
        lse = torch.logsumexp(logits_f, dim=-1)
        picked = logits_f.gather(1, targets.unsqueeze(1)).squeeze(1)
        loss = (lse - picked).mean()
        if self.training:
            loss = loss + 1e-4 * (lse ** 2).mean()
        return loss
def build_luts(sp, vocab_size: int, device: torch.device):
    """Build the per-token byte-count LUTs used for the BPB denominator.

    Returns (base_bytes, has_leading_space, is_boundary_token) where
    base_bytes[t] is the UTF-8 byte length of token t's piece ("▁" stripped;
    byte-fallback tokens count as exactly 1 byte), has_leading_space[t] marks
    pieces starting with "▁" (worth +1 byte unless the previous token was a
    boundary token), and is_boundary_token covers control/unknown/unused ids
    plus any padding ids beyond the SentencePiece vocab.
    """
    sp_vocab = int(sp.vocab_size())
    table = max(sp_vocab, vocab_size)
    base_bytes = np.zeros((table,), dtype=np.int16)
    leading_space = np.zeros((table,), dtype=np.bool_)
    boundary = np.ones((table,), dtype=np.bool_)
    for tid in range(sp_vocab):
        if sp.is_control(tid) or sp.is_unknown(tid) or sp.is_unused(tid):
            continue  # stays a boundary token, contributes 0 bytes
        boundary[tid] = False
        if sp.is_byte(tid):
            base_bytes[tid] = 1
            continue
        piece = sp.id_to_piece(tid)
        if piece.startswith("\u2581"):
            leading_space[tid] = True
            piece = piece[1:]
        base_bytes[tid] = len(piece.encode("utf-8"))
    return (
        torch.tensor(base_bytes, dtype=torch.int16, device=device),
        torch.tensor(leading_space, dtype=torch.bool, device=device),
        torch.tensor(boundary, dtype=torch.bool, device=device),
    )


def ld_val(pattern, seq_len, max_tok=int(os.environ.get("VAL_MAX_TOKENS", 0))):
    """Load validation shards matching `pattern` into one token tensor.

    Optionally caps at VAL_MAX_TOKENS (note: the env var is read once, when
    this def executes) and trims to a whole number of seq_len windows plus
    one extra token for the shifted targets.
    """
    files = sorted(glob.glob(pattern))
    assert files, f"No files: {pattern}"
    tokens = torch.cat([ld_shard(Path(p)) for p in files]).contiguous()
    if max_tok > 0:
        tokens = tokens[:max_tok + 1]
    usable = ((tokens.numel() - 1) // seq_len) * seq_len
    return tokens[:usable + 1]


def eval_val(args, model, rank, world_size, device, grad_accum_steps, val_tokens,
             base_bytes_lut, has_leading_space_lut, is_boundary_token_lut):
    """Full-pass validation over val_tokens.

    Returns (mean_loss, bits_per_byte, n_tokens, n_bytes), all-reduced across
    ranks; the byte count is derived from the build_luts tables so the BPB
    denominator matches SentencePiece decoding exactly.
    """
    local_batch_tokens = args.val_batch_size // (world_size * grad_accum_steps)
    local_batch_seqs = max(1, local_batch_tokens // args.train_seq_len)
    total_seqs = (val_tokens.numel() - 1) // args.train_seq_len
    # Each rank evaluates a contiguous, disjoint shard of the sequences.
    seq_lo = (total_seqs * rank) // world_size
    seq_hi = (total_seqs * (rank + 1)) // world_size
    loss_sum = torch.zeros((), device=device, dtype=torch.float64)
    token_count = torch.zeros((), device=device, dtype=torch.float64)
    byte_count = torch.zeros((), device=device, dtype=torch.float64)
    model.eval()
    with torch.no_grad():
        for first in range(seq_lo, seq_hi, local_batch_seqs):
            last = min(first + local_batch_seqs, seq_hi)
            raw = val_tokens[first * args.train_seq_len:last * args.train_seq_len + 1]
            local = raw.to(device=device, dtype=torch.int64)
            x = local[:-1].reshape(-1, args.train_seq_len)
            y = local[1:].reshape(-1, args.train_seq_len)
            with torch.autocast(device_type="cuda", dtype=torch.bfloat16):
                batch_loss = model(x, y).detach()
            n = float(y.numel())
            loss_sum += batch_loss.to(torch.float64) * n
            token_count += n
            # Bytes decoded for each target token; a leading "▁" costs one
            # extra byte unless the previous token was a boundary token.
            prev_ids, tgt_ids = x.reshape(-1), y.reshape(-1)
            tok_bytes = base_bytes_lut[tgt_ids].to(torch.int16)
            tok_bytes += (has_leading_space_lut[tgt_ids] & ~is_boundary_token_lut[prev_ids]).to(torch.int16)
            byte_count += tok_bytes.to(torch.float64).sum()
    if dist.is_available() and dist.is_initialized():
        for acc in (loss_sum, token_count, byte_count):
            dist.all_reduce(acc, op=dist.ReduceOp.SUM)
    val_loss = loss_sum / token_count
    bpb = (val_loss.item() / math.log(2.0)) * (token_count.item() / byte_count.item())
    model.train()
    return float(val_loss.item()), float(bpb), int(token_count.item()), int(byte_count.item())
+ if logfile: + open(logfile, "w", encoding="utf-8").close() + + def log0(msg: str, console: bool = True) -> None: + if not master_process: + return + if console: + print(msg) + if logfile: + with open(logfile, "a", encoding="utf-8") as f: + print(msg, file=f) + + log0(code, console=False) + log0("=" * 100, console=False) + + log0(f"Python {sys.version}", console=False) + log0(f"PyTorch {torch.__version__}", console=False) + + random.seed(args.seed) + np.random.seed(args.seed) + torch.manual_seed(args.seed) + torch.cuda.manual_seed_all(args.seed) + + sp = spm.SentencePieceProcessor(model_file=args.tokenizer_path) + # Guard against an obvious tokenizer/vocab-size mismatch that would + # silently produce a wrong byte-count LUT (and therefore a wrong BPB + # denominator) without crashing. + assert sp.vocab_size() == args.vocab_size, ( + f"tokenizer vocab_size ({sp.vocab_size()}) != args.vocab_size " + f"({args.vocab_size}); check TOKENIZER_PATH / VOCAB_SIZE") + val_tokens = ld_val(args.val_files, args.train_seq_len) + base_bytes_lut, has_leading_space_lut, is_boundary_token_lut = build_luts( + sp, args.vocab_size, device) + + # --- Model --- + base_model = GPT( + vocab_size=args.vocab_size, num_layers=args.num_layers, model_dim=args.model_dim, + num_heads=args.num_heads, num_kv_heads=args.num_kv_heads, mlp_mult=args.mlp_mult, + tie_embeddings=args.tie_embeddings, tied_embed_init_std=args.tied_embed_init_std, + logit_softcap=args.logit_softcap, rope_base=args.rope_base, qk_gain_init=args.qk_gain_init, + group_size=args.bitnet_group_size, activation=args.activation_type, + embed_dim=args.embed_dim, + softcap_type=args.softcap_type, no_cache=(args.compile_mode == "reduce-overhead"), + rope_type=args.rope_type, yarn_max_len=args.yarn_max_len, train_seq_len=args.train_seq_len, + ).to(device).bfloat16() + + for module in base_model.modules(): + if isinstance(module, nn.Linear): + module.float() + restore_low_dim_params_to_fp32(base_model) + if args.tie_embeddings: + 
base_model.lm_head.weight.requires_grad_(False) + + torch._dynamo.config.optimize_ddp = False + + compiled_model = torch.compile(base_model, mode=args.compile_mode if args.compile_mode != "default" else None) + use_find_unused = not args.tie_embeddings + model = DDP(compiled_model, device_ids=[local_rank], broadcast_buffers=False, + find_unused_parameters=use_find_unused, + static_graph=not use_find_unused, + gradient_as_bucket_view=True) if distributed else compiled_model + + # --- Optimizers --- + _excl = {"tok_emb.weight", "lm_head.weight"} + all_other_params = [(n, p) for n, p in base_model.named_parameters() + if not any(eh in n for eh in _excl)] + matrix_params = [p for n, p in all_other_params + if p.ndim == 2 and not any(pat in n for pat in CTP)] + scalar_params = [p for n, p in all_other_params + if p.ndim < 2 or any(pat in n for pat in CTP)] + + token_lr = args.tied_embed_lr if args.tie_embeddings else args.embed_lr + opt_tok = torch.optim.Adam( + [{"params": [base_model.tok_emb.weight], "lr": token_lr, "base_lr": token_lr}], + betas=(args.beta1, args.beta2), eps=args.adam_eps, fused=True) + if args.matrix_optimizer == "adamw": + opt_muon = torch.optim.AdamW( + [{"params": matrix_params, "lr": args.adam_lr, "base_lr": args.adam_lr}], + betas=(args.beta1, args.beta2), eps=args.adam_eps, weight_decay=args.adam_wd, fused=True) + else: + opt_muon = Muon(matrix_params, lr=args.matrix_lr, momentum=args.muon_momentum, + backend_steps=args.muon_backend_steps, wd=args.muon_wd) + for g in opt_muon.param_groups: + g["base_lr"] = args.matrix_lr + opt_scalar = torch.optim.Adam( + [{"params": scalar_params, "lr": args.scalar_lr, "base_lr": args.scalar_lr}], + betas=(args.beta1, args.beta2), eps=args.adam_eps, fused=True) + opt_head = torch.optim.Adam( + [{"params": [base_model.lm_head.weight], "lr": 0.0, "base_lr": 0.0}], + betas=(args.beta1, args.beta2), eps=args.adam_eps, fused=True) + + optimizers = [opt_tok, opt_muon, opt_scalar, opt_head] + + # --- Log all 
hyperparameters --- + log0("--- Hyperparameters ---", console=False) + log0(" ".join(f"{a}={getattr(args,a)}" for a in sorted(dir(args)) if not a.startswith("_") and a not in ("train_files","val_files") and not callable(getattr(args,a))), console=False) + n_params = sum(p.numel() for p in base_model.parameters()) + log0(f"params:{n_params} L:{args.num_layers} d:{args.model_dim} h:{args.num_heads} kv:{args.num_kv_heads} ws:{world_size} ga:{grad_accum_steps} s:{args.seed}") + + # --- Data loader & helpers --- + train_loader = DistributedTokenLoader(args.train_files, rank, world_size, device) + + def zero_grad_all(): + for opt in optimizers: + opt.zero_grad(set_to_none=True) + + max_wallclock_ms = 1000.0 * args.max_wallclock_seconds if args.max_wallclock_seconds > 0 else None + + def lr_mul(step: int, elapsed_ms: float): + if args.warmdown_fraction <= 0: + return 1.0 + if max_wallclock_ms is None: + warmdown_start = int(args.iterations * (1.0 - args.warmdown_fraction)) + ratio = max((args.iterations - step) / max(args.iterations * args.warmdown_fraction, 1), 0.0) if step >= warmdown_start else 1.0 + else: + warmdown_ms = max_wallclock_ms * args.warmdown_fraction + remaining_ms = max(max_wallclock_ms - elapsed_ms, 0.0) + ratio = remaining_ms / max(warmdown_ms, 1e-9) if remaining_ms <= warmdown_ms else 1.0 + return max(ratio, args.min_lr) + + _seq_switched = False + _batch_switched = False + active_seq_len = args.seq_len_start if args.seq_len_start > 0 else args.train_seq_len + active_batch_tokens = args.batch_tokens_start if args.batch_tokens_start > 0 else args.train_batch_tokens + + if args.skip_training: + log0("skip_training=1, using existing artifact final_model.quinary.ptz") + + # --- Compiler warmup --- + if args.warmup_steps > 0 and not args.skip_training: + _ms = {n: t.detach().cpu().clone() for n, t in base_model.state_dict().items()} + _os = [copy.deepcopy(o.state_dict()) for o in optimizers] + model.train() + for ws in range(args.warmup_steps): + 
zero_grad_all() + for mi in range(grad_accum_steps): + if distributed: model.require_backward_grad_sync = mi == grad_accum_steps - 1 + x, y = train_loader.next_batch(active_batch_tokens, active_seq_len, grad_accum_steps) + torch.compiler.cudagraph_mark_step_begin() + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): loss = model(x, y) + (loss * grad_scale).backward() + for o in optimizers: o.step() + zero_grad_all() + log0(f"warmup:{ws+1}/{args.warmup_steps}") + base_model.load_state_dict(_ms, strict=True) + for o, s in zip(optimizers, _os): o.load_state_dict(s) + zero_grad_all() + train_loader = DistributedTokenLoader(args.train_files, rank, world_size, device) + + # --- Main training loop --- + training_time_ms = 0.0 + stop_after_step: int | None = None + train_loss = torch.zeros((), device=device) + torch.cuda.synchronize() + t0 = time.perf_counter() + step = 0 + + while not args.skip_training: + last_step = step == args.iterations or (stop_after_step is not None and step >= stop_after_step) + + if last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + training_time_ms += 1000.0 * (time.perf_counter() - t0) + val_loss, val_bpb, val_tok_count, val_byte_count = eval_val( + args, model, rank, world_size, device, grad_accum_steps, + val_tokens, base_bytes_lut, has_leading_space_lut, is_boundary_token_lut) + tstats = quin_stats(base_model, group_size=args.bitnet_group_size) + log0(f"step:{step}/{args.iterations} val_loss:{val_loss:.4f} val_bpb:{val_bpb:.4f} " + f"train_time:{training_time_ms:.0f}ms zero_frac:{tstats['zero_frac']:.3f} " + f"eval_tokens:{val_tok_count} eval_bytes:{val_byte_count}") + torch.cuda.synchronize() + t0 = time.perf_counter() + + if last_step: + if stop_after_step is not None and step < args.iterations: + log0(f"stopping_early: wallclock_cap train_time:{training_time_ms:.0f}ms step:{step}/{args.iterations}") + break + + elapsed_ms = training_time_ms + 1000.0 * (time.perf_counter() 
- t0) + scale = lr_mul(step, elapsed_ms) + + # Sequence length schedule + if args.seq_len_start > 0 and not _seq_switched: + if max_wallclock_ms is not None: + should_switch_seq = elapsed_ms >= args.seq_schedule_fraction * max_wallclock_ms + else: + should_switch_seq = step >= int(args.iterations * args.seq_schedule_fraction) + if should_switch_seq: + active_seq_len = args.train_seq_len + _seq_switched = True + torch._dynamo.reset() + train_loader = DistributedTokenLoader(args.train_files, rank, world_size, device) + log0(f"step:{step} seq_len_switch:{args.seq_len_start}->{active_seq_len}") + + # Batch size schedule + if args.batch_tokens_start > 0 and not _batch_switched: + if max_wallclock_ms is not None: + should_switch_batch = elapsed_ms >= args.batch_schedule_fraction * max_wallclock_ms + else: + should_switch_batch = step >= int(args.iterations * args.batch_schedule_fraction) + if should_switch_batch: + active_batch_tokens = args.train_batch_tokens + _batch_switched = True + log0(f"step:{step} batch_switch:{args.batch_tokens_start}->{active_batch_tokens}") + + zero_grad_all() + train_loss.zero_() + + for micro in range(grad_accum_steps): + if distributed: + model.require_backward_grad_sync = micro == grad_accum_steps - 1 + x, y = train_loader.next_batch(active_batch_tokens, active_seq_len, grad_accum_steps) + torch.compiler.cudagraph_mark_step_begin() + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + loss = model(x, y) + train_loss.add_(loss.detach()) + (loss * grad_scale).backward() + train_loss /= grad_accum_steps + + # Muon momentum warmup (skip when AdamW is the matrix optimizer) + if args.matrix_optimizer != "adamw": + frac = min(step / args.muon_momentum_warmup_steps, 1.0) if args.muon_momentum_warmup_steps > 0 else 1.0 + for g in opt_muon.param_groups: + g["momentum"] = (1 - frac) * args.muon_momentum_warmup_start + frac * args.muon_momentum + + # LR scheduling + for opt in optimizers: + for g in opt.param_groups: + g["lr"] = 
g["base_lr"] * scale + opt.step() + zero_grad_all() + step += 1 + approx_ms = training_time_ms + 1000.0 * (time.perf_counter() - t0) + + if args.train_log_every > 0 and step % args.train_log_every == 0: + log0(f"step:{step}/{args.iterations} loss:{train_loss.item():.4f} t:{approx_ms:.0f}ms avg:{approx_ms/step:.1f}ms") + if args.churn_log_every > 0 and step % args.churn_log_every == 0: + log0(f"step:{step} churn:{churn_fn(base_model, args.bitnet_group_size):.4f} zero:{quin_stats(base_model, args.bitnet_group_size)['zero_frac']:.3f}") + + # Wallclock cap sync + if stop_after_step is None and max_wallclock_ms is not None and step % 10 == 0: + reached_cap = approx_ms >= max_wallclock_ms + if distributed: + cap_t = torch.tensor(int(reached_cap), device=device) + dist.all_reduce(cap_t, op=dist.ReduceOp.MAX) + reached_cap = bool(cap_t.item()) + if reached_cap: + stop_after_step = step + + # --- Serialization --- + if master_process and not args.skip_training: + sd = base_model.state_dict() + if base_model.tie_embeddings: + sd.pop("lm_head.weight", None) + + log0("serialize: start") + t_qsd = time.perf_counter() + q_obj, q_stats = q_sd(sd, group_size=args.bitnet_group_size, + scale_quant_bits=args.scale_quant_bits) + qsd_time = time.perf_counter() - t_qsd + log0(f"serialize: q_sd in {qsd_time:.1f}s " + f"(quinary={q_stats['quinary_params']/1e6:.2f}M params/{q_stats['quinary_bytes']/1e6:.2f}MB, " + f"fp={q_stats['fp_params']/1e6:.2f}M params/{q_stats['fp_bytes']/1e6:.2f}MB)") + + t_save = time.perf_counter() + buf = io.BytesIO() + torch.save(q_obj, buf) + raw_bytes = len(buf.getvalue()) + log0(f"serialize: torch.save raw={raw_bytes/1e6:.2f}MB in {time.perf_counter()-t_save:.1f}s") + + t_compress = time.perf_counter() + final_blob, per_stream_audit = _serialize_per_stream_v2(q_obj, level=9) + compress_time = time.perf_counter() - t_compress + artifact_bytes = len(final_blob) + log0(per_stream_audit) + log0(f"serialize: per_stream_v2 {raw_bytes/1e6:.2f}MB -> 
{artifact_bytes/1e6:.2f}MB " + f"(ratio {artifact_bytes/raw_bytes:.1%}, saved {(raw_bytes-artifact_bytes)/1e6:.2f}MB) " + f"in {compress_time:.1f}s") + + with open("final_model.quinary.ptz", "wb") as f: + f.write(final_blob) + + code_bytes = len(code.encode("utf-8")) + + total = artifact_bytes + code_bytes + log0(f"artifact:{artifact_bytes/1e6:.2f}MB quinary:{q_stats['quinary_params']}({q_stats['quinary_bytes']}B) fp:{q_stats['fp_params']}({q_stats['fp_bytes']}B) code:{code_bytes}") + log0(f"budget:{total}/{16000000} ({total/1e6:.2f}/{16.00:.2f}MB) {'FITS' if total <= 16000000 else 'OVER'}") + + # --- All ranks load roundtrip weights and evaluate --- + if distributed: + dist.barrier() + + with open("final_model.quinary.ptz", "rb") as f: + loaded = _load_artifact(f.read()) + missing, unexpected = base_model.load_state_dict(deq_sd(loaded), strict=False) + # The only "missing" key we expect is `lm_head.weight` when tied, since + # q_sd drops it from the saved state-dict. Any other missing/unexpected + # key is a serialization-roundtrip bug and should fail loudly. + expected_missing = {"lm_head.weight"} if base_model.tie_embeddings else set() + assert set(missing) <= expected_missing, f"unexpected missing keys after artifact load: {set(missing) - expected_missing}" + assert not unexpected, f"unexpected extra keys after artifact load: {unexpected}" + torch._dynamo.reset() + + q_val_loss, q_val_bpb, q_tok_count, q_byte_count = eval_val( + args, model, rank, world_size, device, grad_accum_steps, + val_tokens, base_bytes_lut, has_leading_space_lut, is_boundary_token_lut) + log0(f"final_quinary_roundtrip val_loss:{q_val_loss:.4f} val_bpb:{q_val_bpb:.4f} " + f"eval_tokens:{q_tok_count} eval_bytes:{q_byte_count}") + + # --- Score-first chunk-based CTP TTT --- + if args.ttt_steps > 0: + torch.cuda.synchronize() + t_ttt = time.perf_counter() + seq_len = args.train_seq_len + # TTT_STRIDE: sliding-window step used to slice the val stream into TTT + # examples. 
Hardcoded at 16 (canonical sp16384 quinary submission); a + # smaller stride yields more overlapping windows but more compute. + stride = 16 + ttt_chunk_tokens = args.ttt_tokens if args.ttt_tokens > 0 else 32768 + ttt_epochs = args.ttt_steps + batch_seqs = 32 + total_tokens = val_tokens.numel() - 1 + + # Select TTT params (CTP only — quinary weights are frozen) + for p in base_model.parameters(): + p.requires_grad_(False) + ttt_params = [] + for name, p in base_model.named_parameters(): + if any(pat in name for pat in CTP): + p.requires_grad_(True) + ttt_params.append(p) + n_ttt = sum(p.numel() for p in ttt_params) + ttt_opt = torch.optim.SGD(ttt_params, lr=args.ttt_lr, momentum=0.9) + for pg in ttt_opt.param_groups: + pg["initial_lr"] = pg["lr"] + log0(f"ttt: {n_ttt} CTP params, lr={args.ttt_lr}") + + # Assign sliding windows to chunks + context_size = seq_len - stride + window_starts = [ws for ws in range(0, total_tokens, stride) + if ws + context_size < total_tokens] + num_chunks = (total_tokens + ttt_chunk_tokens - 1) // ttt_chunk_tokens + chunk_windows = [[] for _ in range(num_chunks)] + for ws in window_starts: + s = 0 if ws == 0 else context_size + ci = min((ws + s) // ttt_chunk_tokens, num_chunks - 1) + chunk_windows[ci].append(ws) + log0(f"ttt: {n_ttt} CTP params, {num_chunks} chunks, {ttt_epochs} epochs, lr={args.ttt_lr}") + + loss_sum = torch.zeros((), device=device, dtype=torch.float64) + token_count = torch.zeros((), device=device, dtype=torch.float64) + byte_count = torch.zeros((), device=device, dtype=torch.float64) + + # Recompile for TTT (find_unused_parameters needed with frozen params) + torch._dynamo.reset() + compiled_ttt = torch.compile(base_model, mode=args.compile_mode if args.compile_mode != "default" else None) + + for ci in range(num_chunks): + windows = chunk_windows[ci] + if not windows: + continue + chunk_start = ci * ttt_chunk_tokens + chunk_end = min((ci + 1) * ttt_chunk_tokens, total_tokens) + my_s = len(windows) * rank // 
world_size + my_e = len(windows) * (rank + 1) // world_size + my_windows = windows[my_s:my_e] + + # Phase 1: SCORE (no_grad, compiled) + base_model.eval() + with torch.no_grad(): + for bi in range(0, len(my_windows), batch_seqs): + batch_ws = my_windows[bi:bi + batch_seqs] + bsz = len(batch_ws) + x_batch = torch.zeros(bsz, seq_len, dtype=torch.int64, device=device) + y_batch = torch.zeros(bsz, seq_len, dtype=torch.int64, device=device) + wlens = [] + for i, ws in enumerate(batch_ws): + we = min(ws + seq_len, total_tokens) + wlen = we - ws + wlens.append(wlen) + chunk_tok = val_tokens[ws:we + 1].to(dtype=torch.int64, device=device) + x_batch[i, :wlen] = chunk_tok[:-1] + y_batch[i, :wlen] = chunk_tok[1:] + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + nll = compiled_ttt(x_batch, y_batch, reduction="none").detach() + for i, ws in enumerate(batch_ws): + wlen = wlens[i] + s = 0 if ws == 0 else context_size + scored_nll = nll[i, s:wlen].to(torch.float64) + loss_sum += scored_nll.sum() + token_count += float(wlen - s) + tgt, prev = y_batch[i, s:wlen], x_batch[i, s:wlen] + tb = base_bytes_lut[tgt].to(torch.float64) + tb += (has_leading_space_lut[tgt] & ~is_boundary_token_lut[prev]).to(torch.float64) + byte_count += tb.sum() + + # Phase 2: TRAIN on chunk (score-first: already scored) + if ci < num_chunks - 1 and ttt_epochs > 0: + base_model.train() + chunk_seqs = (chunk_end - chunk_start) // seq_len + if chunk_seqs > 0: + cos_mul = 0.5 * (1.0 + math.cos(math.pi * ci / max(num_chunks - 1, 1))) + for pg in ttt_opt.param_groups: + pg["lr"] = pg.get("initial_lr", pg["lr"]) * cos_mul + my_seq_s = chunk_seqs * rank // world_size + my_seq_e = chunk_seqs * (rank + 1) // world_size + for _ep in range(ttt_epochs): + for bs in range(0, my_seq_e - my_seq_s, batch_seqs): + be = min(bs + batch_seqs, my_seq_e - my_seq_s) + start_tok = chunk_start + (my_seq_s + bs) * seq_len + end_tok = chunk_start + (my_seq_s + be) * seq_len + 1 + if end_tok > val_tokens.numel(): + 
continue + local = val_tokens[start_tok:end_tok].to(device=device, dtype=torch.int64) + x = local[:-1].reshape(-1, seq_len) + y = local[1:].reshape(-1, seq_len) + ttt_opt.zero_grad(set_to_none=True) + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + loss = compiled_ttt(x, y) + loss.backward() + if distributed: + for p in ttt_params: + if p.grad is not None: + dist.all_reduce(p.grad, op=dist.ReduceOp.AVG) + torch.nn.utils.clip_grad_norm_(ttt_params, 1.0) + ttt_opt.step() + + if master_process and ci % max(1, num_chunks // 5) == 0: + log0(f"ttt chunk:{ci+1}/{num_chunks}") + + if distributed: + for t in (loss_sum, token_count, byte_count): + dist.all_reduce(t, op=dist.ReduceOp.SUM) + ttt_val_loss = (loss_sum / token_count).item() + ttt_bpb = (ttt_val_loss / math.log(2.0)) * (token_count.item() / byte_count.item()) + for p in base_model.parameters(): + p.requires_grad_(True) + torch._dynamo.reset() + torch.cuda.synchronize() + ttt_time_ms = 1000.0 * (time.perf_counter() - t_ttt) + log0(f"ttt_eval val_loss:{ttt_val_loss:.4f} val_bpb:{ttt_bpb:.4f} " + f"time:{ttt_time_ms:.0f}ms " + f"eval_tokens:{int(token_count.item())} eval_bytes:{int(byte_count.item())}") + + if distributed: + dist.destroy_process_group() + +if __name__ == "__main__": + main() +==================================================================================================== +Python 3.13.13 | packaged by Anaconda, Inc. 
| (main, Apr 14 2026, 06:19:41) [GCC 14.3.0] +PyTorch 2.10.0+cu128 +--- Hyperparameters --- +activation_type=relu2 adam_eps=1e-08 adam_lr=0.05 adam_wd=0.05 batch_schedule_fraction=0.33 batch_tokens_start=0 beta1=0.9 beta2=0.95 bitnet_group_size=192 churn_log_every=0 compile_mode=default data_path=./data/canonical/datasets/fineweb10B_sp16384 embed_dim=380 embed_lr=0.6 fp_storage=True grad_clip_norm=0.0 head_lr=0.02 iterations=10000 logit_softcap=10.0 matrix_lr=0.035 matrix_optimizer=muon max_wallclock_seconds=599.0 min_lr=0.0 mlp_mult=4 model_dim=576 muon_backend_steps=3 muon_momentum=0.95 muon_momentum_warmup_start=0.85 muon_momentum_warmup_steps=500 muon_wd=0.0 num_heads=6 num_kv_heads=3 num_layers=10 qk_gain_init=5.0 rope_base=5000.0 rope_type=yarn run_id=quinary_seed1337 scalar_lr=0.02 scale_quant_bits=5 seed=1337 seq_len_start=0 seq_schedule_fraction=0.0 skip_training=False softcap_type=poly tie_embeddings=1 tied_embed_init_std=0.005 tied_embed_lr=0.02 tokenizer_path=./data/canonical/tokenizers/fineweb_16384_bpe.model train_batch_tokens=524288 train_log_every=1000 train_seq_len=1024 ttt_lr=0.005 ttt_steps=3 ttt_tokens=32768 val_batch_size=524288 val_loss_every=0 vocab_size=16384 warmdown_fraction=0.2 warmup_steps=5 yarn_max_len=2048 +params:52828668 L:10 d:576 h:6 kv:3 ws:8 ga:1 s:1337 +warmup:1/5 +warmup:2/5 +warmup:3/5 +warmup:4/5 +warmup:5/5 +step:1000/10000 loss:3.8318 t:77272ms avg:77.3ms +step:2000/10000 loss:3.5168 t:154418ms avg:77.2ms +step:3000/10000 loss:3.4458 t:231363ms avg:77.1ms +step:4000/10000 loss:3.2941 t:308098ms avg:77.0ms +step:5000/10000 loss:3.5063 t:384842ms avg:77.0ms +step:6000/10000 loss:3.5410 t:461611ms avg:76.9ms +step:7000/10000 loss:3.4814 t:538278ms avg:76.9ms +step:7800/10000 val_loss:3.2719 val_bpb:1.1606 train_time:599689ms zero_frac:0.261 eval_tokens:37146624 eval_bytes:151078879 +stopping_early: wallclock_cap train_time:599689ms step:7800/10000 +serialize: start +serialize: q_sd in 0.1s (quinary=36.50M params/12.36MB, 
fp=6.90M params/7.11MB) +serialize: torch.save raw=19.53MB in 0.0s +per_stream_v2: meta=0.14MB, 63 bulk payloads (lzma=62, lrzip=1; raw=3, base5=1, base5_T=37, bitmask_T=22), qkv_splits=10, total=15.64MB +serialize: per_stream_v2 19.53MB -> 15.64MB (ratio 80.1%, saved 3.89MB) in 35.7s +artifact:15.64MB quinary:36495360(12355360B) fp:6896124(7114168B) code:79272 +budget:15721124/16000000 (15.72/16.00MB) FITS +final_quinary_roundtrip val_loss:3.2795 val_bpb:1.1633 eval_tokens:37146624 eval_bytes:151078879 +ttt: 42364 CTP params, lr=0.005 +ttt: 42364 CTP params, 1134 chunks, 3 epochs, lr=0.005 +ttt chunk:1/1134 +ttt chunk:227/1134 +ttt chunk:453/1134 +ttt chunk:679/1134 +ttt chunk:905/1134 +ttt chunk:1131/1134 +ttt_eval val_loss:3.2120 val_bpb:1.1394 time:216087ms eval_tokens:37146624 eval_bytes:151078879 diff --git a/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/quinary_seed42.txt b/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/quinary_seed42.txt new file mode 100644 index 0000000000..18bc16852a --- /dev/null +++ b/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/quinary_seed42.txt @@ -0,0 +1,1689 @@ +"Quinary training script for OpenAI's Parameter Golf Challenge. Based on ternary submission by Ciprian-Florin Ifrim (24 March 2026). Quinary: {-2,-1,0,+1,+2} with base-5 packing (3 quins/byte = 2.667 bpw vs ternary 1.6 bpw)." 
import copy
import glob
import io
import math
import os
import random
import subprocess
import sys
import tempfile
import time
import lzma
from pathlib import Path
import numpy as np
import sentencepiece as spm
import torch
import torch.distributed as dist
import torch.nn.functional as F
from torch import Tensor, nn
from torch.nn.parallel import DistributedDataParallel as DDP
from flash_attn_interface import flash_attn_func

def _e(k, d, t=str):
    """Read env var `k` with default `d`, coerced to type `t`.

    The default is stringified first, so `d` may be given in its natural
    type. For t=bool the string must parse as an integer ("0"/"1") because
    the coercion is `bool(int(v))` — a literal "True"/"False" env value
    would raise ValueError. NOTE(review): all callers below use int-like
    defaults, so this is safe as used.
    """
    v = os.environ.get(k, str(d))
    if t == bool: return bool(int(v))
    return t(v)

class Hyperparameters:
    """Run configuration: class attributes, each overridable via env var.

    Values are read once at class-definition (module import) time.
    """
    # Defaults below match the canonical SP16384 quinary submission config —
    # i.e. exactly what `run.sh` passes through to torchrun. A bare
    # `torchrun --standalone --nproc_per_node=8 train_gpt.py` (no env vars)
    # therefore reproduces the submission. `run.sh` is still the
    # documentation-of-record for the canonical config; these defaults are
    # mirrored from it so the two never silently disagree.
    data_path = _e("DATA_PATH", "./data/canonical/datasets/fineweb10B_sp16384")
    # [0-9] prefix matches only NNNNNN-suffixed shard files, ignoring any
    # sibling parallel-array files that might share the prefix.
    train_files = os.path.join(data_path, "fineweb_train_[0-9]*.bin")
    val_files = os.path.join(data_path, "fineweb_val_[0-9]*.bin")
    tokenizer_path = _e("TOKENIZER_PATH", "./data/canonical/tokenizers/fineweb_16384_bpe.model")
    # run_id defaults to a timestamped name so unnamed runs never collide.
    run_id = os.environ.get("RUN_ID", f"run_{int(time.time())}")
    seed = _e("SEED", 42, int)
    compile_mode = _e("COMPILE_MODE", "default")
    val_batch_size = _e("VAL_BATCH_SIZE", 524288, int)
    val_loss_every = _e("VAL_LOSS_EVERY", 0, int) # 0 = no in-training val
    train_log_every = _e("TRAIN_LOG_EVERY", 1000, int)
    iterations = _e("ITERATIONS", 10000, int)
    warmdown_fraction = _e("WARMDOWN_FRACTION", 0.2, float)
    min_lr = _e("MIN_LR", 0.0, float) # floor on the LR multiplier (fraction of base LR)
    warmup_steps = _e("WARMUP_STEPS", 5, int)
    train_batch_tokens = _e("TRAIN_BATCH_TOKENS", 524288, int)
    train_seq_len = _e("TRAIN_SEQ_LEN", 1024, int)
    # Wallclock budget; training stops early once this cap is reached.
    max_wallclock_seconds = _e("MAX_WALLCLOCK_SECONDS", 599.0, float)
    # --- Model architecture ---
    vocab_size = _e("VOCAB_SIZE", 16384, int)
    num_layers = _e("NUM_LAYERS", 10, int)
    num_kv_heads = _e("NUM_KV_HEADS", 3, int)
    model_dim = _e("MODEL_DIM", 576, int)
    num_heads = _e("NUM_HEADS", 6, int)
    mlp_mult = _e("MLP_MULT", 4, int)
    tie_embeddings = _e("TIE_EMBEDDINGS", 1, int)
    rope_base = _e("ROPE_BASE", 5000.0, float)
    rope_type = _e("ROPE_TYPE", "yarn")
    yarn_max_len = _e("YARN_MAX_LEN", 2048, int)
    logit_softcap = _e("LOGIT_SOFTCAP", 10.0, float)
    softcap_type = _e("SOFTCAP_TYPE", "poly")
    tied_embed_init_std = _e("TIED_EMBED_INIT_STD", 0.005, float)
    qk_gain_init = _e("QK_GAIN_INIT", 5.0, float)
    activation_type = _e("ACTIVATION", "relu2")
    embed_dim = _e("EMBED_DIM", 380, int)
    # --- Learning rates (per parameter group) ---
    embed_lr = _e("EMBED_LR", 0.6, float)
    head_lr = _e("HEAD_LR", 0.02, float)
    adam_lr = _e("ADAM_LR", 0.05, float)
    adam_wd = _e("ADAM_WD", 0.05, float)
    tied_embed_lr = _e("TIED_EMBED_LR", 0.02, float)
    # --- Sequence-length / batch-size schedules (0 = disabled) ---
    seq_len_start = _e("SEQ_LEN_START", 0, int)
    seq_schedule_fraction = _e("SEQ_SCHEDULE_FRACTION", 0.0, float)
    batch_tokens_start = _e("BATCH_TOKENS_START", 0, int)
    batch_schedule_fraction = _e("BATCH_SCHEDULE_FRACTION", 0.33, float)
    churn_log_every = _e("CHURN_LOG_EVERY", 0, int)
    # --- Muon (matrix) optimizer ---
    matrix_lr = _e("MATRIX_LR", 0.035, float)
    scalar_lr = _e("SCALAR_LR", 0.02, float)
    muon_momentum = _e("MUON_MOMENTUM", 0.95, float)
    muon_backend_steps = _e("MUON_BACKEND_STEPS", 3, int)
    muon_wd = _e("MUON_WD", 0.0, float)
    matrix_optimizer = _e("MATRIX_OPTIMIZER", "muon")
    muon_momentum_warmup_start = _e("MUON_MOMENTUM_WARMUP_START", 0.85, float)
    muon_momentum_warmup_steps = _e("MUON_MOMENTUM_WARMUP_STEPS", 500, int)
    beta1 = _e("BETA1", 0.9, float)
    beta2 = _e("BETA2", 0.95, float)
    adam_eps = _e("ADAM_EPS", 1e-8, float)
    grad_clip_norm = _e("GRAD_CLIP_NORM", 0.0, float)
    bitnet_group_size = _e("BITNET_GROUP_SIZE", 192, int)
    # --- Test-time training (TTT) over the scored val stream ---
    ttt_steps = _e("TTT_STEPS", 3, int)
    ttt_lr = _e("TTT_LR", 0.005, float)
    ttt_tokens = _e("TTT_TOKENS", 32768, int)
    skip_training = _e("SKIP_TRAINING", 0, bool)
    scale_quant_bits = _e("SCALE_QUANT_BITS", 5, int) # 5-bit log-delta scale quantization
    fp_storage = True # FP8 storage for non-quinary (small) tensors

# Calibration / fp16-stored scalar+vector params adapted by score-first TTT.
# `scale_correction` was previously listed here but its gradient is blocked by
# the STE detach in QuinaryLinear.forward, so including it as a TTT target was
# a no-op. Excluded from the CTP tuple so the TTT optimizer reflects what
# actually receives gradients (~42k params, not 232k). The parameter still
# exists in the state-dict and is stored fp16 in the artifact (via the ndim<2
# bucket); it is just not selected for TTT adaptation.
+CTP = ("attn_scale","mlp_scale","resid_mix","q_gain","skip_weights","vocab_bias") + +def pack_quinary(q: Tensor): + f = (q.reshape(-1).to(torch.int8) + 2).numpy() # {-2..2} -> {0..4} + n = len(f) + p = (3 - n % 3) % 3 + if p: f = np.concatenate([f, np.zeros(p, dtype=np.int8)]) + g = f.reshape(-1, 3).astype(np.uint8) + return (g[:,0] + g[:,1]*5 + g[:,2]*25).tobytes(), n # max = 4+20+100 = 124 + +def unpack_quinary(data: bytes, n: int) -> Tensor: + v = np.frombuffer(data, dtype=np.uint8).astype(np.int16) + t = np.zeros((len(v), 3), dtype=np.int8) + for i in range(3): t[:,i] = v % 5; v //= 5 + return torch.from_numpy(t.reshape(-1)[:n].astype(np.int8) - 2) + + +def pack_quinary_bitmask(q) -> tuple[bytes, int, int]: + """Pack quinary symbols ∈ {-2..2} as three concatenated bit-planes. + + Layout (big-endian within bytes via numpy.packbits): + [zero_mask: ceil(n_total / 8) bytes — 1 bit/symbol, 1=zero] + [sign_bits: ceil(n_nonzero / 8) bytes — 1 bit/nonzero, 1=negative] + [mag2_bits: ceil(n_nonzero / 8) bytes — 1 bit/nonzero, 1=|symbol|==2] + + Returns: (concat_bytes, n_total, n_nonzero). + Each plane has homogeneous bit statistics so the downstream compressor + can model them independently rather than fighting a multimodal mixture. 
+ """ + if isinstance(q, torch.Tensor): + a = q.reshape(-1).to(torch.int8).numpy() + else: + a = np.asarray(q, dtype=np.int8).reshape(-1) + n_total = a.size + nz_mask = (a != 0) # bool, len n_total + n_nonzero = int(nz_mask.sum()) + nz_vals = a[nz_mask] # int8, len n_nonzero + sign_bits = (nz_vals < 0).astype(np.uint8) # 1 = negative + mag2_bits = (np.abs(nz_vals) == 2).astype(np.uint8) + zero_packed = np.packbits((~nz_mask).astype(np.uint8)) # 1 bit means "this symbol is zero" + sign_packed = np.packbits(sign_bits) + mag2_packed = np.packbits(mag2_bits) + return (zero_packed.tobytes() + sign_packed.tobytes() + mag2_packed.tobytes(), + n_total, n_nonzero) + + +def unpack_quinary_bitmask(data: bytes, n_total: int, n_nonzero: int) -> Tensor: + """Inverse of pack_quinary_bitmask. Returns int8 tensor of length n_total.""" + z_len = (n_total + 7) // 8 + s_len = (n_nonzero + 7) // 8 + m_len = (n_nonzero + 7) // 8 + expected = z_len + s_len + m_len + if len(data) != expected: + raise ValueError(f"bitmask data size {len(data)} != expected {expected} " + f"(n_total={n_total}, n_nonzero={n_nonzero})") + z_bytes = np.frombuffer(data[:z_len], dtype=np.uint8) + s_bytes = np.frombuffer(data[z_len:z_len + s_len], dtype=np.uint8) + m_bytes = np.frombuffer(data[z_len + s_len:], dtype=np.uint8) + is_zero = np.unpackbits(z_bytes)[:n_total].astype(bool) + sign = np.unpackbits(s_bytes)[:n_nonzero].astype(bool) + mag2 = np.unpackbits(m_bytes)[:n_nonzero].astype(bool) + nz_vals = np.where(mag2, 2, 1).astype(np.int8) + nz_vals = np.where(sign, -nz_vals, nz_vals) + out = np.zeros(n_total, dtype=np.int8) + out[~is_zero] = nz_vals + return torch.from_numpy(out) + + +# Artifact archive: layout-aware per-stream v2 (header byte 0x03). +# +# - For each quinary tensor: screen the 4 layouts {base5, base5_T, +# bitmask, bitmask_T} by LZMA9-compressed size, then run LZMA9 vs +# lrzip-zpaq -L9 only on the winning layout. Bounded heuristic +# with an LZMA floor — *not* an exhaustive 4×2 search. 
+# - For c_qkv.weight: split rows into Q / K / V sub-payloads each chosen +# independently (Q, K, V have different trained distributions). +# - For other bulk fields (FP8 embeddings, large fp16 tensors): torch.save +# the value and compress with min(lzma, lrzip). +# - Robust to the seed-dependent lrzip cliff observed on full-blob +# compression: even if lrzip ZPAQ underperforms on one tensor's bytes, +# lzma takes over for that tensor specifically. +_COMPRESSOR_LZMA = 0 +_COMPRESSOR_LRZIP_ZPAQ = 1 +_COMPRESSOR_PER_STREAM_V2 = 3 + +# Threshold for treating a value as "bulk" (compressed independently). +_PER_STREAM_BULK_BYTES = 64 * 1024 + +# v2 layout IDs — what kind of body is stored in a bulk payload. +_LAYOUT_RAW = 0 # opaque bytes (torch.save output for non-quinary fields) +_LAYOUT_Q_BASE5 = 1 # base-5 packed quinary symbols, canonical row-major order +_LAYOUT_Q_BASE5_T = 2 # base-5 packed quinary symbols, transposed (column-major) +_LAYOUT_Q_BITMASK = 3 # bitmask packed (zero|sign|mag2), canonical +_LAYOUT_Q_BITMASK_T = 4 # bitmask packed, transposed +_LAYOUT_NAMES = { + _LAYOUT_RAW: "raw", + _LAYOUT_Q_BASE5: "base5", + _LAYOUT_Q_BASE5_T: "base5_T", + _LAYOUT_Q_BITMASK: "bitmask", + _LAYOUT_Q_BITMASK_T: "bitmask_T", +} + +# v2 sentinel for metadata refs (distinct string from v1 to avoid version confusion). +_BULK_SENTINEL_V2 = "__BULK_REF_V2__" + + +def _lrzip_compress_bytes(data: bytes, level: int = 9) -> bytes: + """Compress raw bytes via `lrzip -z` (ZPAQ). 
Returns body only (no header).""" + with tempfile.NamedTemporaryFile(delete=False) as f: + f.write(data); in_path = f.name + out_path = in_path + ".lrz" + try: + try: + subprocess.run(["lrzip", "-z", "-L", str(level), "-q", "-f", in_path], + check=True, capture_output=True) + except FileNotFoundError as e: + raise RuntimeError("lrzip binary not found; run `apt-get install lrzip`") from e + with open(out_path, "rb") as f: + return f.read() + finally: + for p in (in_path, out_path): + try: os.unlink(p) + except FileNotFoundError: pass + + +def _lrzip_decompress_bytes(body: bytes) -> bytes: + """Decompress an lrzip ZPAQ body (no header).""" + with tempfile.NamedTemporaryFile(delete=False, suffix=".lrz") as f: + f.write(body); in_path = f.name + out_path = in_path[:-4] + try: + try: + subprocess.run(["lrzip", "-d", "-q", "-f", in_path], + check=True, capture_output=True) + except FileNotFoundError as e: + raise RuntimeError("lrzip binary not found; required to load this artifact") from e + with open(out_path, "rb") as f: + return f.read() + finally: + for p in (in_path, out_path): + try: os.unlink(p) + except FileNotFoundError: pass + + +def _pick_best_compressor(data: bytes, level: int = 9, has_lrzip: bool = True) -> tuple[int, bytes]: + """Try lzma + (optionally) lrzip; return (method_id, body) for the smaller. + + This is the per-stream robustness: even if one compressor cliffs on a + given byte distribution (the seed=7 issue), the other usually doesn't. + Any failure (missing binary, runtime error, broken lrzip backend) on + lrzip is silently skipped — lzma is the always-available floor. 
+ """ + candidates: list[tuple[int, bytes]] = [(_COMPRESSOR_LZMA, lzma.compress(data, preset=level))] + if has_lrzip: + try: + candidates.append((_COMPRESSOR_LRZIP_ZPAQ, _lrzip_compress_bytes(data, level))) + except (RuntimeError, subprocess.CalledProcessError, OSError): + pass + return min(candidates, key=lambda c: len(c[1])) + + +def _is_bulk(value) -> bool: + """Decide whether a state_dict field should be peeled off for per-stream compression.""" + if isinstance(value, (bytes, bytearray)): + return len(value) >= _PER_STREAM_BULK_BYTES + if isinstance(value, torch.Tensor): + return value.numel() * value.element_size() >= _PER_STREAM_BULK_BYTES + return False + + +def _load_artifact(blob: bytes) -> dict: + """Load a per-stream v2 archive (header byte 0x03) into a state_dict.""" + return _deserialize_per_stream_v2(blob) + + +# --------------------------------------------------------------------------- +# v2 archive: layout-aware per-stream compression +# --------------------------------------------------------------------------- + +def _gen_quinary_layout_candidates(symbols_2d: np.ndarray) -> list[tuple[int, bytes, dict]]: + """For a 2D int8 symbol matrix in {-2..2}, return a list of + (layout_id, body_bytes, layout_meta) tuples — every supported layout. + + layout_meta carries the per-layout fields needed at deserialize time + (n_quins, n_nonzero, etc.). 
+ """ + rows, cols = symbols_2d.shape + flat = symbols_2d.reshape(-1) + transposed = symbols_2d.T.reshape(-1).copy() # contiguous transpose + + # base-5 canonical + body_b5, n_b5 = pack_quinary(torch.from_numpy(flat)) + # base-5 transposed + body_b5_t, n_b5_t = pack_quinary(torch.from_numpy(transposed)) + # bitmask canonical + body_bm, nt_bm, nz_bm = pack_quinary_bitmask(torch.from_numpy(flat)) + # bitmask transposed + body_bm_t, nt_bm_t, nz_bm_t = pack_quinary_bitmask(torch.from_numpy(transposed)) + + return [ + (_LAYOUT_Q_BASE5, body_b5, {"n_quins": n_b5}), + (_LAYOUT_Q_BASE5_T, body_b5_t, {"n_quins": n_b5_t}), + (_LAYOUT_Q_BITMASK, body_bm, {"n_total": nt_bm, "n_nonzero": nz_bm}), + (_LAYOUT_Q_BITMASK_T, body_bm_t, {"n_total": nt_bm_t, "n_nonzero": nz_bm_t}), + ] + + +def _layout_to_canonical_bytes(layout: int, body: bytes, rows: int, cols: int, + layout_meta: dict) -> bytes: + """Inverse of _gen_quinary_layout_candidates: take a body in one of the + layout encodings and return the canonical base-5-packed bytes that + `deq_sd` expects in entry["packed"].""" + if layout == _LAYOUT_Q_BASE5: + # Already canonical. 
+ return body + if layout == _LAYOUT_Q_BASE5_T: + symbols_t = unpack_quinary(body, layout_meta["n_quins"]).numpy() + symbols = symbols_t.reshape(cols, rows).T.reshape(-1).copy() + out, _ = pack_quinary(torch.from_numpy(symbols)) + return out + if layout == _LAYOUT_Q_BITMASK: + symbols = unpack_quinary_bitmask(body, layout_meta["n_total"], layout_meta["n_nonzero"]).numpy() + out, _ = pack_quinary(torch.from_numpy(symbols)) + return out + if layout == _LAYOUT_Q_BITMASK_T: + symbols_t = unpack_quinary_bitmask(body, layout_meta["n_total"], layout_meta["n_nonzero"]).numpy() + symbols = symbols_t.reshape(cols, rows).T.reshape(-1).copy() + out, _ = pack_quinary(torch.from_numpy(symbols)) + return out + raise ValueError(f"unknown layout id {layout}") + + +def _qkv_split_sizes(name: str, rows: int, cols: int) -> tuple[int, int] | None: + """For a c_qkv weight, derive (q_rows, kv_rows) row-counts. + + Standard transformer convention: q_size = num_heads * head_dim = model_dim + (the square attention assumption). So q_rows == cols. The remaining rows + are split equally between K and V: kv_rows = (rows - cols) // 2. + + Returns None if name doesn't look like c_qkv or the ratio doesn't decompose + cleanly (in which case we don't split, just compress as one tensor). + """ + if not name.endswith("c_qkv.weight"): + return None + if rows <= cols: + return None # not a GQA shape; or single-head case where Q==K==V + extra = rows - cols # 2 * kv_rows + if extra % 2 != 0: + return None + kv_rows = extra // 2 + q_rows = cols + if q_rows + 2 * kv_rows != rows: + return None + return q_rows, kv_rows + + +def _serialize_per_stream_v2(state_dict: dict, level: int = 9) -> tuple[bytes, str]: + """v2 archive: layout-aware per-stream compression. + + For each quinary entry, generate up to 4 layout candidates + (base5, base5_T, bitmask, bitmask_T), screen them by LZMA9-compressed + size, then compress the winning layout with min(LZMA9, lrzip-zpaq). 
+ For c_qkv weights, the row-block is split into Q/K/V sub-payloads + handled independently. All other bulk fields use the v1 raw-bytes path. + + This is a bounded-cost heuristic with an LZMA floor — *not* an + exhaustive 4×2 search. It can in principle miss a (layout, compressor) + pair where the LZMA-screen-loser would have won under lrzip; in + practice this is rare on this stack and the LZMA floor caps the + worst case at the canonical base5+LZMA encoding. + + Returns (archive_bytes, audit_string) so the caller can route the + audit line through the run's logger. + """ + from shutil import which + has_lrzip = which("lrzip") is not None + + bulk_bodies: list[tuple[int, bytes]] = [] # (compressor_method, compressed_body) + metadata: dict = {} + audit_lines: list[str] = [] + layout_counts = {n: 0 for n in _LAYOUT_NAMES.values()} + method_counts = {"lzma": 0, "lrzip": 0} + qkv_split_count = 0 + + def _store(layout: int, body: bytes) -> tuple[int, int, int]: + """Compress and store; return (payload_idx, method_id, compressed_size).""" + method, comp = _pick_best_compressor(body, level, has_lrzip) + bulk_bodies.append((method, comp)) + method_counts["lzma" if method == _COMPRESSOR_LZMA else "lrzip"] += 1 + layout_counts[_LAYOUT_NAMES[layout]] += 1 + return len(bulk_bodies) - 1, method, len(comp) + + def _best_quinary_layout(symbols_2d: np.ndarray) -> tuple[int, int, int, dict, int]: + """Generate all layout candidates, screen with lzma to pick best layout, + then compress winner with min(lzma, lrzip) for the final body. + + Cheap-screen + refine cuts serialize compute from O(layouts*compressors) + to O(layouts) lzma + 1 lrzip, keeping serialize time bounded even when + lrzip is slow on bad streams. + + Returns (chosen_layout, payload_idx, compressed_size, layout_meta, n_candidates_evaluated). 
+ """ + cands = _gen_quinary_layout_candidates(symbols_2d) + best_layout, best_body, best_meta = None, None, None + best_lzma_size = None + for layout, body, meta in cands: + lzma_size = len(lzma.compress(body, preset=level)) + if best_lzma_size is None or lzma_size < best_lzma_size: + best_lzma_size = lzma_size + best_layout, best_body, best_meta = layout, body, meta + # Now compress the winning layout with min(lzma, lrzip) for the actual stored body. + comp_method, comp_body = _pick_best_compressor(best_body, level, has_lrzip) + bulk_bodies.append((comp_method, comp_body)) + method_counts["lzma" if comp_method == _COMPRESSOR_LZMA else "lrzip"] += 1 + layout_counts[_LAYOUT_NAMES[best_layout]] += 1 + return best_layout, len(bulk_bodies) - 1, len(comp_body), best_meta, len(cands) + + for name, entry in state_dict.items(): + if not isinstance(entry, dict): + metadata[name] = entry + continue + new_entry = dict(entry) # shallow copy + + if entry.get("type") == "quinary" and "packed" in entry: + # Reconstruct full symbol matrix from canonical packed bytes. 
+ rows, cols = entry["shape"][0], entry["padded_cols"] + symbols_full = unpack_quinary(entry["packed"], entry["n_quins"]).numpy() + symbols_2d = symbols_full[:rows * cols].reshape(rows, cols) + + split = _qkv_split_sizes(name, rows, cols) + if split is not None: + q_rows, kv_rows = split + qkv_split_count += 1 + refs = [] + for sub_name, sub_2d in ( + ("Q", symbols_2d[:q_rows]), + ("K", symbols_2d[q_rows:q_rows + kv_rows]), + ("V", symbols_2d[q_rows + kv_rows:]), + ): + layout, idx, sz, meta, _ = _best_quinary_layout(sub_2d.copy()) + sub_rows, sub_cols = sub_2d.shape + refs.append({ + "_v2": _BULK_SENTINEL_V2, "idx": idx, "layout": layout, + "rows": sub_rows, "cols": sub_cols, "meta": meta, + }) + new_entry["packed"] = ("__QKV_SPLIT__", refs) + audit_lines.append(f" {name}: qkv_split layouts={[_LAYOUT_NAMES[r['layout']] for r in refs]}") + else: + layout, idx, sz, meta, _ = _best_quinary_layout(symbols_2d.copy()) + new_entry["packed"] = { + "_v2": _BULK_SENTINEL_V2, "idx": idx, "layout": layout, + "rows": rows, "cols": cols, "meta": meta, + } + audit_lines.append(f" {name}: {_LAYOUT_NAMES[layout]} ({sz/1e6:.3f}MB)") + + # Other bulk fields (fp8 data, fp16 large tensors, scale_delta tensors, + # etc.) — fall back to v1's torch.save+pick path. + for key, value in list(new_entry.items()): + if key == "packed": # already handled + continue + if _is_bulk(value): + buf = io.BytesIO(); torch.save(value, buf) + method, comp = _pick_best_compressor(buf.getvalue(), level, has_lrzip) + bulk_bodies.append((method, comp)) + method_counts["lzma" if method == _COMPRESSOR_LZMA else "lrzip"] += 1 + layout_counts["raw"] += 1 + new_entry[key] = { + "_v2": _BULK_SENTINEL_V2, "idx": len(bulk_bodies) - 1, + "layout": _LAYOUT_RAW, + } + metadata[name] = new_entry + + # Compress metadata. + meta_buf = io.BytesIO(); torch.save(metadata, meta_buf) + meta_compressed = lzma.compress(meta_buf.getvalue(), preset=level) + + # Assemble archive (header 0x03). 
+ out = bytearray() + out.append(_COMPRESSOR_PER_STREAM_V2) + out.extend(len(meta_compressed).to_bytes(4, "little")) + out.extend(meta_compressed) + out.extend(len(bulk_bodies).to_bytes(4, "little")) + for method, body in bulk_bodies: + out.append(method) + out.extend(len(body).to_bytes(4, "little")) + out.extend(body) + + layout_summary = ", ".join(f"{n}={c}" for n, c in layout_counts.items() if c) + audit = (f"per_stream_v2: meta={len(meta_compressed)/1e6:.2f}MB, " + f"{len(bulk_bodies)} bulk payloads " + f"(lzma={method_counts['lzma']}, lrzip={method_counts['lrzip']}; " + f"{layout_summary}), qkv_splits={qkv_split_count}, " + f"total={len(out)/1e6:.2f}MB") + return bytes(out), audit + + +def _deserialize_per_stream_v2(blob: bytes) -> dict: + """Inverse of _serialize_per_stream_v2. Returns a state_dict whose quinary + entries have canonical entry["packed"] bytes — `deq_sd` is unaware of the + archive layer.""" + cursor = 0 + assert blob[cursor] == _COMPRESSOR_PER_STREAM_V2, \ + f"expected v2 header 0x03, got {blob[0]:#x}" + cursor += 1 + meta_size = int.from_bytes(blob[cursor:cursor + 4], "little"); cursor += 4 + meta_blob = blob[cursor:cursor + meta_size]; cursor += meta_size + metadata = torch.load(io.BytesIO(lzma.decompress(meta_blob)), + map_location="cpu", weights_only=False) + n_payloads = int.from_bytes(blob[cursor:cursor + 4], "little"); cursor += 4 + decompressed: list[bytes] = [] + for _ in range(n_payloads): + method = blob[cursor]; cursor += 1 + size = int.from_bytes(blob[cursor:cursor + 4], "little"); cursor += 4 + body = blob[cursor:cursor + size]; cursor += size + if method == _COMPRESSOR_LZMA: + decompressed.append(lzma.decompress(body)) + elif method == _COMPRESSOR_LRZIP_ZPAQ: + decompressed.append(_lrzip_decompress_bytes(body)) + else: + raise ValueError(f"unknown compressor method {method:#x}") + + def _is_v2_ref(v): + return isinstance(v, dict) and v.get("_v2") == _BULK_SENTINEL_V2 + + for name, entry in metadata.items(): + if not 
isinstance(entry, dict): + continue + for key, value in list(entry.items()): + if _is_v2_ref(value): + if value["layout"] == _LAYOUT_RAW: + entry[key] = torch.load(io.BytesIO(decompressed[value["idx"]]), + map_location="cpu", weights_only=False) + else: + entry[key] = _layout_to_canonical_bytes( + value["layout"], decompressed[value["idx"]], + value["rows"], value["cols"], value["meta"]) + elif (isinstance(value, tuple) and len(value) == 2 + and value[0] == "__QKV_SPLIT__"): + # Reconstruct each part's symbol matrix, concat along row dim, + # repack as canonical base-5. + refs = value[1] + sub_symbols = [] + for ref in refs: + canonical_body = _layout_to_canonical_bytes( + ref["layout"], decompressed[ref["idx"]], + ref["rows"], ref["cols"], ref["meta"]) + n = ref["rows"] * ref["cols"] + sub_syms = unpack_quinary(canonical_body, n).numpy()[:n] + sub_symbols.append(sub_syms.reshape(ref["rows"], ref["cols"])) + full = np.concatenate(sub_symbols, axis=0).reshape(-1).copy() + entry[key], _ = pack_quinary(torch.from_numpy(full)) + return metadata + + +def _quant_scale(scale_fp16: Tensor, bits: int) -> dict: + """Quantize per-group scales to bits-bit log-deltas. 
Returns dict with + `scale_anchor` (fp16), `scale_step` (fp16), `scale_delta` (packed bytes + or int8 tensor), plus `scale_bits` to dispatch in deq_sd.""" + scale_f = scale_fp16.float().reshape(-1) + n_groups = scale_f.numel() + log_scale = torch.log2(scale_f.clamp(min=1e-12)) + lo, hi = float(log_scale.min().item()), float(log_scale.max().item()) + anchor = (lo + hi) / 2.0 + spread = hi - lo + levels = 1 << bits + half = levels // 2 + step = max(spread / max(levels - 1, 1), 1e-9) + delta = ((log_scale - anchor) / step).round().clamp(-half, half - 1).to(torch.int32) + if bits == 4: + d = (delta + half).to(torch.uint8).numpy() + if len(d) % 2: + d = np.concatenate([d, np.zeros(1, dtype=np.uint8)]) + packed = (d[::2] | (d[1::2] << 4)).astype(np.uint8) + delta_stored = torch.from_numpy(packed.copy()) + elif bits == 5: + delta_stored = (delta + half).to(torch.uint8) + else: # 8 + delta_stored = delta.to(torch.int8) + return { + "scale_anchor": torch.tensor(anchor, dtype=torch.float16), + "scale_step": torch.tensor(step, dtype=torch.float16), + "scale_delta": delta_stored, + "scale_n_groups": n_groups, + "scale_bits": bits, + } + +def _dequant_scale(entry: dict) -> Tensor: + """Reconstruct fp16 per-group scales from the quantized representation.""" + bits = int(entry["scale_bits"]) + half = (1 << bits) // 2 + n_groups = int(entry["scale_n_groups"]) + if bits == 4: + packed = entry["scale_delta"].to(torch.uint8).numpy() + low = packed & 0x0F + high = (packed >> 4) & 0x0F + d = np.empty(packed.size * 2, dtype=np.uint8) + d[0::2] = low + d[1::2] = high + delta = torch.from_numpy(d[:n_groups].astype(np.int32) - half) + elif bits == 5: + delta = entry["scale_delta"].to(torch.int32) - half + else: # 8 + delta = entry["scale_delta"].to(torch.int32) + anchor = entry["scale_anchor"].float() + step = entry["scale_step"].float() + return (2.0 ** (anchor + delta.float() * step)).to(torch.float16) + + +def q_sd(state_dict: dict, group_size: int = 64, + scale_quant_bits: int = 0) -> 
tuple[dict, dict]: + quantized = {} + stats = {"quinary_params": 0, "quinary_bytes": 0, "fp_params": 0, "fp_bytes": 0} + for name, tensor in state_dict.items(): + t = tensor.detach().cpu().float().contiguous() + t_orig_shape = list(t.shape) + if t.ndim == 3: + t = t.reshape(t.shape[0], -1) + is_quantized_candidate = ( + t.ndim == 2 and t.numel() > 65_536 + and "tok_emb" not in name and "lm_head" not in name and "embed_proj" not in name + ) + if is_quantized_candidate: + pad = (group_size - t.shape[1] % group_size) % group_size + t_padded = F.pad(t, (0, pad)) if pad > 0 else t + t_grouped = t_padded.reshape(-1, group_size) + scale = t_grouped.abs().mean(-1, keepdim=True).clamp(min=1e-8).half().float() + q = (t_grouped / scale).round().clamp(-2, 2).to(torch.int8) + + packed_bytes, n_packed = pack_quinary(q) + stats["quinary_params"] += t.numel() + + entry = { + "type": "quinary", "packed": packed_bytes, + "shape": list(t.shape), "padded_cols": t_padded.shape[1], + "group_size": group_size, "n_quins": n_packed, + "orig_shape": t_orig_shape, + } + if scale_quant_bits and scale_quant_bits in (4, 5, 8): + sq = _quant_scale(scale.half().squeeze(-1), scale_quant_bits) + entry.update(sq) + stats["quinary_bytes"] += len(packed_bytes) + sq["scale_delta"].numel() + 4 # +4 for anchor+step + else: + entry["scale"] = scale.half().squeeze(-1) + stats["quinary_bytes"] += len(packed_bytes) + scale.numel() * 2 + quantized[name] = entry + elif t.ndim == 2: + quantized[name] = {"type": "fp8", "data": t.to(torch.float8_e4m3fn)} + stats["fp_params"] += t.numel() + stats["fp_bytes"] += t.numel() + else: + quantized[name] = {"type": "fp16", "data": t.half()} + stats["fp_params"] += t.numel() + stats["fp_bytes"] += t.numel() * 2 + return quantized, stats + +def deq_sd(quantized: dict, target_dtype=torch.bfloat16): + out = {} + for name, entry in quantized.items(): + if entry["type"] == "quinary": + q = unpack_quinary(entry["packed"], entry["n_quins"]) + q = q.float().reshape(-1, 
entry["group_size"]) + if "scale_bits" in entry: + scale = _dequant_scale(entry).float().unsqueeze(-1) + else: + scale = entry["scale"].float().unsqueeze(-1) + q_absmean = q.abs().mean(-1, keepdim=True).clamp(min=1e-8) + t = (q * (scale / q_absmean)).reshape(-1, entry["padded_cols"]) + shape = entry["shape"] + result = t[:shape[0], :shape[1]].to(target_dtype) + orig = entry.get("orig_shape") + out[name] = result.reshape(orig).contiguous() if orig and orig != shape else result.contiguous() + elif entry["type"] == "fp8": + out[name] = entry["data"].to(torch.float32).to(target_dtype).contiguous() + else: + out[name] = entry["data"].to(target_dtype).contiguous() + return out + +def quin_stats(model: nn.Module, group_size: int = 64): + total = zeros = 0 + with torch.no_grad(): + for name, p in model.named_parameters(): + if p.ndim == 2 and ("weight" in name or "prototypes" in name) and p.shape[0] > 1 and p.numel() % group_size == 0: + w = p.detach().float().reshape(-1, group_size) + scale = w.abs().mean(-1, keepdim=True).clamp(min=1e-8).half().float() + q = (w / scale).round().clamp(-2, 2) + zeros += int((q == 0).sum().item()) + total += int(q.numel()) + return {"zero_frac": zeros / max(total, 1), "total_weights": total} + +_prev_committed: dict = {} + +def churn_fn(model: nn.Module, group_size: int = 64): + global _prev_committed + total = flipped = 0 + with torch.no_grad(): + for name, p in model.named_parameters(): + if p.ndim == 2 and ("weight" in name or "prototypes" in name) and p.shape[0] > 1 and p.numel() % group_size == 0: + w = p.detach().float().reshape(-1, group_size) + scale = w.abs().mean(-1, keepdim=True).clamp(min=1e-8).half().float() + q = (w / scale).round().clamp(-2, 2).cpu().numpy() + if name in _prev_committed: + flipped += int(np.sum(q != _prev_committed[name])) + total += q.size + _prev_committed[name] = q + return flipped / max(total, 1) + +def ns_orth(G: Tensor, steps: int = 10, eps: float = 1e-7) -> Tensor: + a, b, c = (3.4445, -4.7750, 2.0315) 
+ X = G.bfloat16() + X /= X.norm() + eps + transposed = G.size(0) > G.size(1) + if transposed: + X = X.T + for _ in range(steps): + A = X @ X.T + B = b * A + c * A @ A + X = a * X + B @ X + return X.T if transposed else X + +class Muon(torch.optim.Optimizer): + def __init__(self, params, lr: float, momentum: float, backend_steps: int, nesterov: bool = True, wd: float = 0.0): + super().__init__(params, dict(lr=lr, momentum=momentum, backend_steps=backend_steps, nesterov=nesterov, wd=wd)) + + @torch.no_grad() + def step(self, closure=None): + loss = None + if closure is not None: + with torch.enable_grad(): + loss = closure() + distributed = dist.is_available() and dist.is_initialized() + world_size = dist.get_world_size() if distributed else 1 + rank = dist.get_rank() if distributed else 0 + for group in self.param_groups: + params = group["params"] + if not params: + continue + lr, momentum = group["lr"], group["momentum"] + backend_steps, nesterov = group["backend_steps"], group["nesterov"] + total_params = sum(int(p.numel()) for p in params) + updates_flat = torch.zeros(total_params, device=params[0].device, dtype=torch.bfloat16) + curr = 0 + for i, p in enumerate(params): + if i % world_size == rank and p.grad is not None: + g = p.grad + state = self.state[p] + if "momentum_buffer" not in state: + state["momentum_buffer"] = torch.zeros_like(g) + buf = state["momentum_buffer"] + buf.mul_(momentum).add_(g) + if nesterov: + g = g.add(buf, alpha=momentum) + g = F.rms_norm(g.float(), (g.size(-1),)).bfloat16() + g = ns_orth(g, steps=backend_steps) + g *= max(1, g.size(0) / g.size(1)) ** 0.5 + updates_flat[curr:curr + p.numel()] = g.reshape(-1) + curr += p.numel() + if distributed: + dist.all_reduce(updates_flat, op=dist.ReduceOp.SUM) + wd = group.get("wd", 0.0) + curr = 0 + for p in params: + g = updates_flat[curr : curr + p.numel()].view_as(p).to(dtype=p.dtype) + if wd > 0: + p.mul_(1 - lr * wd) + p.add_(g, alpha=-lr) + curr += p.numel() + return loss + +def 
ld_shard(file: Path) -> Tensor: + header_bytes = 256 * np.dtype(" Tensor: + chunks = [] + remaining = n + while remaining > 0: + avail = self.tokens.numel() - self.pos + if avail <= 0: + self._advance_file() + continue + k = min(remaining, avail) + chunks.append(self.tokens[self.pos:self.pos + k]) + self.pos += k + remaining -= k + return chunks[0] if len(chunks) == 1 else torch.cat(chunks) + +class DistributedTokenLoader: + def __init__(self, pattern: str, rank: int, world_size: int, device: torch.device): + self.rank, self.world_size, self.device = rank, world_size, device + self.stream = TokenStream(pattern) + + def next_batch(self, global_tokens: int, seq_len: int, grad_accum_steps: int) -> tuple[Tensor, Tensor]: + local_tokens = global_tokens // (self.world_size * grad_accum_steps) + per_rank_span = local_tokens + 1 + chunk = self.stream.take(per_rank_span * self.world_size) + start = self.rank * per_rank_span + local = chunk[start:start + per_rank_span].pin_memory().to(self.device, non_blocking=True).to(torch.int64) + x = local[:-1].reshape(-1, seq_len) + y = local[1:].reshape(-1, seq_len) + return x, y + +class RMSNorm(nn.Module): + def __init__(self, eps: float | None = None): + super().__init__() + self.eps = eps + + def forward(self, x: Tensor) -> Tensor: + return F.rms_norm(x, (x.size(-1),), eps=self.eps) + +def apply_fp8_ste(w: Tensor) -> Tensor: + w_sim = w.to(torch.float8_e4m3fn).to(w.dtype) + return (w_sim - w).detach() + w + +class QATLinear(nn.Linear): + def forward(self, x: Tensor) -> Tensor: + w_qat = apply_fp8_ste(self.weight) + return F.linear(x, w_qat.to(x.dtype), self.bias.to(x.dtype) if self.bias is not None else None) + +class QATEmbedding(nn.Embedding): + def forward(self, input: Tensor) -> Tensor: + w_qat = apply_fp8_ste(self.weight) + return F.embedding(input, w_qat, self.padding_idx, self.max_norm, + self.norm_type, self.scale_grad_by_freq, self.sparse) + +class QuinaryLinear(nn.Linear): + def __init__(self, in_features, out_features, 
bias=False, group_size=64): + super().__init__(in_features, out_features, bias=bias) + self.group_size = group_size + num_groups = (in_features * out_features) // group_size + # Inert by design: the STE detach below blocks gradients to + # `scale_correction`. Kept as a fp32 buffer at value 1.0 for + # backwards-compatibility with the ternary-base state-dict layout. + # An attempt to fix the STE so this parameter receives gradients + # was tested 2026-05-01 (commit b9c…) and showed a small + # training-time regression with no TTT benefit, so reverted. + self.scale_correction = nn.Parameter(torch.ones(num_groups, dtype=torch.float32)) + + def forward(self, x: Tensor) -> Tensor: + w = self.weight.bfloat16() + g = self.group_size + w_g = w.reshape(-1, g) + scale = w_g.abs().mean(-1, keepdim=True).clamp(min=1e-8) * self.scale_correction.to(w.dtype).unsqueeze(-1) + q = (w_g / scale).round().clamp(-2, 2) + w_quantized = w + ((q * scale).reshape(w.shape) - w).detach() + return F.linear(x, w_quantized, + self.bias.to(x.dtype) if self.bias is not None else None) + + +class NormedQuinaryLinear(QuinaryLinear): + def forward(self, x: Tensor) -> Tensor: + return super().forward(F.rms_norm(x, (x.size(-1),))) + +def restore_low_dim_params_to_fp32(module: nn.Module) -> None: + with torch.no_grad(): + for name, param in module.named_parameters(): + if (param.ndim < 2 or any(p in name for p in CTP)) and param.dtype != torch.float32: + param.data = param.data.float() + +class Rotary(nn.Module): + def __init__(self, dim: int, base: float = 10000.0, no_cache: bool = False, + rope_type: str = "rope", yarn_max_len: int = 4096, train_seq_len: int = 1024): + super().__init__() + self.no_cache = no_cache + inv_freq = 1.0 / (base ** (torch.arange(0, dim, 2, dtype=torch.float32) / dim)) + if rope_type == "yarn": + scale = train_seq_len / yarn_max_len + freq_idx = torch.arange(0, dim, 2, dtype=torch.float32) + ramp = torch.clamp((freq_idx / dim - 0.25) / 0.75, 0.0, 1.0) + inv_freq = inv_freq / 
(ramp * (1.0 / scale - 1.0) + 1.0) + self.register_buffer("inv_freq", inv_freq, persistent=False) + self._seq_len_cached = 0 + self._cos_cached: Tensor | None = None + self._sin_cached: Tensor | None = None + + def forward(self, seq_len, device, dtype): + if self.no_cache: + t = torch.arange(seq_len, device=device, dtype=self.inv_freq.dtype) + freqs = torch.outer(t, self.inv_freq.to(device)) + return freqs.cos()[None, :, None, :].to(dtype=dtype), freqs.sin()[None, :, None, :].to(dtype=dtype) + if ( + self._cos_cached is None + or self._sin_cached is None + or self._seq_len_cached != seq_len + or self._cos_cached.device != device + ): + t = torch.arange(seq_len, device=device, dtype=self.inv_freq.dtype) + freqs = torch.outer(t, self.inv_freq.to(device)) + self._cos_cached = freqs.cos()[None, :, None, :] + self._sin_cached = freqs.sin()[None, :, None, :] + self._seq_len_cached = seq_len + return self._cos_cached.to(dtype=dtype), self._sin_cached.to(dtype=dtype) + +def apply_rotary_emb(x: Tensor, cos: Tensor, sin: Tensor) -> Tensor: + half = x.size(-1) // 2 + x1, x2 = x[..., :half], x[..., half:] + return torch.cat((x1 * cos + x2 * sin, x1 * (-sin) + x2 * cos), dim=-1) + +class CausalSelfAttention(nn.Module): + def __init__(self, dim, num_heads, num_kv_heads, rope_base, qk_gain_init, + group_size=64, no_cache=False, rope_type="rope", + yarn_max_len=4096, train_seq_len=1024): + super().__init__() + self.num_heads, self.num_kv_heads = num_heads, num_kv_heads + self.head_dim = dim // num_heads + self.q_size = self.num_heads * self.head_dim + self.kv_size = self.num_kv_heads * self.head_dim + + self.c_qkv = QuinaryLinear(dim, self.q_size + 2 * self.kv_size, bias=False, group_size=group_size) + self.proj = NormedQuinaryLinear(dim, dim, bias=False, group_size=group_size) + self.proj._zero_init = True + self.q_gain = nn.Parameter(torch.full((num_heads,), qk_gain_init, dtype=torch.float32)) + self.rotary = Rotary(self.head_dim, base=rope_base, no_cache=no_cache, + 
rope_type=rope_type, yarn_max_len=yarn_max_len, + train_seq_len=train_seq_len) + + def forward(self, x: Tensor) -> Tensor: + bsz, seqlen, dim = x.shape + qkv_out = self.c_qkv(x) + q_out, k_out, v_out = qkv_out.split([self.q_size, self.kv_size, self.kv_size], dim=-1) + q = q_out.reshape(bsz, seqlen, self.num_heads, self.head_dim) + k = k_out.reshape(bsz, seqlen, self.num_kv_heads, self.head_dim) + v = v_out.reshape(bsz, seqlen, self.num_kv_heads, self.head_dim) + q, k = F.rms_norm(q, (q.size(-1),)), F.rms_norm(k, (k.size(-1),)) + cos, sin = self.rotary(seqlen, x.device, q.dtype) + q, k = apply_rotary_emb(q, cos, sin), apply_rotary_emb(k, cos, sin) + q = q * self.q_gain.to(dtype=q.dtype)[None, None, :, None] + y = flash_attn_func(q.contiguous(), k.contiguous(), v.contiguous(), causal=True) + y = y.reshape(bsz, seqlen, dim) + return self.proj(y) + +class MLP(nn.Module): + def __init__(self, dim, mlp_mult, group_size=64, activation="swiglu"): + super().__init__() + hidden = mlp_mult * dim + self.activation = activation + if activation == "swiglu": + self.gate_up = QuinaryLinear(dim, hidden * 2, bias=False, group_size=group_size) + else: + self.fc = QuinaryLinear(dim, hidden, bias=False, group_size=group_size) + self.proj = NormedQuinaryLinear(hidden, dim, bias=False, group_size=group_size) + self.proj._zero_init = True + + def forward(self, x: Tensor) -> Tensor: + if self.activation == "swiglu": + gu = self.gate_up(x) + gate, up = gu.chunk(2, dim=-1) + return self.proj(F.silu(gate) * up) + elif self.activation == "relu": + return self.proj(torch.relu(self.fc(x))) + elif self.activation == "leaky_relu": + return self.proj(F.leaky_relu(self.fc(x), negative_slope=0.01)) + elif self.activation == "leaky_relu2": + return self.proj(F.leaky_relu(self.fc(x), negative_slope=0.5).square()) + else: # relu2 + return self.proj(torch.relu(self.fc(x)).square()) + +class Block(nn.Module): + def __init__(self, dim: int, num_heads: int, num_kv_heads: int, mlp_mult: int, + rope_base: 
float, qk_gain_init: float, group_size: int=64, + activation: str="swiglu", no_cache: bool=False, + rope_type: str="rope", yarn_max_len: int=4096, + train_seq_len: int=1024): + super().__init__() + self.attn_norm = RMSNorm() + self.mlp_norm = RMSNorm() + self.attn = CausalSelfAttention(dim, num_heads, num_kv_heads, rope_base, qk_gain_init, + group_size, no_cache, rope_type, yarn_max_len, train_seq_len) + self.mlp = MLP(dim, mlp_mult, group_size, activation) + self.attn_scale = nn.Parameter(torch.ones(dim, dtype=torch.float32)) + self.mlp_scale = nn.Parameter(torch.ones(dim, dtype=torch.float32)) + self.resid_mix = nn.Parameter(torch.stack((torch.ones(dim), torch.zeros(dim))).float()) + + def forward(self, x: Tensor, x0: Tensor) -> Tensor: + mix = self.resid_mix.to(dtype=x.dtype) + x_in = mix[0] * x + mix[1] * x0 + n = self.attn_norm(x_in) + x = x_in + self.attn_scale.to(dtype=x_in.dtype) * self.attn(n) + x = x + self.mlp_scale.to(dtype=x.dtype) * self.mlp(self.mlp_norm(x)) + return x + +class GPT(nn.Module): + def __init__(self, vocab_size, num_layers, model_dim, num_heads, num_kv_heads, mlp_mult, + tie_embeddings, tied_embed_init_std, logit_softcap, rope_base, qk_gain_init, + group_size: int = 64, activation: str = "swiglu", + embed_dim: int = 0, softcap_type: str="poly", no_cache: bool=False, + rope_type: str="rope", yarn_max_len: int=4096, train_seq_len: int=1024): + super().__init__() + self.tie_embeddings = tie_embeddings + self.logit_softcap = logit_softcap + self.softcap_type = softcap_type + self.embed_dim = embed_dim if embed_dim > 0 else model_dim + self.tok_emb = QATEmbedding(vocab_size, self.embed_dim) + self.embed_proj = QATLinear(self.embed_dim, model_dim, bias=False) if self.embed_dim != model_dim else None + self.embed_proj_rev = QATLinear(model_dim, self.embed_dim, bias=False) if self.embed_dim != model_dim else None + + self.blocks = nn.ModuleList([ + Block(model_dim, num_heads, num_kv_heads, mlp_mult, rope_base, qk_gain_init, + group_size, 
activation, no_cache, rope_type, yarn_max_len, train_seq_len) + for _ in range(num_layers) + ]) + + # U-Net split: first half encoder, second half decoder, decoder layers + # add a learned-weighted skip from the symmetric encoder layer. + self.num_encoder_layers = num_layers // 2 + self.num_decoder_layers = num_layers - self.num_encoder_layers + self.num_skip_weights = min(self.num_encoder_layers, self.num_decoder_layers) + self.skip_weights = nn.Parameter(torch.ones(self.num_skip_weights, model_dim, dtype=torch.float32)) + + self.final_norm = RMSNorm() + self.lm_head = QATLinear(model_dim, vocab_size, bias=False) + self.lm_head._zero_init = True + if tie_embeddings: + self.lm_head.weight.requires_grad_(False) + + self.vocab_bias = nn.Parameter(torch.zeros(vocab_size, dtype=torch.float32)) + self._init_weights(tied_embed_init_std) + + def _init_weights(self, tied_embed_init_std: float) -> None: + if self.tie_embeddings: + nn.init.normal_(self.tok_emb.weight, mean=0.0, std=tied_embed_init_std) + for module in self.modules(): + if isinstance(module, QuinaryLinear) and not getattr(module, "_zero_init", False): + nn.init.normal_(module.weight, mean=0.0, std=0.02) + elif isinstance(module, nn.Linear) and getattr(module, "_zero_init", False): + nn.init.zeros_(module.weight) + + def _compute_logits(self, x: Tensor) -> Tensor: + if self.tie_embeddings: + proj = self.embed_proj_rev(x) if self.embed_proj_rev is not None else x + logits_raw = F.linear(proj, self.tok_emb.weight.to(x.dtype)) + else: + logits_raw = self.lm_head(x) + return logits_raw + self.vocab_bias.to(x.dtype) + + def _softcap(self, logits: Tensor) -> Tensor: + s = self.logit_softcap + if self.softcap_type == "tanh": + return s * torch.tanh(logits / s) + x_sc = torch.clamp(logits / s, -2.0, 2.0) + x2 = x_sc * x_sc + return s * torch.clamp(x_sc * (1.0 - x2 / 3.0 + x2 * x2 / 15.0), -1.0, 1.0) + + def forward(self, input_ids: Tensor, target_ids: Tensor, reduction: str = "mean") -> Tensor: + x = 
self.tok_emb(input_ids).float() + if self.embed_proj is not None: + x = self.embed_proj(x) + x = F.rms_norm(x, (x.size(-1),)) + x0 = x + + # U-Net encoder/decoder with skip connections. + skips = [] + for vi in range(self.num_encoder_layers): + x = self.blocks[vi](x, x0) + skips.append(x) + for vi in range(self.num_decoder_layers): + if vi < self.num_skip_weights: + x = x + self.skip_weights[vi].to(dtype=x.dtype) * skips.pop() + x = self.blocks[self.num_encoder_layers + vi](x, x0) + + x_normed = self.final_norm(x) + x_flat = x_normed.reshape(-1, x_normed.size(-1)) + targets = target_ids.reshape(-1) + logits = self._softcap(self._compute_logits(x_flat)) + + if reduction == "none": + return F.cross_entropy(logits.float(), targets, reduction="none").reshape(input_ids.shape) + + # Fused CE: single logsumexp; z-loss only during training (not eval BPB). + logits_f = logits.float() + lse = torch.logsumexp(logits_f, dim=-1) + target_logits = logits_f.gather(1, targets.unsqueeze(1)).squeeze(1) + main_loss = (lse - target_logits).mean() + if self.training: + main_loss = main_loss + 1e-4 * (lse ** 2).mean() + return main_loss + + +def build_luts(sp, vocab_size: int, device: torch.device): + sp_vocab_size = int(sp.vocab_size()) + table_size = max(sp_vocab_size, vocab_size) + base_bytes_np = np.zeros((table_size,), dtype=np.int16) + has_leading_space_np = np.zeros((table_size,), dtype=np.bool_) + is_boundary_token_np = np.ones((table_size,), dtype=np.bool_) + for token_id in range(sp_vocab_size): + if sp.is_control(token_id) or sp.is_unknown(token_id) or sp.is_unused(token_id): + continue + is_boundary_token_np[token_id] = False + if sp.is_byte(token_id): + base_bytes_np[token_id] = 1 + continue + piece = sp.id_to_piece(token_id) + if piece.startswith("\u2581"): + has_leading_space_np[token_id] = True + piece = piece[1:] + base_bytes_np[token_id] = len(piece.encode("utf-8")) + return ( + torch.tensor(base_bytes_np, dtype=torch.int16, device=device), + 
torch.tensor(has_leading_space_np, dtype=torch.bool, device=device), + torch.tensor(is_boundary_token_np, dtype=torch.bool, device=device), + ) + +def ld_val(pattern, seq_len, max_tok=int(os.environ.get("VAL_MAX_TOKENS", 0))): + files = sorted(glob.glob(pattern)) + assert files, f"No files: {pattern}" + tok = torch.cat([ld_shard(Path(p)) for p in files]).contiguous() + if max_tok > 0: tok = tok[:max_tok + 1] + u = ((tok.numel() - 1) // seq_len) * seq_len + return tok[:u + 1] + + +def eval_val(args, model, rank, world_size, device, grad_accum_steps, val_tokens, + base_bytes_lut, has_leading_space_lut, is_boundary_token_lut): + local_batch_tokens = args.val_batch_size // (world_size * grad_accum_steps) + local_batch_seqs = max(1, local_batch_tokens // args.train_seq_len) + total_seqs = (val_tokens.numel() - 1) // args.train_seq_len + seq_start = (total_seqs * rank) // world_size + seq_end = (total_seqs * (rank + 1)) // world_size + loss_sum = torch.zeros((), device=device, dtype=torch.float64) + token_count = torch.zeros((), device=device, dtype=torch.float64) + byte_count = torch.zeros((), device=device, dtype=torch.float64) + model.eval() + with torch.no_grad(): + for batch_start in range(seq_start, seq_end, local_batch_seqs): + batch_end = min(batch_start + local_batch_seqs, seq_end) + raw_start = batch_start * args.train_seq_len + raw_end = batch_end * args.train_seq_len + 1 + local = val_tokens[raw_start:raw_end].to(device=device, dtype=torch.int64) + x, y = local[:-1].reshape(-1, args.train_seq_len), local[1:].reshape(-1, args.train_seq_len) + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + batch_loss = model(x, y).detach() + n = float(y.numel()) + loss_sum += batch_loss.to(torch.float64) * n + token_count += n + prev_ids, tgt_ids = x.reshape(-1), y.reshape(-1) + tok_bytes = base_bytes_lut[tgt_ids].to(torch.int16) + tok_bytes += (has_leading_space_lut[tgt_ids] & ~is_boundary_token_lut[prev_ids]).to(torch.int16) + byte_count += 
tok_bytes.to(torch.float64).sum() + if dist.is_available() and dist.is_initialized(): + for t in (loss_sum, token_count, byte_count): + dist.all_reduce(t, op=dist.ReduceOp.SUM) + val_loss = loss_sum / token_count + bpb = (val_loss.item() / math.log(2.0)) * (token_count.item() / byte_count.item()) + model.train() + return float(val_loss.item()), float(bpb), int(token_count.item()), int(byte_count.item()) + +def main() -> None: + args = Hyperparameters() + code = Path(__file__).read_text(encoding="utf-8") + + if args.matrix_optimizer != "adamw": + global ns_orth + ns_orth = torch.compile(ns_orth) + + distributed = "RANK" in os.environ and "WORLD_SIZE" in os.environ + rank = int(os.environ.get("RANK", "0")) + world_size = int(os.environ.get("WORLD_SIZE", "1")) + local_rank = int(os.environ.get("LOCAL_RANK", "0")) + grad_accum_steps = max(1, 8 // world_size) + grad_scale = 1.0 / grad_accum_steps + + if not torch.cuda.is_available(): + raise RuntimeError("CUDA is required") + device = torch.device("cuda", local_rank) + torch.cuda.set_device(device) + if distributed: + dist.init_process_group(backend="nccl", device_id=device) + dist.barrier() + master_process = rank == 0 + torch.backends.cuda.matmul.allow_tf32 = True + torch.backends.cudnn.allow_tf32 = True + + os.makedirs("logs/cuda/", exist_ok=True) + logfile = f"logs/cuda/{args.run_id}.txt" if master_process else None + if master_process: + print(logfile) + # Truncate any pre-existing logfile from a previous run with the same + # RUN_ID so the per-RUN_ID log file isn't append-stacked across reruns. 
+ if logfile: + open(logfile, "w", encoding="utf-8").close() + + def log0(msg: str, console: bool = True) -> None: + if not master_process: + return + if console: + print(msg) + if logfile: + with open(logfile, "a", encoding="utf-8") as f: + print(msg, file=f) + + log0(code, console=False) + log0("=" * 100, console=False) + + log0(f"Python {sys.version}", console=False) + log0(f"PyTorch {torch.__version__}", console=False) + + random.seed(args.seed) + np.random.seed(args.seed) + torch.manual_seed(args.seed) + torch.cuda.manual_seed_all(args.seed) + + sp = spm.SentencePieceProcessor(model_file=args.tokenizer_path) + # Guard against an obvious tokenizer/vocab-size mismatch that would + # silently produce a wrong byte-count LUT (and therefore a wrong BPB + # denominator) without crashing. + assert sp.vocab_size() == args.vocab_size, ( + f"tokenizer vocab_size ({sp.vocab_size()}) != args.vocab_size " + f"({args.vocab_size}); check TOKENIZER_PATH / VOCAB_SIZE") + val_tokens = ld_val(args.val_files, args.train_seq_len) + base_bytes_lut, has_leading_space_lut, is_boundary_token_lut = build_luts( + sp, args.vocab_size, device) + + # --- Model --- + base_model = GPT( + vocab_size=args.vocab_size, num_layers=args.num_layers, model_dim=args.model_dim, + num_heads=args.num_heads, num_kv_heads=args.num_kv_heads, mlp_mult=args.mlp_mult, + tie_embeddings=args.tie_embeddings, tied_embed_init_std=args.tied_embed_init_std, + logit_softcap=args.logit_softcap, rope_base=args.rope_base, qk_gain_init=args.qk_gain_init, + group_size=args.bitnet_group_size, activation=args.activation_type, + embed_dim=args.embed_dim, + softcap_type=args.softcap_type, no_cache=(args.compile_mode == "reduce-overhead"), + rope_type=args.rope_type, yarn_max_len=args.yarn_max_len, train_seq_len=args.train_seq_len, + ).to(device).bfloat16() + + for module in base_model.modules(): + if isinstance(module, nn.Linear): + module.float() + restore_low_dim_params_to_fp32(base_model) + if args.tie_embeddings: + 
base_model.lm_head.weight.requires_grad_(False) + + torch._dynamo.config.optimize_ddp = False + + compiled_model = torch.compile(base_model, mode=args.compile_mode if args.compile_mode != "default" else None) + use_find_unused = not args.tie_embeddings + model = DDP(compiled_model, device_ids=[local_rank], broadcast_buffers=False, + find_unused_parameters=use_find_unused, + static_graph=not use_find_unused, + gradient_as_bucket_view=True) if distributed else compiled_model + + # --- Optimizers --- + _excl = {"tok_emb.weight", "lm_head.weight"} + all_other_params = [(n, p) for n, p in base_model.named_parameters() + if not any(eh in n for eh in _excl)] + matrix_params = [p for n, p in all_other_params + if p.ndim == 2 and not any(pat in n for pat in CTP)] + scalar_params = [p for n, p in all_other_params + if p.ndim < 2 or any(pat in n for pat in CTP)] + + token_lr = args.tied_embed_lr if args.tie_embeddings else args.embed_lr + opt_tok = torch.optim.Adam( + [{"params": [base_model.tok_emb.weight], "lr": token_lr, "base_lr": token_lr}], + betas=(args.beta1, args.beta2), eps=args.adam_eps, fused=True) + if args.matrix_optimizer == "adamw": + opt_muon = torch.optim.AdamW( + [{"params": matrix_params, "lr": args.adam_lr, "base_lr": args.adam_lr}], + betas=(args.beta1, args.beta2), eps=args.adam_eps, weight_decay=args.adam_wd, fused=True) + else: + opt_muon = Muon(matrix_params, lr=args.matrix_lr, momentum=args.muon_momentum, + backend_steps=args.muon_backend_steps, wd=args.muon_wd) + for g in opt_muon.param_groups: + g["base_lr"] = args.matrix_lr + opt_scalar = torch.optim.Adam( + [{"params": scalar_params, "lr": args.scalar_lr, "base_lr": args.scalar_lr}], + betas=(args.beta1, args.beta2), eps=args.adam_eps, fused=True) + opt_head = torch.optim.Adam( + [{"params": [base_model.lm_head.weight], "lr": 0.0, "base_lr": 0.0}], + betas=(args.beta1, args.beta2), eps=args.adam_eps, fused=True) + + optimizers = [opt_tok, opt_muon, opt_scalar, opt_head] + + # --- Log all 
hyperparameters --- + log0("--- Hyperparameters ---", console=False) + log0(" ".join(f"{a}={getattr(args,a)}" for a in sorted(dir(args)) if not a.startswith("_") and a not in ("train_files","val_files") and not callable(getattr(args,a))), console=False) + n_params = sum(p.numel() for p in base_model.parameters()) + log0(f"params:{n_params} L:{args.num_layers} d:{args.model_dim} h:{args.num_heads} kv:{args.num_kv_heads} ws:{world_size} ga:{grad_accum_steps} s:{args.seed}") + + # --- Data loader & helpers --- + train_loader = DistributedTokenLoader(args.train_files, rank, world_size, device) + + def zero_grad_all(): + for opt in optimizers: + opt.zero_grad(set_to_none=True) + + max_wallclock_ms = 1000.0 * args.max_wallclock_seconds if args.max_wallclock_seconds > 0 else None + + def lr_mul(step: int, elapsed_ms: float): + if args.warmdown_fraction <= 0: + return 1.0 + if max_wallclock_ms is None: + warmdown_start = int(args.iterations * (1.0 - args.warmdown_fraction)) + ratio = max((args.iterations - step) / max(args.iterations * args.warmdown_fraction, 1), 0.0) if step >= warmdown_start else 1.0 + else: + warmdown_ms = max_wallclock_ms * args.warmdown_fraction + remaining_ms = max(max_wallclock_ms - elapsed_ms, 0.0) + ratio = remaining_ms / max(warmdown_ms, 1e-9) if remaining_ms <= warmdown_ms else 1.0 + return max(ratio, args.min_lr) + + _seq_switched = False + _batch_switched = False + active_seq_len = args.seq_len_start if args.seq_len_start > 0 else args.train_seq_len + active_batch_tokens = args.batch_tokens_start if args.batch_tokens_start > 0 else args.train_batch_tokens + + if args.skip_training: + log0("skip_training=1, using existing artifact final_model.quinary.ptz") + + # --- Compiler warmup --- + if args.warmup_steps > 0 and not args.skip_training: + _ms = {n: t.detach().cpu().clone() for n, t in base_model.state_dict().items()} + _os = [copy.deepcopy(o.state_dict()) for o in optimizers] + model.train() + for ws in range(args.warmup_steps): + 
zero_grad_all() + for mi in range(grad_accum_steps): + if distributed: model.require_backward_grad_sync = mi == grad_accum_steps - 1 + x, y = train_loader.next_batch(active_batch_tokens, active_seq_len, grad_accum_steps) + torch.compiler.cudagraph_mark_step_begin() + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): loss = model(x, y) + (loss * grad_scale).backward() + for o in optimizers: o.step() + zero_grad_all() + log0(f"warmup:{ws+1}/{args.warmup_steps}") + base_model.load_state_dict(_ms, strict=True) + for o, s in zip(optimizers, _os): o.load_state_dict(s) + zero_grad_all() + train_loader = DistributedTokenLoader(args.train_files, rank, world_size, device) + + # --- Main training loop --- + training_time_ms = 0.0 + stop_after_step: int | None = None + train_loss = torch.zeros((), device=device) + torch.cuda.synchronize() + t0 = time.perf_counter() + step = 0 + + while not args.skip_training: + last_step = step == args.iterations or (stop_after_step is not None and step >= stop_after_step) + + if last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + training_time_ms += 1000.0 * (time.perf_counter() - t0) + val_loss, val_bpb, val_tok_count, val_byte_count = eval_val( + args, model, rank, world_size, device, grad_accum_steps, + val_tokens, base_bytes_lut, has_leading_space_lut, is_boundary_token_lut) + tstats = quin_stats(base_model, group_size=args.bitnet_group_size) + log0(f"step:{step}/{args.iterations} val_loss:{val_loss:.4f} val_bpb:{val_bpb:.4f} " + f"train_time:{training_time_ms:.0f}ms zero_frac:{tstats['zero_frac']:.3f} " + f"eval_tokens:{val_tok_count} eval_bytes:{val_byte_count}") + torch.cuda.synchronize() + t0 = time.perf_counter() + + if last_step: + if stop_after_step is not None and step < args.iterations: + log0(f"stopping_early: wallclock_cap train_time:{training_time_ms:.0f}ms step:{step}/{args.iterations}") + break + + elapsed_ms = training_time_ms + 1000.0 * (time.perf_counter() 
- t0) + scale = lr_mul(step, elapsed_ms) + + # Sequence length schedule + if args.seq_len_start > 0 and not _seq_switched: + if max_wallclock_ms is not None: + should_switch_seq = elapsed_ms >= args.seq_schedule_fraction * max_wallclock_ms + else: + should_switch_seq = step >= int(args.iterations * args.seq_schedule_fraction) + if should_switch_seq: + active_seq_len = args.train_seq_len + _seq_switched = True + torch._dynamo.reset() + train_loader = DistributedTokenLoader(args.train_files, rank, world_size, device) + log0(f"step:{step} seq_len_switch:{args.seq_len_start}->{active_seq_len}") + + # Batch size schedule + if args.batch_tokens_start > 0 and not _batch_switched: + if max_wallclock_ms is not None: + should_switch_batch = elapsed_ms >= args.batch_schedule_fraction * max_wallclock_ms + else: + should_switch_batch = step >= int(args.iterations * args.batch_schedule_fraction) + if should_switch_batch: + active_batch_tokens = args.train_batch_tokens + _batch_switched = True + log0(f"step:{step} batch_switch:{args.batch_tokens_start}->{active_batch_tokens}") + + zero_grad_all() + train_loss.zero_() + + for micro in range(grad_accum_steps): + if distributed: + model.require_backward_grad_sync = micro == grad_accum_steps - 1 + x, y = train_loader.next_batch(active_batch_tokens, active_seq_len, grad_accum_steps) + torch.compiler.cudagraph_mark_step_begin() + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + loss = model(x, y) + train_loss.add_(loss.detach()) + (loss * grad_scale).backward() + train_loss /= grad_accum_steps + + # Muon momentum warmup (skip when AdamW is the matrix optimizer) + if args.matrix_optimizer != "adamw": + frac = min(step / args.muon_momentum_warmup_steps, 1.0) if args.muon_momentum_warmup_steps > 0 else 1.0 + for g in opt_muon.param_groups: + g["momentum"] = (1 - frac) * args.muon_momentum_warmup_start + frac * args.muon_momentum + + # LR scheduling + for opt in optimizers: + for g in opt.param_groups: + g["lr"] = 
g["base_lr"] * scale + opt.step() + zero_grad_all() + step += 1 + approx_ms = training_time_ms + 1000.0 * (time.perf_counter() - t0) + + if args.train_log_every > 0 and step % args.train_log_every == 0: + log0(f"step:{step}/{args.iterations} loss:{train_loss.item():.4f} t:{approx_ms:.0f}ms avg:{approx_ms/step:.1f}ms") + if args.churn_log_every > 0 and step % args.churn_log_every == 0: + log0(f"step:{step} churn:{churn_fn(base_model, args.bitnet_group_size):.4f} zero:{quin_stats(base_model, args.bitnet_group_size)['zero_frac']:.3f}") + + # Wallclock cap sync + if stop_after_step is None and max_wallclock_ms is not None and step % 10 == 0: + reached_cap = approx_ms >= max_wallclock_ms + if distributed: + cap_t = torch.tensor(int(reached_cap), device=device) + dist.all_reduce(cap_t, op=dist.ReduceOp.MAX) + reached_cap = bool(cap_t.item()) + if reached_cap: + stop_after_step = step + + # --- Serialization --- + if master_process and not args.skip_training: + sd = base_model.state_dict() + if base_model.tie_embeddings: + sd.pop("lm_head.weight", None) + + log0("serialize: start") + t_qsd = time.perf_counter() + q_obj, q_stats = q_sd(sd, group_size=args.bitnet_group_size, + scale_quant_bits=args.scale_quant_bits) + qsd_time = time.perf_counter() - t_qsd + log0(f"serialize: q_sd in {qsd_time:.1f}s " + f"(quinary={q_stats['quinary_params']/1e6:.2f}M params/{q_stats['quinary_bytes']/1e6:.2f}MB, " + f"fp={q_stats['fp_params']/1e6:.2f}M params/{q_stats['fp_bytes']/1e6:.2f}MB)") + + t_save = time.perf_counter() + buf = io.BytesIO() + torch.save(q_obj, buf) + raw_bytes = len(buf.getvalue()) + log0(f"serialize: torch.save raw={raw_bytes/1e6:.2f}MB in {time.perf_counter()-t_save:.1f}s") + + t_compress = time.perf_counter() + final_blob, per_stream_audit = _serialize_per_stream_v2(q_obj, level=9) + compress_time = time.perf_counter() - t_compress + artifact_bytes = len(final_blob) + log0(per_stream_audit) + log0(f"serialize: per_stream_v2 {raw_bytes/1e6:.2f}MB -> 
{artifact_bytes/1e6:.2f}MB " + f"(ratio {artifact_bytes/raw_bytes:.1%}, saved {(raw_bytes-artifact_bytes)/1e6:.2f}MB) " + f"in {compress_time:.1f}s") + + with open("final_model.quinary.ptz", "wb") as f: + f.write(final_blob) + + code_bytes = len(code.encode("utf-8")) + + total = artifact_bytes + code_bytes + log0(f"artifact:{artifact_bytes/1e6:.2f}MB quinary:{q_stats['quinary_params']}({q_stats['quinary_bytes']}B) fp:{q_stats['fp_params']}({q_stats['fp_bytes']}B) code:{code_bytes}") + log0(f"budget:{total}/{16000000} ({total/1e6:.2f}/{16.00:.2f}MB) {'FITS' if total <= 16000000 else 'OVER'}") + + # --- All ranks load roundtrip weights and evaluate --- + if distributed: + dist.barrier() + + with open("final_model.quinary.ptz", "rb") as f: + loaded = _load_artifact(f.read()) + missing, unexpected = base_model.load_state_dict(deq_sd(loaded), strict=False) + # The only "missing" key we expect is `lm_head.weight` when tied, since + # q_sd drops it from the saved state-dict. Any other missing/unexpected + # key is a serialization-roundtrip bug and should fail loudly. + expected_missing = {"lm_head.weight"} if base_model.tie_embeddings else set() + assert set(missing) <= expected_missing, f"unexpected missing keys after artifact load: {set(missing) - expected_missing}" + assert not unexpected, f"unexpected extra keys after artifact load: {unexpected}" + torch._dynamo.reset() + + q_val_loss, q_val_bpb, q_tok_count, q_byte_count = eval_val( + args, model, rank, world_size, device, grad_accum_steps, + val_tokens, base_bytes_lut, has_leading_space_lut, is_boundary_token_lut) + log0(f"final_quinary_roundtrip val_loss:{q_val_loss:.4f} val_bpb:{q_val_bpb:.4f} " + f"eval_tokens:{q_tok_count} eval_bytes:{q_byte_count}") + + # --- Score-first chunk-based CTP TTT --- + if args.ttt_steps > 0: + torch.cuda.synchronize() + t_ttt = time.perf_counter() + seq_len = args.train_seq_len + # TTT_STRIDE: sliding-window step used to slice the val stream into TTT + # examples. 
Hardcoded at 16 (canonical sp16384 quinary submission); a + # smaller stride yields more overlapping windows but more compute. + stride = 16 + ttt_chunk_tokens = args.ttt_tokens if args.ttt_tokens > 0 else 32768 + ttt_epochs = args.ttt_steps + batch_seqs = 32 + total_tokens = val_tokens.numel() - 1 + + # Select TTT params (CTP only — quinary weights are frozen) + for p in base_model.parameters(): + p.requires_grad_(False) + ttt_params = [] + for name, p in base_model.named_parameters(): + if any(pat in name for pat in CTP): + p.requires_grad_(True) + ttt_params.append(p) + n_ttt = sum(p.numel() for p in ttt_params) + ttt_opt = torch.optim.SGD(ttt_params, lr=args.ttt_lr, momentum=0.9) + for pg in ttt_opt.param_groups: + pg["initial_lr"] = pg["lr"] + log0(f"ttt: {n_ttt} CTP params, lr={args.ttt_lr}") + + # Assign sliding windows to chunks + context_size = seq_len - stride + window_starts = [ws for ws in range(0, total_tokens, stride) + if ws + context_size < total_tokens] + num_chunks = (total_tokens + ttt_chunk_tokens - 1) // ttt_chunk_tokens + chunk_windows = [[] for _ in range(num_chunks)] + for ws in window_starts: + s = 0 if ws == 0 else context_size + ci = min((ws + s) // ttt_chunk_tokens, num_chunks - 1) + chunk_windows[ci].append(ws) + log0(f"ttt: {n_ttt} CTP params, {num_chunks} chunks, {ttt_epochs} epochs, lr={args.ttt_lr}") + + loss_sum = torch.zeros((), device=device, dtype=torch.float64) + token_count = torch.zeros((), device=device, dtype=torch.float64) + byte_count = torch.zeros((), device=device, dtype=torch.float64) + + # Recompile for TTT (find_unused_parameters needed with frozen params) + torch._dynamo.reset() + compiled_ttt = torch.compile(base_model, mode=args.compile_mode if args.compile_mode != "default" else None) + + for ci in range(num_chunks): + windows = chunk_windows[ci] + if not windows: + continue + chunk_start = ci * ttt_chunk_tokens + chunk_end = min((ci + 1) * ttt_chunk_tokens, total_tokens) + my_s = len(windows) * rank // 
world_size + my_e = len(windows) * (rank + 1) // world_size + my_windows = windows[my_s:my_e] + + # Phase 1: SCORE (no_grad, compiled) + base_model.eval() + with torch.no_grad(): + for bi in range(0, len(my_windows), batch_seqs): + batch_ws = my_windows[bi:bi + batch_seqs] + bsz = len(batch_ws) + x_batch = torch.zeros(bsz, seq_len, dtype=torch.int64, device=device) + y_batch = torch.zeros(bsz, seq_len, dtype=torch.int64, device=device) + wlens = [] + for i, ws in enumerate(batch_ws): + we = min(ws + seq_len, total_tokens) + wlen = we - ws + wlens.append(wlen) + chunk_tok = val_tokens[ws:we + 1].to(dtype=torch.int64, device=device) + x_batch[i, :wlen] = chunk_tok[:-1] + y_batch[i, :wlen] = chunk_tok[1:] + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + nll = compiled_ttt(x_batch, y_batch, reduction="none").detach() + for i, ws in enumerate(batch_ws): + wlen = wlens[i] + s = 0 if ws == 0 else context_size + scored_nll = nll[i, s:wlen].to(torch.float64) + loss_sum += scored_nll.sum() + token_count += float(wlen - s) + tgt, prev = y_batch[i, s:wlen], x_batch[i, s:wlen] + tb = base_bytes_lut[tgt].to(torch.float64) + tb += (has_leading_space_lut[tgt] & ~is_boundary_token_lut[prev]).to(torch.float64) + byte_count += tb.sum() + + # Phase 2: TRAIN on chunk (score-first: already scored) + if ci < num_chunks - 1 and ttt_epochs > 0: + base_model.train() + chunk_seqs = (chunk_end - chunk_start) // seq_len + if chunk_seqs > 0: + cos_mul = 0.5 * (1.0 + math.cos(math.pi * ci / max(num_chunks - 1, 1))) + for pg in ttt_opt.param_groups: + pg["lr"] = pg.get("initial_lr", pg["lr"]) * cos_mul + my_seq_s = chunk_seqs * rank // world_size + my_seq_e = chunk_seqs * (rank + 1) // world_size + for _ep in range(ttt_epochs): + for bs in range(0, my_seq_e - my_seq_s, batch_seqs): + be = min(bs + batch_seqs, my_seq_e - my_seq_s) + start_tok = chunk_start + (my_seq_s + bs) * seq_len + end_tok = chunk_start + (my_seq_s + be) * seq_len + 1 + if end_tok > val_tokens.numel(): + 
continue + local = val_tokens[start_tok:end_tok].to(device=device, dtype=torch.int64) + x = local[:-1].reshape(-1, seq_len) + y = local[1:].reshape(-1, seq_len) + ttt_opt.zero_grad(set_to_none=True) + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + loss = compiled_ttt(x, y) + loss.backward() + if distributed: + for p in ttt_params: + if p.grad is not None: + dist.all_reduce(p.grad, op=dist.ReduceOp.AVG) + torch.nn.utils.clip_grad_norm_(ttt_params, 1.0) + ttt_opt.step() + + if master_process and ci % max(1, num_chunks // 5) == 0: + log0(f"ttt chunk:{ci+1}/{num_chunks}") + + if distributed: + for t in (loss_sum, token_count, byte_count): + dist.all_reduce(t, op=dist.ReduceOp.SUM) + ttt_val_loss = (loss_sum / token_count).item() + ttt_bpb = (ttt_val_loss / math.log(2.0)) * (token_count.item() / byte_count.item()) + for p in base_model.parameters(): + p.requires_grad_(True) + torch._dynamo.reset() + torch.cuda.synchronize() + ttt_time_ms = 1000.0 * (time.perf_counter() - t_ttt) + log0(f"ttt_eval val_loss:{ttt_val_loss:.4f} val_bpb:{ttt_bpb:.4f} " + f"time:{ttt_time_ms:.0f}ms " + f"eval_tokens:{int(token_count.item())} eval_bytes:{int(byte_count.item())}") + + if distributed: + dist.destroy_process_group() + +if __name__ == "__main__": + main() +==================================================================================================== +Python 3.13.13 | packaged by Anaconda, Inc. 
| (main, Apr 14 2026, 06:19:41) [GCC 14.3.0] +PyTorch 2.10.0+cu128 +--- Hyperparameters --- +activation_type=relu2 adam_eps=1e-08 adam_lr=0.05 adam_wd=0.05 batch_schedule_fraction=0.33 batch_tokens_start=0 beta1=0.9 beta2=0.95 bitnet_group_size=192 churn_log_every=0 compile_mode=default data_path=./data/canonical/datasets/fineweb10B_sp16384 embed_dim=380 embed_lr=0.6 fp_storage=True grad_clip_norm=0.0 head_lr=0.02 iterations=10000 logit_softcap=10.0 matrix_lr=0.035 matrix_optimizer=muon max_wallclock_seconds=599.0 min_lr=0.0 mlp_mult=4 model_dim=576 muon_backend_steps=3 muon_momentum=0.95 muon_momentum_warmup_start=0.85 muon_momentum_warmup_steps=500 muon_wd=0.0 num_heads=6 num_kv_heads=3 num_layers=10 qk_gain_init=5.0 rope_base=5000.0 rope_type=yarn run_id=quinary_seed42 scalar_lr=0.02 scale_quant_bits=5 seed=42 seq_len_start=0 seq_schedule_fraction=0.0 skip_training=False softcap_type=poly tie_embeddings=1 tied_embed_init_std=0.005 tied_embed_lr=0.02 tokenizer_path=./data/canonical/tokenizers/fineweb_16384_bpe.model train_batch_tokens=524288 train_log_every=1000 train_seq_len=1024 ttt_lr=0.005 ttt_steps=3 ttt_tokens=32768 val_batch_size=524288 val_loss_every=0 vocab_size=16384 warmdown_fraction=0.2 warmup_steps=5 yarn_max_len=2048 +params:52828668 L:10 d:576 h:6 kv:3 ws:8 ga:1 s:42 +warmup:1/5 +warmup:2/5 +warmup:3/5 +warmup:4/5 +warmup:5/5 +step:1000/10000 loss:3.8235 t:76844ms avg:76.8ms +step:2000/10000 loss:3.5118 t:153939ms avg:77.0ms +step:3000/10000 loss:3.4336 t:230839ms avg:76.9ms +step:4000/10000 loss:3.2842 t:307597ms avg:76.9ms +step:5000/10000 loss:3.5044 t:384327ms avg:76.9ms +step:6000/10000 loss:3.5311 t:461068ms avg:76.8ms +step:7000/10000 loss:3.4751 t:537724ms avg:76.8ms +step:7800/10000 val_loss:3.2656 val_bpb:1.1584 train_time:599124ms zero_frac:0.262 eval_tokens:37146624 eval_bytes:151078879 +stopping_early: wallclock_cap train_time:599124ms step:7800/10000 +serialize: start +serialize: q_sd in 0.1s (quinary=36.50M params/12.36MB, fp=6.90M 
params/7.11MB) +serialize: torch.save raw=19.53MB in 0.0s +per_stream_v2: meta=0.14MB, 63 bulk payloads (lzma=61, lrzip=2; raw=3, base5=2, base5_T=35, bitmask_T=23), qkv_splits=10, total=15.64MB +serialize: per_stream_v2 19.53MB -> 15.64MB (ratio 80.1%, saved 3.90MB) in 36.9s +artifact:15.64MB quinary:36495360(12355360B) fp:6896124(7114168B) code:79272 +budget:15714938/16000000 (15.71/16.00MB) FITS +final_quinary_roundtrip val_loss:3.2774 val_bpb:1.1626 eval_tokens:37146624 eval_bytes:151078879 +ttt: 42364 CTP params, lr=0.005 +ttt: 42364 CTP params, 1134 chunks, 3 epochs, lr=0.005 +ttt chunk:1/1134 +ttt chunk:227/1134 +ttt chunk:453/1134 +ttt chunk:679/1134 +ttt chunk:905/1134 +ttt chunk:1131/1134 +ttt_eval val_loss:3.2083 val_bpb:1.1381 time:213616ms eval_tokens:37146624 eval_bytes:151078879 diff --git a/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/quinary_seed7.txt b/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/quinary_seed7.txt new file mode 100644 index 0000000000..386fd3c195 --- /dev/null +++ b/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/quinary_seed7.txt @@ -0,0 +1,1689 @@ +"Quinary training script for OpenAI's Parameter Golf Challenge. Based on ternary submission by Ciprian-Florin Ifrim (24 March 2026). Quinary: {-2,-1,0,+1,+2} with base-5 packing (3 quins/byte = 2.667 bpw vs ternary 1.6 bpw)." 
+ +import copy +import glob +import io +import math +import os +import random +import subprocess +import sys +import tempfile +import time +import lzma +from pathlib import Path +import numpy as np +import sentencepiece as spm +import torch +import torch.distributed as dist +import torch.nn.functional as F +from torch import Tensor, nn +from torch.nn.parallel import DistributedDataParallel as DDP +from flash_attn_interface import flash_attn_func + +def _e(k, d, t=str): + v = os.environ.get(k, str(d)) + if t == bool: return bool(int(v)) + return t(v) + +class Hyperparameters: + # Defaults below match the canonical SP16384 quinary submission config — + # i.e. exactly what `run.sh` passes through to torchrun. A bare + # `torchrun --standalone --nproc_per_node=8 train_gpt.py` (no env vars) + # therefore reproduces the submission. `run.sh` is still the + # documentation-of-record for the canonical config; these defaults are + # mirrored from it so the two never silently disagree. + data_path = _e("DATA_PATH", "./data/canonical/datasets/fineweb10B_sp16384") + # [0-9] prefix matches only NNNNNN-suffixed shard files, ignoring any + # sibling parallel-array files that might share the prefix. 
+ train_files = os.path.join(data_path, "fineweb_train_[0-9]*.bin") + val_files = os.path.join(data_path, "fineweb_val_[0-9]*.bin") + tokenizer_path = _e("TOKENIZER_PATH", "./data/canonical/tokenizers/fineweb_16384_bpe.model") + run_id = os.environ.get("RUN_ID", f"run_{int(time.time())}") + seed = _e("SEED", 42, int) + compile_mode = _e("COMPILE_MODE", "default") + val_batch_size = _e("VAL_BATCH_SIZE", 524288, int) + val_loss_every = _e("VAL_LOSS_EVERY", 0, int) # 0 = no in-training val + train_log_every = _e("TRAIN_LOG_EVERY", 1000, int) + iterations = _e("ITERATIONS", 10000, int) + warmdown_fraction = _e("WARMDOWN_FRACTION", 0.2, float) + min_lr = _e("MIN_LR", 0.0, float) # floor on the LR multiplier (fraction of base LR) + warmup_steps = _e("WARMUP_STEPS", 5, int) + train_batch_tokens = _e("TRAIN_BATCH_TOKENS", 524288, int) + train_seq_len = _e("TRAIN_SEQ_LEN", 1024, int) + max_wallclock_seconds = _e("MAX_WALLCLOCK_SECONDS", 599.0, float) + vocab_size = _e("VOCAB_SIZE", 16384, int) + num_layers = _e("NUM_LAYERS", 10, int) + num_kv_heads = _e("NUM_KV_HEADS", 3, int) + model_dim = _e("MODEL_DIM", 576, int) + num_heads = _e("NUM_HEADS", 6, int) + mlp_mult = _e("MLP_MULT", 4, int) + tie_embeddings = _e("TIE_EMBEDDINGS", 1, int) + rope_base = _e("ROPE_BASE", 5000.0, float) + rope_type = _e("ROPE_TYPE", "yarn") + yarn_max_len = _e("YARN_MAX_LEN", 2048, int) + logit_softcap = _e("LOGIT_SOFTCAP", 10.0, float) + softcap_type = _e("SOFTCAP_TYPE", "poly") + tied_embed_init_std = _e("TIED_EMBED_INIT_STD", 0.005, float) + qk_gain_init = _e("QK_GAIN_INIT", 5.0, float) + activation_type = _e("ACTIVATION", "relu2") + embed_dim = _e("EMBED_DIM", 380, int) + embed_lr = _e("EMBED_LR", 0.6, float) + head_lr = _e("HEAD_LR", 0.02, float) + adam_lr = _e("ADAM_LR", 0.05, float) + adam_wd = _e("ADAM_WD", 0.05, float) + tied_embed_lr = _e("TIED_EMBED_LR", 0.02, float) + seq_len_start = _e("SEQ_LEN_START", 0, int) + seq_schedule_fraction = _e("SEQ_SCHEDULE_FRACTION", 0.0, float) + 
batch_tokens_start = _e("BATCH_TOKENS_START", 0, int) + batch_schedule_fraction = _e("BATCH_SCHEDULE_FRACTION", 0.33, float) + churn_log_every = _e("CHURN_LOG_EVERY", 0, int) + matrix_lr = _e("MATRIX_LR", 0.035, float) + scalar_lr = _e("SCALAR_LR", 0.02, float) + muon_momentum = _e("MUON_MOMENTUM", 0.95, float) + muon_backend_steps = _e("MUON_BACKEND_STEPS", 3, int) + muon_wd = _e("MUON_WD", 0.0, float) + matrix_optimizer = _e("MATRIX_OPTIMIZER", "muon") + muon_momentum_warmup_start = _e("MUON_MOMENTUM_WARMUP_START", 0.85, float) + muon_momentum_warmup_steps = _e("MUON_MOMENTUM_WARMUP_STEPS", 500, int) + beta1 = _e("BETA1", 0.9, float) + beta2 = _e("BETA2", 0.95, float) + adam_eps = _e("ADAM_EPS", 1e-8, float) + grad_clip_norm = _e("GRAD_CLIP_NORM", 0.0, float) + bitnet_group_size = _e("BITNET_GROUP_SIZE", 192, int) + ttt_steps = _e("TTT_STEPS", 3, int) + ttt_lr = _e("TTT_LR", 0.005, float) + ttt_tokens = _e("TTT_TOKENS", 32768, int) + skip_training = _e("SKIP_TRAINING", 0, bool) + scale_quant_bits = _e("SCALE_QUANT_BITS", 5, int) # 5-bit log-delta scale quantization + fp_storage = True # FP8 storage for non-quinary (small) tensors + +# Calibration / fp16-stored scalar+vector params adapted by score-first TTT. +# `scale_correction` was previously listed here but its gradient is blocked by +# the STE detach in QuinaryLinear.forward, so including it as a TTT target was +# a no-op. Excluded from the CTP tuple so the TTT optimizer reflects what +# actually receives gradients (~42k params, not 232k). The parameter still +# exists in the state-dict and is stored fp16 in the artifact (via the ndim<2 +# bucket); it is just not selected for TTT adaptation. 
+CTP = ("attn_scale","mlp_scale","resid_mix","q_gain","skip_weights","vocab_bias") + +def pack_quinary(q: Tensor): + f = (q.reshape(-1).to(torch.int8) + 2).numpy() # {-2..2} -> {0..4} + n = len(f) + p = (3 - n % 3) % 3 + if p: f = np.concatenate([f, np.zeros(p, dtype=np.int8)]) + g = f.reshape(-1, 3).astype(np.uint8) + return (g[:,0] + g[:,1]*5 + g[:,2]*25).tobytes(), n # max = 4+20+100 = 124 + +def unpack_quinary(data: bytes, n: int) -> Tensor: + v = np.frombuffer(data, dtype=np.uint8).astype(np.int16) + t = np.zeros((len(v), 3), dtype=np.int8) + for i in range(3): t[:,i] = v % 5; v //= 5 + return torch.from_numpy(t.reshape(-1)[:n].astype(np.int8) - 2) + + +def pack_quinary_bitmask(q) -> tuple[bytes, int, int]: + """Pack quinary symbols ∈ {-2..2} as three concatenated bit-planes. + + Layout (big-endian within bytes via numpy.packbits): + [zero_mask: ceil(n_total / 8) bytes — 1 bit/symbol, 1=zero] + [sign_bits: ceil(n_nonzero / 8) bytes — 1 bit/nonzero, 1=negative] + [mag2_bits: ceil(n_nonzero / 8) bytes — 1 bit/nonzero, 1=|symbol|==2] + + Returns: (concat_bytes, n_total, n_nonzero). + Each plane has homogeneous bit statistics so the downstream compressor + can model them independently rather than fighting a multimodal mixture. 
+ """ + if isinstance(q, torch.Tensor): + a = q.reshape(-1).to(torch.int8).numpy() + else: + a = np.asarray(q, dtype=np.int8).reshape(-1) + n_total = a.size + nz_mask = (a != 0) # bool, len n_total + n_nonzero = int(nz_mask.sum()) + nz_vals = a[nz_mask] # int8, len n_nonzero + sign_bits = (nz_vals < 0).astype(np.uint8) # 1 = negative + mag2_bits = (np.abs(nz_vals) == 2).astype(np.uint8) + zero_packed = np.packbits((~nz_mask).astype(np.uint8)) # 1 bit means "this symbol is zero" + sign_packed = np.packbits(sign_bits) + mag2_packed = np.packbits(mag2_bits) + return (zero_packed.tobytes() + sign_packed.tobytes() + mag2_packed.tobytes(), + n_total, n_nonzero) + + +def unpack_quinary_bitmask(data: bytes, n_total: int, n_nonzero: int) -> Tensor: + """Inverse of pack_quinary_bitmask. Returns int8 tensor of length n_total.""" + z_len = (n_total + 7) // 8 + s_len = (n_nonzero + 7) // 8 + m_len = (n_nonzero + 7) // 8 + expected = z_len + s_len + m_len + if len(data) != expected: + raise ValueError(f"bitmask data size {len(data)} != expected {expected} " + f"(n_total={n_total}, n_nonzero={n_nonzero})") + z_bytes = np.frombuffer(data[:z_len], dtype=np.uint8) + s_bytes = np.frombuffer(data[z_len:z_len + s_len], dtype=np.uint8) + m_bytes = np.frombuffer(data[z_len + s_len:], dtype=np.uint8) + is_zero = np.unpackbits(z_bytes)[:n_total].astype(bool) + sign = np.unpackbits(s_bytes)[:n_nonzero].astype(bool) + mag2 = np.unpackbits(m_bytes)[:n_nonzero].astype(bool) + nz_vals = np.where(mag2, 2, 1).astype(np.int8) + nz_vals = np.where(sign, -nz_vals, nz_vals) + out = np.zeros(n_total, dtype=np.int8) + out[~is_zero] = nz_vals + return torch.from_numpy(out) + + +# Artifact archive: layout-aware per-stream v2 (header byte 0x03). +# +# - For each quinary tensor: screen the 4 layouts {base5, base5_T, +# bitmask, bitmask_T} by LZMA9-compressed size, then run LZMA9 vs +# lrzip-zpaq -L9 only on the winning layout. Bounded heuristic +# with an LZMA floor — *not* an exhaustive 4×2 search. 
# - For c_qkv.weight: split rows into Q / K / V sub-payloads each chosen
#   independently (Q, K, V have different trained distributions).
# - For other bulk fields (FP8 embeddings, large fp16 tensors): torch.save
#   the value and compress with min(lzma, lrzip).
# - Robust to the seed-dependent lrzip cliff observed on full-blob
#   compression: even if lrzip ZPAQ underperforms on one tensor's bytes,
#   lzma takes over for that tensor specifically.

# Compressor method IDs: stored as the first byte of each bulk payload and
# (for _COMPRESSOR_PER_STREAM_V2) as the archive header byte.
_COMPRESSOR_LZMA = 0
_COMPRESSOR_LRZIP_ZPAQ = 1
_COMPRESSOR_PER_STREAM_V2 = 3

# Threshold for treating a value as "bulk" (compressed independently).
_PER_STREAM_BULK_BYTES = 64 * 1024

# v2 layout IDs — what kind of body is stored in a bulk payload.
_LAYOUT_RAW = 0          # opaque bytes (torch.save output for non-quinary fields)
_LAYOUT_Q_BASE5 = 1      # base-5 packed quinary symbols, canonical row-major order
_LAYOUT_Q_BASE5_T = 2    # base-5 packed quinary symbols, transposed (column-major)
_LAYOUT_Q_BITMASK = 3    # bitmask packed (zero|sign|mag2), canonical
_LAYOUT_Q_BITMASK_T = 4  # bitmask packed, transposed
_LAYOUT_NAMES = {
    _LAYOUT_RAW: "raw",
    _LAYOUT_Q_BASE5: "base5",
    _LAYOUT_Q_BASE5_T: "base5_T",
    _LAYOUT_Q_BITMASK: "bitmask",
    _LAYOUT_Q_BITMASK_T: "bitmask_T",
}

# v2 sentinel for metadata refs (distinct string from v1 to avoid version confusion).
_BULK_SENTINEL_V2 = "__BULK_REF_V2__"


def _lrzip_compress_bytes(data: bytes, level: int = 9) -> bytes:
    """Compress raw bytes via `lrzip -z` (ZPAQ).

    Returns body only (no header).

    Works through the filesystem because lrzip has no streaming API here:
    write `data` to a temp file, invoke the binary, read back `<file>.lrz`.
    Raises RuntimeError (chained from FileNotFoundError) when the lrzip
    binary is not installed.
    """
    # delete=False: the file must survive the `with` so the subprocess can read it.
    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(data); in_path = f.name
    out_path = in_path + ".lrz"  # lrzip's fixed output naming convention
    try:
        try:
            # -z ZPAQ backend, -L level, -q quiet, -f overwrite existing output.
            subprocess.run(["lrzip", "-z", "-L", str(level), "-q", "-f", in_path],
                           check=True, capture_output=True)
        except FileNotFoundError as e:
            raise RuntimeError("lrzip binary not found; run `apt-get install lrzip`") from e
        with open(out_path, "rb") as f:
            return f.read()
    finally:
        # Best-effort cleanup of both temp files; either may be missing on failure.
        for p in (in_path, out_path):
            try: os.unlink(p)
            except FileNotFoundError: pass


def _lrzip_decompress_bytes(body: bytes) -> bytes:
    """Decompress an lrzip ZPAQ body (no header).

    Mirror of _lrzip_compress_bytes: write `body` to `<tmp>.lrz`, run
    `lrzip -d`, read back `<tmp>`. Raises RuntimeError when the lrzip
    binary is unavailable (it is required to load such an artifact).
    """
    with tempfile.NamedTemporaryFile(delete=False, suffix=".lrz") as f:
        f.write(body); in_path = f.name
    out_path = in_path[:-4]  # strip the ".lrz" suffix: lrzip -d writes here
    try:
        try:
            subprocess.run(["lrzip", "-d", "-q", "-f", in_path],
                           check=True, capture_output=True)
        except FileNotFoundError as e:
            raise RuntimeError("lrzip binary not found; required to load this artifact") from e
        with open(out_path, "rb") as f:
            return f.read()
    finally:
        for p in (in_path, out_path):
            try: os.unlink(p)
            except FileNotFoundError: pass
def _pick_best_compressor(data: bytes, level: int = 9, has_lrzip: bool = True) -> tuple[int, bytes]:
    """Try lzma and (optionally) lrzip; return (method_id, body) for the smaller.

    This is the per-stream robustness: even if one compressor cliffs on a
    given byte distribution (the seed=7 issue), the other usually doesn't.
    Any failure on lrzip (missing binary, runtime error, broken backend) is
    silently skipped — lzma is the always-available floor. Ties go to lzma.
    """
    winner = (_COMPRESSOR_LZMA, lzma.compress(data, preset=level))
    if has_lrzip:
        try:
            challenger = _lrzip_compress_bytes(data, level)
        except (RuntimeError, subprocess.CalledProcessError, OSError):
            challenger = None
        if challenger is not None and len(challenger) < len(winner[1]):
            winner = (_COMPRESSOR_LRZIP_ZPAQ, challenger)
    return winner


def _is_bulk(value) -> bool:
    """Decide whether a state_dict field should be peeled off for per-stream compression.

    "Bulk" means raw bytes or a tensor whose storage is at least
    _PER_STREAM_BULK_BYTES; everything else rides along in the metadata blob.
    """
    if isinstance(value, (bytes, bytearray)):
        return len(value) >= _PER_STREAM_BULK_BYTES
    if isinstance(value, torch.Tensor):
        return value.numel() * value.element_size() >= _PER_STREAM_BULK_BYTES
    return False


def _load_artifact(blob: bytes) -> dict:
    """Load a per-stream v2 archive (header byte 0x03) into a state_dict."""
    return _deserialize_per_stream_v2(blob)


# ---------------------------------------------------------------------------
# v2 archive: layout-aware per-stream compression
# ---------------------------------------------------------------------------
def _gen_quinary_layout_candidates(symbols_2d: np.ndarray) -> list[tuple[int, bytes, dict]]:
    """For a 2D int8 symbol matrix in {-2..2}, return every supported layout
    as a (layout_id, body_bytes, layout_meta) tuple.

    layout_meta carries the per-layout fields needed at deserialize time
    (n_quins for base-5 bodies; n_total/n_nonzero for bitmask bodies).
    Candidate order matters downstream: the screener breaks ties by first
    occurrence, so base5 < base5_T < bitmask < bitmask_T is preserved.
    """
    row_major = symbols_2d.reshape(-1)
    col_major = symbols_2d.T.reshape(-1).copy()  # contiguous transpose

    packed_rm, n_rm = pack_quinary(torch.from_numpy(row_major))
    packed_cm, n_cm = pack_quinary(torch.from_numpy(col_major))
    bm_rm, total_rm, nonzero_rm = pack_quinary_bitmask(torch.from_numpy(row_major))
    bm_cm, total_cm, nonzero_cm = pack_quinary_bitmask(torch.from_numpy(col_major))

    return [
        (_LAYOUT_Q_BASE5, packed_rm, {"n_quins": n_rm}),
        (_LAYOUT_Q_BASE5_T, packed_cm, {"n_quins": n_cm}),
        (_LAYOUT_Q_BITMASK, bm_rm, {"n_total": total_rm, "n_nonzero": nonzero_rm}),
        (_LAYOUT_Q_BITMASK_T, bm_cm, {"n_total": total_cm, "n_nonzero": nonzero_cm}),
    ]
+ return body + if layout == _LAYOUT_Q_BASE5_T: + symbols_t = unpack_quinary(body, layout_meta["n_quins"]).numpy() + symbols = symbols_t.reshape(cols, rows).T.reshape(-1).copy() + out, _ = pack_quinary(torch.from_numpy(symbols)) + return out + if layout == _LAYOUT_Q_BITMASK: + symbols = unpack_quinary_bitmask(body, layout_meta["n_total"], layout_meta["n_nonzero"]).numpy() + out, _ = pack_quinary(torch.from_numpy(symbols)) + return out + if layout == _LAYOUT_Q_BITMASK_T: + symbols_t = unpack_quinary_bitmask(body, layout_meta["n_total"], layout_meta["n_nonzero"]).numpy() + symbols = symbols_t.reshape(cols, rows).T.reshape(-1).copy() + out, _ = pack_quinary(torch.from_numpy(symbols)) + return out + raise ValueError(f"unknown layout id {layout}") + + +def _qkv_split_sizes(name: str, rows: int, cols: int) -> tuple[int, int] | None: + """For a c_qkv weight, derive (q_rows, kv_rows) row-counts. + + Standard transformer convention: q_size = num_heads * head_dim = model_dim + (the square attention assumption). So q_rows == cols. The remaining rows + are split equally between K and V: kv_rows = (rows - cols) // 2. + + Returns None if name doesn't look like c_qkv or the ratio doesn't decompose + cleanly (in which case we don't split, just compress as one tensor). + """ + if not name.endswith("c_qkv.weight"): + return None + if rows <= cols: + return None # not a GQA shape; or single-head case where Q==K==V + extra = rows - cols # 2 * kv_rows + if extra % 2 != 0: + return None + kv_rows = extra // 2 + q_rows = cols + if q_rows + 2 * kv_rows != rows: + return None + return q_rows, kv_rows + + +def _serialize_per_stream_v2(state_dict: dict, level: int = 9) -> tuple[bytes, str]: + """v2 archive: layout-aware per-stream compression. + + For each quinary entry, generate up to 4 layout candidates + (base5, base5_T, bitmask, bitmask_T), screen them by LZMA9-compressed + size, then compress the winning layout with min(LZMA9, lrzip-zpaq). 
def _serialize_per_stream_v2(state_dict: dict, level: int = 9) -> tuple[bytes, str]:
    """v2 archive: layout-aware per-stream compression.

    For each quinary entry, generate up to 4 layout candidates
    (base5, base5_T, bitmask, bitmask_T), screen them by LZMA9-compressed
    size, then compress the winning layout with min(LZMA9, lrzip-zpaq).
    For c_qkv weights, the row-block is split into Q/K/V sub-payloads
    handled independently. All other bulk fields use the v1 raw-bytes path.

    This is a bounded-cost heuristic with an LZMA floor — *not* an
    exhaustive 4×2 search. It can in principle miss a (layout, compressor)
    pair where the LZMA-screen-loser would have won under lrzip; in
    practice this is rare on this stack and the LZMA floor caps the
    worst case at the canonical base5+LZMA encoding.

    Returns (archive_bytes, audit_string) so the caller can route the
    audit line through the run's logger.
    """
    from shutil import which
    # Probe once up front; every _pick_best_compressor call reuses the answer.
    has_lrzip = which("lrzip") is not None

    bulk_bodies: list[tuple[int, bytes]] = []  # (compressor_method, compressed_body)
    metadata: dict = {}
    audit_lines: list[str] = []
    # NOTE(review): audit_lines is populated below but never folded into the
    # returned audit string — looks vestigial; confirm before removing.
    layout_counts = {n: 0 for n in _LAYOUT_NAMES.values()}
    method_counts = {"lzma": 0, "lrzip": 0}
    qkv_split_count = 0

    def _store(layout: int, body: bytes) -> tuple[int, int, int]:
        """Compress and store; return (payload_idx, method_id, compressed_size)."""
        # NOTE(review): currently unused — both call sites inline this logic.
        method, comp = _pick_best_compressor(body, level, has_lrzip)
        bulk_bodies.append((method, comp))
        method_counts["lzma" if method == _COMPRESSOR_LZMA else "lrzip"] += 1
        layout_counts[_LAYOUT_NAMES[layout]] += 1
        return len(bulk_bodies) - 1, method, len(comp)

    def _best_quinary_layout(symbols_2d: np.ndarray) -> tuple[int, int, int, dict, int]:
        """Generate all layout candidates, screen with lzma to pick best layout,
        then compress winner with min(lzma, lrzip) for the final body.

        Cheap-screen + refine cuts serialize compute from O(layouts*compressors)
        to O(layouts) lzma + 1 lrzip, keeping serialize time bounded even when
        lrzip is slow on bad streams.

        Returns (chosen_layout, payload_idx, compressed_size, layout_meta, n_candidates_evaluated).
        """
        cands = _gen_quinary_layout_candidates(symbols_2d)
        best_layout, best_body, best_meta = None, None, None
        best_lzma_size = None
        for layout, body, meta in cands:
            # Screening pass: strictly-smaller wins, so ties keep the earliest
            # candidate (canonical base5 first).
            lzma_size = len(lzma.compress(body, preset=level))
            if best_lzma_size is None or lzma_size < best_lzma_size:
                best_lzma_size = lzma_size
                best_layout, best_body, best_meta = layout, body, meta
        # Now compress the winning layout with min(lzma, lrzip) for the actual stored body.
        comp_method, comp_body = _pick_best_compressor(best_body, level, has_lrzip)
        bulk_bodies.append((comp_method, comp_body))
        method_counts["lzma" if comp_method == _COMPRESSOR_LZMA else "lrzip"] += 1
        layout_counts[_LAYOUT_NAMES[best_layout]] += 1
        return best_layout, len(bulk_bodies) - 1, len(comp_body), best_meta, len(cands)

    for name, entry in state_dict.items():
        if not isinstance(entry, dict):
            # Non-entry metadata (plain values) passes through untouched.
            metadata[name] = entry
            continue
        new_entry = dict(entry)  # shallow copy

        if entry.get("type") == "quinary" and "packed" in entry:
            # Reconstruct full symbol matrix from canonical packed bytes.
            rows, cols = entry["shape"][0], entry["padded_cols"]
            symbols_full = unpack_quinary(entry["packed"], entry["n_quins"]).numpy()
            symbols_2d = symbols_full[:rows * cols].reshape(rows, cols)

            split = _qkv_split_sizes(name, rows, cols)
            if split is not None:
                q_rows, kv_rows = split
                qkv_split_count += 1
                refs = []
                # Q, K, V row-blocks are compressed as three independent payloads,
                # each free to pick its own layout.
                for sub_name, sub_2d in (
                        ("Q", symbols_2d[:q_rows]),
                        ("K", symbols_2d[q_rows:q_rows + kv_rows]),
                        ("V", symbols_2d[q_rows + kv_rows:]),
                ):
                    layout, idx, sz, meta, _ = _best_quinary_layout(sub_2d.copy())
                    sub_rows, sub_cols = sub_2d.shape
                    refs.append({
                        "_v2": _BULK_SENTINEL_V2, "idx": idx, "layout": layout,
                        "rows": sub_rows, "cols": sub_cols, "meta": meta,
                    })
                new_entry["packed"] = ("__QKV_SPLIT__", refs)
                audit_lines.append(f" {name}: qkv_split layouts={[_LAYOUT_NAMES[r['layout']] for r in refs]}")
            else:
                layout, idx, sz, meta, _ = _best_quinary_layout(symbols_2d.copy())
                new_entry["packed"] = {
                    "_v2": _BULK_SENTINEL_V2, "idx": idx, "layout": layout,
                    "rows": rows, "cols": cols, "meta": meta,
                }
                audit_lines.append(f" {name}: {_LAYOUT_NAMES[layout]} ({sz/1e6:.3f}MB)")

        # Other bulk fields (fp8 data, fp16 large tensors, scale_delta tensors,
        # etc.) — fall back to v1's torch.save+pick path.
        for key, value in list(new_entry.items()):
            if key == "packed":  # already handled
                continue
            if _is_bulk(value):
                buf = io.BytesIO(); torch.save(value, buf)
                method, comp = _pick_best_compressor(buf.getvalue(), level, has_lrzip)
                bulk_bodies.append((method, comp))
                method_counts["lzma" if method == _COMPRESSOR_LZMA else "lrzip"] += 1
                layout_counts["raw"] += 1
                new_entry[key] = {
                    "_v2": _BULK_SENTINEL_V2, "idx": len(bulk_bodies) - 1,
                    "layout": _LAYOUT_RAW,
                }
        metadata[name] = new_entry

    # Compress metadata (the small residual dict with payload refs in place
    # of bulk bytes) with plain lzma.
    meta_buf = io.BytesIO(); torch.save(metadata, meta_buf)
    meta_compressed = lzma.compress(meta_buf.getvalue(), preset=level)

    # Assemble archive (header 0x03):
    #   [1B header][4B meta len][meta][4B n payloads] then per payload
    #   [1B method][4B body len][body]; all lengths little-endian.
    out = bytearray()
    out.append(_COMPRESSOR_PER_STREAM_V2)
    out.extend(len(meta_compressed).to_bytes(4, "little"))
    out.extend(meta_compressed)
    out.extend(len(bulk_bodies).to_bytes(4, "little"))
    for method, body in bulk_bodies:
        out.append(method)
        out.extend(len(body).to_bytes(4, "little"))
        out.extend(body)

    layout_summary = ", ".join(f"{n}={c}" for n, c in layout_counts.items() if c)
    audit = (f"per_stream_v2: meta={len(meta_compressed)/1e6:.2f}MB, "
             f"{len(bulk_bodies)} bulk payloads "
             f"(lzma={method_counts['lzma']}, lrzip={method_counts['lrzip']}; "
             f"{layout_summary}), qkv_splits={qkv_split_count}, "
             f"total={len(out)/1e6:.2f}MB")
    return bytes(out), audit


def _deserialize_per_stream_v2(blob: bytes) -> dict:
    """Inverse of _serialize_per_stream_v2. Returns a state_dict whose quinary
    entries have canonical entry["packed"] bytes — `deq_sd` is unaware of the
    archive layer."""
    cursor = 0
    assert blob[cursor] == _COMPRESSOR_PER_STREAM_V2, \
        f"expected v2 header 0x03, got {blob[0]:#x}"
    cursor += 1
    meta_size = int.from_bytes(blob[cursor:cursor + 4], "little"); cursor += 4
    meta_blob = blob[cursor:cursor + meta_size]; cursor += meta_size
    # weights_only=False: the metadata dict contains arbitrary Python values
    # (refs, tuples) — trusted self-produced artifact only.
    metadata = torch.load(io.BytesIO(lzma.decompress(meta_blob)),
                          map_location="cpu", weights_only=False)
    n_payloads = int.from_bytes(blob[cursor:cursor + 4], "little"); cursor += 4
    # Decompress every bulk payload up front, indexed by position.
    decompressed: list[bytes] = []
    for _ in range(n_payloads):
        method = blob[cursor]; cursor += 1
        size = int.from_bytes(blob[cursor:cursor + 4], "little"); cursor += 4
        body = blob[cursor:cursor + size]; cursor += size
        if method == _COMPRESSOR_LZMA:
            decompressed.append(lzma.decompress(body))
        elif method == _COMPRESSOR_LRZIP_ZPAQ:
            decompressed.append(_lrzip_decompress_bytes(body))
        else:
            raise ValueError(f"unknown compressor method {method:#x}")

    def _is_v2_ref(v):
        return isinstance(v, dict) and v.get("_v2") == _BULK_SENTINEL_V2

    # Splice payloads back into the metadata tree, converting every layout
    # back to canonical base-5 bytes as we go.
    for name, entry in metadata.items():
        if not isinstance(entry, dict):
            continue
        for key, value in list(entry.items()):
            if _is_v2_ref(value):
                if value["layout"] == _LAYOUT_RAW:
                    entry[key] = torch.load(io.BytesIO(decompressed[value["idx"]]),
                                            map_location="cpu", weights_only=False)
                else:
                    entry[key] = _layout_to_canonical_bytes(
                        value["layout"], decompressed[value["idx"]],
                        value["rows"], value["cols"], value["meta"])
            elif (isinstance(value, tuple) and len(value) == 2
                  and value[0] == "__QKV_SPLIT__"):
                # Reconstruct each part's symbol matrix, concat along row dim,
                # repack as canonical base-5.
                refs = value[1]
                sub_symbols = []
                for ref in refs:
                    canonical_body = _layout_to_canonical_bytes(
                        ref["layout"], decompressed[ref["idx"]],
                        ref["rows"], ref["cols"], ref["meta"])
                    n = ref["rows"] * ref["cols"]
                    sub_syms = unpack_quinary(canonical_body, n).numpy()[:n]
                    sub_symbols.append(sub_syms.reshape(ref["rows"], ref["cols"]))
                full = np.concatenate(sub_symbols, axis=0).reshape(-1).copy()
                entry[key], _ = pack_quinary(torch.from_numpy(full))
    return metadata
Returns dict with + `scale_anchor` (fp16), `scale_step` (fp16), `scale_delta` (packed bytes + or int8 tensor), plus `scale_bits` to dispatch in deq_sd.""" + scale_f = scale_fp16.float().reshape(-1) + n_groups = scale_f.numel() + log_scale = torch.log2(scale_f.clamp(min=1e-12)) + lo, hi = float(log_scale.min().item()), float(log_scale.max().item()) + anchor = (lo + hi) / 2.0 + spread = hi - lo + levels = 1 << bits + half = levels // 2 + step = max(spread / max(levels - 1, 1), 1e-9) + delta = ((log_scale - anchor) / step).round().clamp(-half, half - 1).to(torch.int32) + if bits == 4: + d = (delta + half).to(torch.uint8).numpy() + if len(d) % 2: + d = np.concatenate([d, np.zeros(1, dtype=np.uint8)]) + packed = (d[::2] | (d[1::2] << 4)).astype(np.uint8) + delta_stored = torch.from_numpy(packed.copy()) + elif bits == 5: + delta_stored = (delta + half).to(torch.uint8) + else: # 8 + delta_stored = delta.to(torch.int8) + return { + "scale_anchor": torch.tensor(anchor, dtype=torch.float16), + "scale_step": torch.tensor(step, dtype=torch.float16), + "scale_delta": delta_stored, + "scale_n_groups": n_groups, + "scale_bits": bits, + } + +def _dequant_scale(entry: dict) -> Tensor: + """Reconstruct fp16 per-group scales from the quantized representation.""" + bits = int(entry["scale_bits"]) + half = (1 << bits) // 2 + n_groups = int(entry["scale_n_groups"]) + if bits == 4: + packed = entry["scale_delta"].to(torch.uint8).numpy() + low = packed & 0x0F + high = (packed >> 4) & 0x0F + d = np.empty(packed.size * 2, dtype=np.uint8) + d[0::2] = low + d[1::2] = high + delta = torch.from_numpy(d[:n_groups].astype(np.int32) - half) + elif bits == 5: + delta = entry["scale_delta"].to(torch.int32) - half + else: # 8 + delta = entry["scale_delta"].to(torch.int32) + anchor = entry["scale_anchor"].float() + step = entry["scale_step"].float() + return (2.0 ** (anchor + delta.float() * step)).to(torch.float16) + + +def q_sd(state_dict: dict, group_size: int = 64, + scale_quant_bits: int = 0) -> 
def q_sd(state_dict: dict, group_size: int = 64,
         scale_quant_bits: int = 0) -> tuple[dict, dict]:
    """Quantize a dense state_dict: big 2D matrices to grouped quinary
    {-2,-1,0,+1,+2}, other 2D tensors to fp8 (e4m3), everything else to fp16.

    scale_quant_bits in (4, 5, 8) additionally quantizes the per-group
    scales via _quant_scale; 0 stores them as raw fp16.
    Returns (quantized_dict, byte/param stats dict).
    """
    quantized = {}
    stats = {"quinary_params": 0, "quinary_bytes": 0, "fp_params": 0, "fp_bytes": 0}
    for name, tensor in state_dict.items():
        t = tensor.detach().cpu().float().contiguous()
        t_orig_shape = list(t.shape)
        # 3D tensors are flattened to 2D for grouping; orig_shape lets
        # deq_sd restore them.
        if t.ndim == 3:
            t = t.reshape(t.shape[0], -1)
        # Embeddings / head stay out of the quinary path (they're fp8 below).
        is_quantized_candidate = (
            t.ndim == 2 and t.numel() > 65_536
            and "tok_emb" not in name and "lm_head" not in name and "embed_proj" not in name
        )
        if is_quantized_candidate:
            # Right-pad columns so rows split evenly into groups of group_size.
            pad = (group_size - t.shape[1] % group_size) % group_size
            t_padded = F.pad(t, (0, pad)) if pad > 0 else t
            t_grouped = t_padded.reshape(-1, group_size)
            # Per-group scale = mean |w|, stored at fp16 precision (the
            # .half().float() round-trip bakes that precision into q too).
            scale = t_grouped.abs().mean(-1, keepdim=True).clamp(min=1e-8).half().float()
            q = (t_grouped / scale).round().clamp(-2, 2).to(torch.int8)

            packed_bytes, n_packed = pack_quinary(q)
            stats["quinary_params"] += t.numel()

            entry = {
                "type": "quinary", "packed": packed_bytes,
                "shape": list(t.shape), "padded_cols": t_padded.shape[1],
                "group_size": group_size, "n_quins": n_packed,
                "orig_shape": t_orig_shape,
            }
            if scale_quant_bits and scale_quant_bits in (4, 5, 8):
                sq = _quant_scale(scale.half().squeeze(-1), scale_quant_bits)
                entry.update(sq)
                stats["quinary_bytes"] += len(packed_bytes) + sq["scale_delta"].numel() + 4  # +4 for anchor+step
            else:
                entry["scale"] = scale.half().squeeze(-1)
                stats["quinary_bytes"] += len(packed_bytes) + scale.numel() * 2
            quantized[name] = entry
        elif t.ndim == 2:
            # Small/excluded matrices: 1 byte per param as fp8.
            quantized[name] = {"type": "fp8", "data": t.to(torch.float8_e4m3fn)}
            stats["fp_params"] += t.numel()
            stats["fp_bytes"] += t.numel()
        else:
            # Vectors/scalars: fp16, 2 bytes per param.
            quantized[name] = {"type": "fp16", "data": t.half()}
            stats["fp_params"] += t.numel()
            stats["fp_bytes"] += t.numel() * 2
    return quantized, stats
def deq_sd(quantized: dict, target_dtype=torch.bfloat16):
    """Reconstruct a dense state_dict from q_sd's quantized form.

    Quinary entries are decoded group-wise; the stored scale is divided by
    the mean |symbol| of each group so the reconstructed group keeps the
    original mean-absolute magnitude. fp8/fp16 entries are just cast.
    """
    restored = {}
    for name, entry in quantized.items():
        kind = entry["type"]
        if kind == "fp8":
            restored[name] = entry["data"].to(torch.float32).to(target_dtype).contiguous()
            continue
        if kind != "quinary":  # fp16 fallthrough
            restored[name] = entry["data"].to(target_dtype).contiguous()
            continue
        groups = unpack_quinary(entry["packed"], entry["n_quins"]).float()
        groups = groups.reshape(-1, entry["group_size"])
        if "scale_bits" in entry:
            group_scale = _dequant_scale(entry).float().unsqueeze(-1)
        else:
            group_scale = entry["scale"].float().unsqueeze(-1)
        sym_absmean = groups.abs().mean(-1, keepdim=True).clamp(min=1e-8)
        dense = (groups * (group_scale / sym_absmean)).reshape(-1, entry["padded_cols"])
        shape = entry["shape"]
        # Drop the column padding introduced at quantize time.
        cropped = dense[:shape[0], :shape[1]].to(target_dtype)
        orig = entry.get("orig_shape")
        if orig and orig != shape:
            restored[name] = cropped.reshape(orig).contiguous()
        else:
            restored[name] = cropped.contiguous()
    return restored


def quin_stats(model: nn.Module, group_size: int = 64):
    """Report the fraction of eligible weights that quantize to symbol 0."""
    n_seen = 0
    n_zero = 0
    with torch.no_grad():
        for name, p in model.named_parameters():
            eligible = (p.ndim == 2 and ("weight" in name or "prototypes" in name)
                        and p.shape[0] > 1 and p.numel() % group_size == 0)
            if not eligible:
                continue
            grouped = p.detach().float().reshape(-1, group_size)
            s = grouped.abs().mean(-1, keepdim=True).clamp(min=1e-8).half().float()
            symbols = (grouped / s).round().clamp(-2, 2)
            n_zero += int((symbols == 0).sum().item())
            n_seen += int(symbols.numel())
    return {"zero_frac": n_zero / max(n_seen, 1), "total_weights": n_seen}


# Module-level cache of the last committed symbol matrices, keyed by param name.
_prev_committed: dict = {}


def churn_fn(model: nn.Module, group_size: int = 64):
    """Fraction of quinary symbols that flipped since the previous call.

    First call for a parameter just seeds the cache (counts 0 flips for it).
    """
    global _prev_committed
    n_total = 0
    n_flipped = 0
    with torch.no_grad():
        for name, p in model.named_parameters():
            eligible = (p.ndim == 2 and ("weight" in name or "prototypes" in name)
                        and p.shape[0] > 1 and p.numel() % group_size == 0)
            if not eligible:
                continue
            grouped = p.detach().float().reshape(-1, group_size)
            s = grouped.abs().mean(-1, keepdim=True).clamp(min=1e-8).half().float()
            symbols = (grouped / s).round().clamp(-2, 2).cpu().numpy()
            if name in _prev_committed:
                n_flipped += int(np.sum(symbols != _prev_committed[name]))
            n_total += symbols.size
            _prev_committed[name] = symbols
    return n_flipped / max(n_total, 1)
def ns_orth(G: Tensor, steps: int = 10, eps: float = 1e-7) -> Tensor:
    """Approximately orthogonalize G via a quintic Newton-Schulz iteration.

    Runs in bfloat16 on a norm-scaled copy; the (a, b, c) coefficients are
    the tuned quintic from the Muon optimizer lineage. Tall matrices are
    transposed first so the iteration works on the short side.
    """
    a, b, c = (3.4445, -4.7750, 2.0315)
    X = G.bfloat16()
    X /= X.norm() + eps  # spectral pre-scaling so the iteration converges
    transposed = G.size(0) > G.size(1)
    if transposed:
        X = X.T
    for _ in range(steps):
        A = X @ X.T
        B = b * A + c * A @ A
        X = a * X + B @ X
    return X.T if transposed else X


class Muon(torch.optim.Optimizer):
    """Momentum + Newton-Schulz orthogonalized updates, sharded across ranks.

    Each rank computes updates for its slice of the params (round-robin by
    index), writes them into a flat bf16 buffer, and an all-reduce SUM
    assembles the full update on every rank.
    """

    def __init__(self, params, lr: float, momentum: float, backend_steps: int, nesterov: bool = True, wd: float = 0.0):
        super().__init__(params, dict(lr=lr, momentum=momentum, backend_steps=backend_steps, nesterov=nesterov, wd=wd))

    @torch.no_grad()
    def step(self, closure=None):
        loss = None
        if closure is not None:
            with torch.enable_grad():
                loss = closure()
        distributed = dist.is_available() and dist.is_initialized()
        world_size = dist.get_world_size() if distributed else 1
        rank = dist.get_rank() if distributed else 0
        for group in self.param_groups:
            params = group["params"]
            if not params:
                continue
            lr, momentum = group["lr"], group["momentum"]
            backend_steps, nesterov = group["backend_steps"], group["nesterov"]
            total_params = sum(int(p.numel()) for p in params)
            # Flat buffer holding every param's update slot; non-owned slots
            # stay zero so the all-reduce SUM splices rank contributions.
            updates_flat = torch.zeros(total_params, device=params[0].device, dtype=torch.bfloat16)
            curr = 0
            for i, p in enumerate(params):
                if i % world_size == rank and p.grad is not None:
                    g = p.grad
                    state = self.state[p]
                    if "momentum_buffer" not in state:
                        state["momentum_buffer"] = torch.zeros_like(g)
                    buf = state["momentum_buffer"]
                    buf.mul_(momentum).add_(g)
                    if nesterov:
                        g = g.add(buf, alpha=momentum)
                    g = F.rms_norm(g.float(), (g.size(-1),)).bfloat16()
                    g = ns_orth(g, steps=backend_steps)
                    # Scale tall matrices up by sqrt(rows/cols) (never below 1).
                    g *= max(1, g.size(0) / g.size(1)) ** 0.5
                    updates_flat[curr:curr + p.numel()] = g.reshape(-1)
                # Offset advances for EVERY param so all ranks agree on the
                # slot table regardless of ownership.
                curr += p.numel()
            if distributed:
                dist.all_reduce(updates_flat, op=dist.ReduceOp.SUM)
            wd = group.get("wd", 0.0)
            curr = 0
            for p in params:
                g = updates_flat[curr : curr + p.numel()].view_as(p).to(dtype=p.dtype)
                if wd > 0:
                    p.mul_(1 - lr * wd)  # decoupled weight decay
                p.add_(g, alpha=-lr)
                curr += p.numel()
        return loss
ld_shard(file: Path) -> Tensor: + header_bytes = 256 * np.dtype(" Tensor: + chunks = [] + remaining = n + while remaining > 0: + avail = self.tokens.numel() - self.pos + if avail <= 0: + self._advance_file() + continue + k = min(remaining, avail) + chunks.append(self.tokens[self.pos:self.pos + k]) + self.pos += k + remaining -= k + return chunks[0] if len(chunks) == 1 else torch.cat(chunks) + +class DistributedTokenLoader: + def __init__(self, pattern: str, rank: int, world_size: int, device: torch.device): + self.rank, self.world_size, self.device = rank, world_size, device + self.stream = TokenStream(pattern) + + def next_batch(self, global_tokens: int, seq_len: int, grad_accum_steps: int) -> tuple[Tensor, Tensor]: + local_tokens = global_tokens // (self.world_size * grad_accum_steps) + per_rank_span = local_tokens + 1 + chunk = self.stream.take(per_rank_span * self.world_size) + start = self.rank * per_rank_span + local = chunk[start:start + per_rank_span].pin_memory().to(self.device, non_blocking=True).to(torch.int64) + x = local[:-1].reshape(-1, seq_len) + y = local[1:].reshape(-1, seq_len) + return x, y + +class RMSNorm(nn.Module): + def __init__(self, eps: float | None = None): + super().__init__() + self.eps = eps + + def forward(self, x: Tensor) -> Tensor: + return F.rms_norm(x, (x.size(-1),), eps=self.eps) + +def apply_fp8_ste(w: Tensor) -> Tensor: + w_sim = w.to(torch.float8_e4m3fn).to(w.dtype) + return (w_sim - w).detach() + w + +class QATLinear(nn.Linear): + def forward(self, x: Tensor) -> Tensor: + w_qat = apply_fp8_ste(self.weight) + return F.linear(x, w_qat.to(x.dtype), self.bias.to(x.dtype) if self.bias is not None else None) + +class QATEmbedding(nn.Embedding): + def forward(self, input: Tensor) -> Tensor: + w_qat = apply_fp8_ste(self.weight) + return F.embedding(input, w_qat, self.padding_idx, self.max_norm, + self.norm_type, self.scale_grad_by_freq, self.sparse) + +class QuinaryLinear(nn.Linear): + def __init__(self, in_features, out_features, 
bias=False, group_size=64): + super().__init__(in_features, out_features, bias=bias) + self.group_size = group_size + num_groups = (in_features * out_features) // group_size + # Inert by design: the STE detach below blocks gradients to + # `scale_correction`. Kept as a fp32 buffer at value 1.0 for + # backwards-compatibility with the ternary-base state-dict layout. + # An attempt to fix the STE so this parameter receives gradients + # was tested 2026-05-01 (commit b9c…) and showed a small + # training-time regression with no TTT benefit, so reverted. + self.scale_correction = nn.Parameter(torch.ones(num_groups, dtype=torch.float32)) + + def forward(self, x: Tensor) -> Tensor: + w = self.weight.bfloat16() + g = self.group_size + w_g = w.reshape(-1, g) + scale = w_g.abs().mean(-1, keepdim=True).clamp(min=1e-8) * self.scale_correction.to(w.dtype).unsqueeze(-1) + q = (w_g / scale).round().clamp(-2, 2) + w_quantized = w + ((q * scale).reshape(w.shape) - w).detach() + return F.linear(x, w_quantized, + self.bias.to(x.dtype) if self.bias is not None else None) + + +class NormedQuinaryLinear(QuinaryLinear): + def forward(self, x: Tensor) -> Tensor: + return super().forward(F.rms_norm(x, (x.size(-1),))) + +def restore_low_dim_params_to_fp32(module: nn.Module) -> None: + with torch.no_grad(): + for name, param in module.named_parameters(): + if (param.ndim < 2 or any(p in name for p in CTP)) and param.dtype != torch.float32: + param.data = param.data.float() + +class Rotary(nn.Module): + def __init__(self, dim: int, base: float = 10000.0, no_cache: bool = False, + rope_type: str = "rope", yarn_max_len: int = 4096, train_seq_len: int = 1024): + super().__init__() + self.no_cache = no_cache + inv_freq = 1.0 / (base ** (torch.arange(0, dim, 2, dtype=torch.float32) / dim)) + if rope_type == "yarn": + scale = train_seq_len / yarn_max_len + freq_idx = torch.arange(0, dim, 2, dtype=torch.float32) + ramp = torch.clamp((freq_idx / dim - 0.25) / 0.75, 0.0, 1.0) + inv_freq = inv_freq / 
class Rotary(nn.Module):
    """Rotary position embedding tables with optional YaRN frequency scaling.

    forward(seq_len, device, dtype) returns (cos, sin), each shaped
    (1, seq_len, 1, dim/2). Tables are cached per (seq_len, device) unless
    `no_cache` is set.
    """

    def __init__(self, dim: int, base: float = 10000.0, no_cache: bool = False,
                 rope_type: str = "rope", yarn_max_len: int = 4096, train_seq_len: int = 1024):
        super().__init__()
        self.no_cache = no_cache
        inv_freq = 1.0 / (base ** (torch.arange(0, dim, 2, dtype=torch.float32) / dim))
        if rope_type == "yarn":
            # YaRN: interpolate the low-frequency bands by train/max ratio,
            # leaving the first quarter of bands (ramp==0) untouched.
            scale = train_seq_len / yarn_max_len
            freq_idx = torch.arange(0, dim, 2, dtype=torch.float32)
            ramp = torch.clamp((freq_idx / dim - 0.25) / 0.75, 0.0, 1.0)
            inv_freq = inv_freq / (ramp * (1.0 / scale - 1.0) + 1.0)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self._seq_len_cached = 0
        self._cos_cached: Tensor | None = None
        self._sin_cached: Tensor | None = None

    def _tables(self, seq_len, device):
        """Build fresh (cos, sin) tables, broadcast-shaped for (B, T, H, D/2)."""
        t = torch.arange(seq_len, device=device, dtype=self.inv_freq.dtype)
        freqs = torch.outer(t, self.inv_freq.to(device))
        return freqs.cos()[None, :, None, :], freqs.sin()[None, :, None, :]

    def forward(self, seq_len, device, dtype):
        if self.no_cache:
            cos, sin = self._tables(seq_len, device)
            return cos.to(dtype=dtype), sin.to(dtype=dtype)
        stale = (self._cos_cached is None
                 or self._sin_cached is None
                 or self._seq_len_cached != seq_len
                 or self._cos_cached.device != device)
        if stale:
            self._cos_cached, self._sin_cached = self._tables(seq_len, device)
            self._seq_len_cached = seq_len
        return self._cos_cached.to(dtype=dtype), self._sin_cached.to(dtype=dtype)


def apply_rotary_emb(x: Tensor, cos: Tensor, sin: Tensor) -> Tensor:
    """Rotate the two halves of x's last dimension by the cos/sin tables."""
    half = x.size(-1) // 2
    lo, hi = x[..., :half], x[..., half:]
    rotated_lo = lo * cos + hi * sin
    rotated_hi = lo * (-sin) + hi * cos
    return torch.cat((rotated_lo, rotated_hi), dim=-1)
class CausalSelfAttention(nn.Module):
    """GQA causal self-attention: quinary-quantized QKV/output projections,
    QK RMS-norm, rotary embeddings, per-head Q gain, FlashAttention kernel."""

    def __init__(self, dim, num_heads, num_kv_heads, rope_base, qk_gain_init,
                 group_size=64, no_cache=False, rope_type="rope",
                 yarn_max_len=4096, train_seq_len=1024):
        super().__init__()
        self.num_heads, self.num_kv_heads = num_heads, num_kv_heads
        self.head_dim = dim // num_heads
        self.q_size = self.num_heads * self.head_dim
        self.kv_size = self.num_kv_heads * self.head_dim

        # One fused projection for Q, K and V (split again in forward).
        self.c_qkv = QuinaryLinear(dim, self.q_size + 2 * self.kv_size, bias=False, group_size=group_size)
        self.proj = NormedQuinaryLinear(dim, dim, bias=False, group_size=group_size)
        self.proj._zero_init = True  # output proj starts at zero (residual-friendly)
        # Learned per-head gain applied to Q after rotary.
        self.q_gain = nn.Parameter(torch.full((num_heads,), qk_gain_init, dtype=torch.float32))
        self.rotary = Rotary(self.head_dim, base=rope_base, no_cache=no_cache,
                             rope_type=rope_type, yarn_max_len=yarn_max_len,
                             train_seq_len=train_seq_len)

    def forward(self, x: Tensor) -> Tensor:
        bsz, seqlen, dim = x.shape
        qkv_out = self.c_qkv(x)
        q_out, k_out, v_out = qkv_out.split([self.q_size, self.kv_size, self.kv_size], dim=-1)
        q = q_out.reshape(bsz, seqlen, self.num_heads, self.head_dim)
        k = k_out.reshape(bsz, seqlen, self.num_kv_heads, self.head_dim)
        v = v_out.reshape(bsz, seqlen, self.num_kv_heads, self.head_dim)
        # Order matters: QK-norm first, then rotary, then the Q gain.
        q, k = F.rms_norm(q, (q.size(-1),)), F.rms_norm(k, (k.size(-1),))
        cos, sin = self.rotary(seqlen, x.device, q.dtype)
        q, k = apply_rotary_emb(q, cos, sin), apply_rotary_emb(k, cos, sin)
        q = q * self.q_gain.to(dtype=q.dtype)[None, None, :, None]
        # flash_attn_func expects (B, T, H, D) contiguous inputs; with
        # num_kv_heads < num_heads this relies on its GQA head-broadcast.
        y = flash_attn_func(q.contiguous(), k.contiguous(), v.contiguous(), causal=True)
        y = y.reshape(bsz, seqlen, dim)
        return self.proj(y)


class MLP(nn.Module):
    """Feed-forward block with selectable activation; quinary projections.

    "swiglu" uses a fused gate_up matrix; the other activations use a single
    fc matrix. The output projection is zero-initialized and input-normed.
    """

    def __init__(self, dim, mlp_mult, group_size=64, activation="swiglu"):
        super().__init__()
        hidden = mlp_mult * dim
        self.activation = activation
        if activation == "swiglu":
            # 2x hidden: first half gate, second half up-projection.
            self.gate_up = QuinaryLinear(dim, hidden * 2, bias=False, group_size=group_size)
        else:
            self.fc = QuinaryLinear(dim, hidden, bias=False, group_size=group_size)
        self.proj = NormedQuinaryLinear(hidden, dim, bias=False, group_size=group_size)
        self.proj._zero_init = True

    def forward(self, x: Tensor) -> Tensor:
        if self.activation == "swiglu":
            gu = self.gate_up(x)
            gate, up = gu.chunk(2, dim=-1)
            return self.proj(F.silu(gate) * up)
        elif self.activation == "relu":
            return self.proj(torch.relu(self.fc(x)))
        elif self.activation == "leaky_relu":
            return self.proj(F.leaky_relu(self.fc(x), negative_slope=0.01))
        elif self.activation == "leaky_relu2":
            return self.proj(F.leaky_relu(self.fc(x), negative_slope=0.5).square())
        else:  # relu2 (squared ReLU) is the default fallback
            return self.proj(torch.relu(self.fc(x)).square())
class Block(nn.Module):
    """Pre-norm transformer block with learned residual scales and a learned
    per-channel mix between the running residual and the embedding stream x0."""

    def __init__(self, dim: int, num_heads: int, num_kv_heads: int, mlp_mult: int,
                 rope_base: float, qk_gain_init: float, group_size: int = 64,
                 activation: str = "swiglu", no_cache: bool = False,
                 rope_type: str = "rope", yarn_max_len: int = 4096,
                 train_seq_len: int = 1024):
        super().__init__()
        self.attn_norm = RMSNorm()
        self.mlp_norm = RMSNorm()
        self.attn = CausalSelfAttention(dim, num_heads, num_kv_heads, rope_base, qk_gain_init,
                                        group_size, no_cache, rope_type, yarn_max_len, train_seq_len)
        self.mlp = MLP(dim, mlp_mult, group_size, activation)
        # Per-channel gates on the two residual branches, init to identity.
        self.attn_scale = nn.Parameter(torch.ones(dim, dtype=torch.float32))
        self.mlp_scale = nn.Parameter(torch.ones(dim, dtype=torch.float32))
        # resid_mix[0] weights the running residual x, resid_mix[1] weights
        # the layer-0 embedding stream x0; init = pure residual.
        self.resid_mix = nn.Parameter(torch.stack((torch.ones(dim), torch.zeros(dim))).float())

    def forward(self, x: Tensor, x0: Tensor) -> Tensor:
        mix = self.resid_mix.to(dtype=x.dtype)
        x_in = mix[0] * x + mix[1] * x0
        n = self.attn_norm(x_in)
        x = x_in + self.attn_scale.to(dtype=x_in.dtype) * self.attn(n)
        x = x + self.mlp_scale.to(dtype=x.dtype) * self.mlp(self.mlp_norm(x))
        return x
activation, no_cache, rope_type, yarn_max_len, train_seq_len) + for _ in range(num_layers) + ]) + + # U-Net split: first half encoder, second half decoder, decoder layers + # add a learned-weighted skip from the symmetric encoder layer. + self.num_encoder_layers = num_layers // 2 + self.num_decoder_layers = num_layers - self.num_encoder_layers + self.num_skip_weights = min(self.num_encoder_layers, self.num_decoder_layers) + self.skip_weights = nn.Parameter(torch.ones(self.num_skip_weights, model_dim, dtype=torch.float32)) + + self.final_norm = RMSNorm() + self.lm_head = QATLinear(model_dim, vocab_size, bias=False) + self.lm_head._zero_init = True + if tie_embeddings: + self.lm_head.weight.requires_grad_(False) + + self.vocab_bias = nn.Parameter(torch.zeros(vocab_size, dtype=torch.float32)) + self._init_weights(tied_embed_init_std) + + def _init_weights(self, tied_embed_init_std: float) -> None: + if self.tie_embeddings: + nn.init.normal_(self.tok_emb.weight, mean=0.0, std=tied_embed_init_std) + for module in self.modules(): + if isinstance(module, QuinaryLinear) and not getattr(module, "_zero_init", False): + nn.init.normal_(module.weight, mean=0.0, std=0.02) + elif isinstance(module, nn.Linear) and getattr(module, "_zero_init", False): + nn.init.zeros_(module.weight) + + def _compute_logits(self, x: Tensor) -> Tensor: + if self.tie_embeddings: + proj = self.embed_proj_rev(x) if self.embed_proj_rev is not None else x + logits_raw = F.linear(proj, self.tok_emb.weight.to(x.dtype)) + else: + logits_raw = self.lm_head(x) + return logits_raw + self.vocab_bias.to(x.dtype) + + def _softcap(self, logits: Tensor) -> Tensor: + s = self.logit_softcap + if self.softcap_type == "tanh": + return s * torch.tanh(logits / s) + x_sc = torch.clamp(logits / s, -2.0, 2.0) + x2 = x_sc * x_sc + return s * torch.clamp(x_sc * (1.0 - x2 / 3.0 + x2 * x2 / 15.0), -1.0, 1.0) + + def forward(self, input_ids: Tensor, target_ids: Tensor, reduction: str = "mean") -> Tensor: + x = 
self.tok_emb(input_ids).float() + if self.embed_proj is not None: + x = self.embed_proj(x) + x = F.rms_norm(x, (x.size(-1),)) + x0 = x + + # U-Net encoder/decoder with skip connections. + skips = [] + for vi in range(self.num_encoder_layers): + x = self.blocks[vi](x, x0) + skips.append(x) + for vi in range(self.num_decoder_layers): + if vi < self.num_skip_weights: + x = x + self.skip_weights[vi].to(dtype=x.dtype) * skips.pop() + x = self.blocks[self.num_encoder_layers + vi](x, x0) + + x_normed = self.final_norm(x) + x_flat = x_normed.reshape(-1, x_normed.size(-1)) + targets = target_ids.reshape(-1) + logits = self._softcap(self._compute_logits(x_flat)) + + if reduction == "none": + return F.cross_entropy(logits.float(), targets, reduction="none").reshape(input_ids.shape) + + # Fused CE: single logsumexp; z-loss only during training (not eval BPB). + logits_f = logits.float() + lse = torch.logsumexp(logits_f, dim=-1) + target_logits = logits_f.gather(1, targets.unsqueeze(1)).squeeze(1) + main_loss = (lse - target_logits).mean() + if self.training: + main_loss = main_loss + 1e-4 * (lse ** 2).mean() + return main_loss + + +def build_luts(sp, vocab_size: int, device: torch.device): + sp_vocab_size = int(sp.vocab_size()) + table_size = max(sp_vocab_size, vocab_size) + base_bytes_np = np.zeros((table_size,), dtype=np.int16) + has_leading_space_np = np.zeros((table_size,), dtype=np.bool_) + is_boundary_token_np = np.ones((table_size,), dtype=np.bool_) + for token_id in range(sp_vocab_size): + if sp.is_control(token_id) or sp.is_unknown(token_id) or sp.is_unused(token_id): + continue + is_boundary_token_np[token_id] = False + if sp.is_byte(token_id): + base_bytes_np[token_id] = 1 + continue + piece = sp.id_to_piece(token_id) + if piece.startswith("\u2581"): + has_leading_space_np[token_id] = True + piece = piece[1:] + base_bytes_np[token_id] = len(piece.encode("utf-8")) + return ( + torch.tensor(base_bytes_np, dtype=torch.int16, device=device), + 
def ld_val(pattern, seq_len, max_tok=None):
    """Load the validation token stream from tokenized shard files.

    Concatenates every shard matching `pattern` (sorted for determinism),
    optionally truncates to `max_tok` tokens, then trims so the usable
    length is an exact multiple of `seq_len`, keeping one extra token so
    that inputs/targets can be produced by shifting.

    Args:
        pattern: glob pattern for the shard files.
        seq_len: evaluation sequence length the stream is trimmed to.
        max_tok: optional token cap. If None, read VAL_MAX_TOKENS from the
            environment *at call time* (0 disables the cap). Fix: the old
            default `int(os.environ.get(...))` was evaluated once at import
            time, so any change to VAL_MAX_TOKENS after import was ignored.

    Returns:
        1-D token tensor of length k * seq_len + 1.

    Raises:
        FileNotFoundError: if no shard matches `pattern` (raised explicitly,
            not via assert, so the guard survives `python -O`).
    """
    if max_tok is None:
        max_tok = int(os.environ.get("VAL_MAX_TOKENS", 0))
    files = sorted(glob.glob(pattern))
    if not files:
        raise FileNotFoundError(f"No files: {pattern}")
    tok = torch.cat([ld_shard(Path(p)) for p in files]).contiguous()
    if max_tok > 0:
        tok = tok[:max_tok + 1]
    u = ((tok.numel() - 1) // seq_len) * seq_len
    return tok[:u + 1]


def eval_val(args, model, rank, world_size, device, grad_accum_steps, val_tokens,
             base_bytes_lut, has_leading_space_lut, is_boundary_token_lut):
    """Evaluate mean NLL and BPB on the validation stream, sharded by rank.

    Each rank scores a contiguous slice of non-overlapping `train_seq_len`
    windows; loss/token/byte accumulators are all-reduced so every rank
    returns identical numbers. The byte denominator is computed from the
    tokenizer LUTs: per-token UTF-8 byte length plus one byte for a leading
    space when the previous token is not a boundary token.

    Returns:
        (val_loss, bpb, total_token_count, total_byte_count).
    """
    local_batch_tokens = args.val_batch_size // (world_size * grad_accum_steps)
    local_batch_seqs = max(1, local_batch_tokens // args.train_seq_len)
    total_seqs = (val_tokens.numel() - 1) // args.train_seq_len
    seq_start = (total_seqs * rank) // world_size
    seq_end = (total_seqs * (rank + 1)) // world_size
    loss_sum = torch.zeros((), device=device, dtype=torch.float64)
    token_count = torch.zeros((), device=device, dtype=torch.float64)
    byte_count = torch.zeros((), device=device, dtype=torch.float64)
    model.eval()
    with torch.no_grad():
        for batch_start in range(seq_start, seq_end, local_batch_seqs):
            batch_end = min(batch_start + local_batch_seqs, seq_end)
            raw_start = batch_start * args.train_seq_len
            raw_end = batch_end * args.train_seq_len + 1  # +1: shifted targets
            local = val_tokens[raw_start:raw_end].to(device=device, dtype=torch.int64)
            x, y = local[:-1].reshape(-1, args.train_seq_len), local[1:].reshape(-1, args.train_seq_len)
            with torch.autocast(device_type="cuda", dtype=torch.bfloat16):
                batch_loss = model(x, y).detach()
            n = float(y.numel())
            # Token-weighted mean: accumulate loss * n, divide by total n later.
            loss_sum += batch_loss.to(torch.float64) * n
            token_count += n
            prev_ids, tgt_ids = x.reshape(-1), y.reshape(-1)
            tok_bytes = base_bytes_lut[tgt_ids].to(torch.int16)
            tok_bytes += (has_leading_space_lut[tgt_ids] & ~is_boundary_token_lut[prev_ids]).to(torch.int16)
            byte_count += tok_bytes.to(torch.float64).sum()
    if dist.is_available() and dist.is_initialized():
        for t in (loss_sum, token_count, byte_count):
            dist.all_reduce(t, op=dist.ReduceOp.SUM)
    val_loss = loss_sum / token_count
    # BPB = (nats/token -> bits/token) * tokens/byte.
    bpb = (val_loss.item() / math.log(2.0)) * (token_count.item() / byte_count.item())
    model.train()
    return float(val_loss.item()), float(bpb), int(token_count.item()), int(byte_count.item())


def main() -> None:
    """Train, serialize, round-trip-verify, and TTT-evaluate the model.

    End-to-end driver: distributed setup, seeded init, wallclock-capped
    training with LR warmdown, quinary serialization under the 16 MB budget,
    artifact round-trip reload on all ranks, and score-first test-time
    training (TTT) on the calibration (CTP) parameters only.
    """
    args = Hyperparameters()
    # The source text is logged and counted against the submission budget.
    code = Path(__file__).read_text(encoding="utf-8")

    if args.matrix_optimizer != "adamw":
        # Muon's Newton-Schulz orthogonalization benefits from compilation.
        global ns_orth
        ns_orth = torch.compile(ns_orth)

    # --- Distributed / device setup ---
    distributed = "RANK" in os.environ and "WORLD_SIZE" in os.environ
    rank = int(os.environ.get("RANK", "0"))
    world_size = int(os.environ.get("WORLD_SIZE", "1"))
    local_rank = int(os.environ.get("LOCAL_RANK", "0"))
    # Keep the global batch constant: 8 micro-steps total across all ranks.
    grad_accum_steps = max(1, 8 // world_size)
    grad_scale = 1.0 / grad_accum_steps

    if not torch.cuda.is_available():
        raise RuntimeError("CUDA is required")
    device = torch.device("cuda", local_rank)
    torch.cuda.set_device(device)
    if distributed:
        dist.init_process_group(backend="nccl", device_id=device)
        dist.barrier()
    master_process = rank == 0
    torch.backends.cuda.matmul.allow_tf32 = True
    torch.backends.cudnn.allow_tf32 = True

    # --- Logging ---
    os.makedirs("logs/cuda/", exist_ok=True)
    logfile = f"logs/cuda/{args.run_id}.txt" if master_process else None
    if master_process:
        print(logfile)
        # Truncate any pre-existing logfile from a previous run with the same
        # RUN_ID so the per-RUN_ID log file isn't append-stacked across reruns.
        if logfile:
            open(logfile, "w", encoding="utf-8").close()

    def log0(msg: str, console: bool = True) -> None:
        """Rank-0-only logging to console and/or the run logfile."""
        if not master_process:
            return
        if console:
            print(msg)
        if logfile:
            with open(logfile, "a", encoding="utf-8") as f:
                print(msg, file=f)

    log0(code, console=False)
    log0("=" * 100, console=False)

    log0(f"Python {sys.version}", console=False)
    log0(f"PyTorch {torch.__version__}", console=False)

    # --- Seeding (all sources, all devices) ---
    random.seed(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    torch.cuda.manual_seed_all(args.seed)

    # --- Tokenizer, validation data, byte-count LUTs ---
    sp = spm.SentencePieceProcessor(model_file=args.tokenizer_path)
    # Guard against an obvious tokenizer/vocab-size mismatch that would
    # silently produce a wrong byte-count LUT (and therefore a wrong BPB
    # denominator) without crashing. Raised explicitly (not assert) so the
    # check also runs under `python -O`.
    if sp.vocab_size() != args.vocab_size:
        raise ValueError(
            f"tokenizer vocab_size ({sp.vocab_size()}) != args.vocab_size "
            f"({args.vocab_size}); check TOKENIZER_PATH / VOCAB_SIZE")
    val_tokens = ld_val(args.val_files, args.train_seq_len)
    base_bytes_lut, has_leading_space_lut, is_boundary_token_lut = build_luts(
        sp, args.vocab_size, device)

    # --- Model ---
    base_model = GPT(
        vocab_size=args.vocab_size, num_layers=args.num_layers, model_dim=args.model_dim,
        num_heads=args.num_heads, num_kv_heads=args.num_kv_heads, mlp_mult=args.mlp_mult,
        tie_embeddings=args.tie_embeddings, tied_embed_init_std=args.tied_embed_init_std,
        logit_softcap=args.logit_softcap, rope_base=args.rope_base, qk_gain_init=args.qk_gain_init,
        group_size=args.bitnet_group_size, activation=args.activation_type,
        embed_dim=args.embed_dim,
        softcap_type=args.softcap_type, no_cache=(args.compile_mode == "reduce-overhead"),
        rope_type=args.rope_type, yarn_max_len=args.yarn_max_len, train_seq_len=args.train_seq_len,
    ).to(device).bfloat16()

    # Linears train in fp32 (QAT fake-quant wraps them); small tensors too.
    for module in base_model.modules():
        if isinstance(module, nn.Linear):
            module.float()
    restore_low_dim_params_to_fp32(base_model)
    if args.tie_embeddings:
        base_model.lm_head.weight.requires_grad_(False)

    torch._dynamo.config.optimize_ddp = False

    compiled_model = torch.compile(base_model, mode=args.compile_mode if args.compile_mode != "default" else None)
    # With a frozen tied head, the graph is static; otherwise DDP must scan
    # for unused parameters each step.
    use_find_unused = not args.tie_embeddings
    model = DDP(compiled_model, device_ids=[local_rank], broadcast_buffers=False,
                find_unused_parameters=use_find_unused,
                static_graph=not use_find_unused,
                gradient_as_bucket_view=True) if distributed else compiled_model

    # --- Optimizers ---
    # Four groups: token embedding (Adam), 2-D matrices (Muon or AdamW),
    # scalars + CTP calibration params (Adam), lm_head (Adam, frozen/zero-LR).
    _excl = {"tok_emb.weight", "lm_head.weight"}
    all_other_params = [(n, p) for n, p in base_model.named_parameters()
                        if not any(eh in n for eh in _excl)]
    matrix_params = [p for n, p in all_other_params
                     if p.ndim == 2 and not any(pat in n for pat in CTP)]
    scalar_params = [p for n, p in all_other_params
                     if p.ndim < 2 or any(pat in n for pat in CTP)]

    token_lr = args.tied_embed_lr if args.tie_embeddings else args.embed_lr
    opt_tok = torch.optim.Adam(
        [{"params": [base_model.tok_emb.weight], "lr": token_lr, "base_lr": token_lr}],
        betas=(args.beta1, args.beta2), eps=args.adam_eps, fused=True)
    if args.matrix_optimizer == "adamw":
        opt_muon = torch.optim.AdamW(
            [{"params": matrix_params, "lr": args.adam_lr, "base_lr": args.adam_lr}],
            betas=(args.beta1, args.beta2), eps=args.adam_eps, weight_decay=args.adam_wd, fused=True)
    else:
        opt_muon = Muon(matrix_params, lr=args.matrix_lr, momentum=args.muon_momentum,
                        backend_steps=args.muon_backend_steps, wd=args.muon_wd)
        # Muon's constructor doesn't take base_lr in the group dict, so stamp
        # it afterwards for the shared LR-schedule code below.
        for g in opt_muon.param_groups:
            g["base_lr"] = args.matrix_lr
    opt_scalar = torch.optim.Adam(
        [{"params": scalar_params, "lr": args.scalar_lr, "base_lr": args.scalar_lr}],
        betas=(args.beta1, args.beta2), eps=args.adam_eps, fused=True)
    # NOTE(review): head LR is pinned to 0.0 here even though args.head_lr
    # exists; with tied embeddings lm_head is frozen anyway — confirm the
    # intent for the untied path.
    opt_head = torch.optim.Adam(
        [{"params": [base_model.lm_head.weight], "lr": 0.0, "base_lr": 0.0}],
        betas=(args.beta1, args.beta2), eps=args.adam_eps, fused=True)

    optimizers = [opt_tok, opt_muon, opt_scalar, opt_head]

    # --- Log all hyperparameters ---
    log0("--- Hyperparameters ---", console=False)
    log0(" ".join(f"{a}={getattr(args,a)}" for a in sorted(dir(args))
                  if not a.startswith("_") and a not in ("train_files", "val_files")
                  and not callable(getattr(args, a))), console=False)
    n_params = sum(p.numel() for p in base_model.parameters())
    log0(f"params:{n_params} L:{args.num_layers} d:{args.model_dim} h:{args.num_heads} kv:{args.num_kv_heads} ws:{world_size} ga:{grad_accum_steps} s:{args.seed}")

    # --- Data loader & helpers ---
    train_loader = DistributedTokenLoader(args.train_files, rank, world_size, device)

    def zero_grad_all():
        for opt in optimizers:
            opt.zero_grad(set_to_none=True)

    max_wallclock_ms = 1000.0 * args.max_wallclock_seconds if args.max_wallclock_seconds > 0 else None

    def lr_mul(step: int, elapsed_ms: float):
        """LR multiplier: 1.0, then linear warmdown over the last
        warmdown_fraction of the run (by step, or by wallclock when capped)."""
        if args.warmdown_fraction <= 0:
            return 1.0
        if max_wallclock_ms is None:
            warmdown_start = int(args.iterations * (1.0 - args.warmdown_fraction))
            ratio = max((args.iterations - step) / max(args.iterations * args.warmdown_fraction, 1), 0.0) if step >= warmdown_start else 1.0
        else:
            warmdown_ms = max_wallclock_ms * args.warmdown_fraction
            remaining_ms = max(max_wallclock_ms - elapsed_ms, 0.0)
            ratio = remaining_ms / max(warmdown_ms, 1e-9) if remaining_ms <= warmdown_ms else 1.0
        return max(ratio, args.min_lr)

    _seq_switched = False
    _batch_switched = False
    active_seq_len = args.seq_len_start if args.seq_len_start > 0 else args.train_seq_len
    active_batch_tokens = args.batch_tokens_start if args.batch_tokens_start > 0 else args.train_batch_tokens

    if args.skip_training:
        log0("skip_training=1, using existing artifact final_model.quinary.ptz")

    # --- Compiler warmup ---
    # Run a few real steps to trigger torch.compile, then restore the exact
    # pre-warmup model/optimizer state and a fresh loader so warmup does not
    # affect the timed run.
    if args.warmup_steps > 0 and not args.skip_training:
        _ms = {n: t.detach().cpu().clone() for n, t in base_model.state_dict().items()}
        _os = [copy.deepcopy(o.state_dict()) for o in optimizers]
        model.train()
        for ws in range(args.warmup_steps):
            zero_grad_all()
            for mi in range(grad_accum_steps):
                if distributed:
                    model.require_backward_grad_sync = mi == grad_accum_steps - 1
                x, y = train_loader.next_batch(active_batch_tokens, active_seq_len, grad_accum_steps)
                torch.compiler.cudagraph_mark_step_begin()
                with torch.autocast(device_type="cuda", dtype=torch.bfloat16):
                    loss = model(x, y)
                (loss * grad_scale).backward()
            for o in optimizers:
                o.step()
            zero_grad_all()
            log0(f"warmup:{ws+1}/{args.warmup_steps}")
        base_model.load_state_dict(_ms, strict=True)
        for o, s in zip(optimizers, _os):
            o.load_state_dict(s)
        zero_grad_all()
        train_loader = DistributedTokenLoader(args.train_files, rank, world_size, device)

    # --- Main training loop ---
    training_time_ms = 0.0
    stop_after_step: int | None = None
    train_loss = torch.zeros((), device=device)
    torch.cuda.synchronize()
    t0 = time.perf_counter()
    step = 0

    while not args.skip_training:
        last_step = step == args.iterations or (stop_after_step is not None and step >= stop_after_step)

        # Periodic (and final) validation; eval time is excluded from the
        # training-time clock by re-synchronizing around it.
        if last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0):
            torch.cuda.synchronize()
            training_time_ms += 1000.0 * (time.perf_counter() - t0)
            val_loss, val_bpb, val_tok_count, val_byte_count = eval_val(
                args, model, rank, world_size, device, grad_accum_steps,
                val_tokens, base_bytes_lut, has_leading_space_lut, is_boundary_token_lut)
            tstats = quin_stats(base_model, group_size=args.bitnet_group_size)
            log0(f"step:{step}/{args.iterations} val_loss:{val_loss:.4f} val_bpb:{val_bpb:.4f} "
                 f"train_time:{training_time_ms:.0f}ms zero_frac:{tstats['zero_frac']:.3f} "
                 f"eval_tokens:{val_tok_count} eval_bytes:{val_byte_count}")
            torch.cuda.synchronize()
            t0 = time.perf_counter()

        if last_step:
            if stop_after_step is not None and step < args.iterations:
                log0(f"stopping_early: wallclock_cap train_time:{training_time_ms:.0f}ms step:{step}/{args.iterations}")
            break

        elapsed_ms = training_time_ms + 1000.0 * (time.perf_counter() - t0)
        scale = lr_mul(step, elapsed_ms)

        # Sequence length schedule (one-shot switch to the full length).
        if args.seq_len_start > 0 and not _seq_switched:
            if max_wallclock_ms is not None:
                should_switch_seq = elapsed_ms >= args.seq_schedule_fraction * max_wallclock_ms
            else:
                should_switch_seq = step >= int(args.iterations * args.seq_schedule_fraction)
            if should_switch_seq:
                active_seq_len = args.train_seq_len
                _seq_switched = True
                torch._dynamo.reset()  # shapes change -> recompile
                train_loader = DistributedTokenLoader(args.train_files, rank, world_size, device)
                log0(f"step:{step} seq_len_switch:{args.seq_len_start}->{active_seq_len}")

        # Batch size schedule (one-shot switch to the full batch).
        if args.batch_tokens_start > 0 and not _batch_switched:
            if max_wallclock_ms is not None:
                should_switch_batch = elapsed_ms >= args.batch_schedule_fraction * max_wallclock_ms
            else:
                should_switch_batch = step >= int(args.iterations * args.batch_schedule_fraction)
            if should_switch_batch:
                active_batch_tokens = args.train_batch_tokens
                _batch_switched = True
                log0(f"step:{step} batch_switch:{args.batch_tokens_start}->{active_batch_tokens}")

        zero_grad_all()
        train_loss.zero_()

        for micro in range(grad_accum_steps):
            if distributed:
                # Only sync gradients on the last micro-step.
                model.require_backward_grad_sync = micro == grad_accum_steps - 1
            x, y = train_loader.next_batch(active_batch_tokens, active_seq_len, grad_accum_steps)
            torch.compiler.cudagraph_mark_step_begin()
            with torch.autocast(device_type="cuda", dtype=torch.bfloat16):
                loss = model(x, y)
            train_loss.add_(loss.detach())
            (loss * grad_scale).backward()
        train_loss /= grad_accum_steps

        # Muon momentum warmup (skip when AdamW is the matrix optimizer)
        if args.matrix_optimizer != "adamw":
            frac = min(step / args.muon_momentum_warmup_steps, 1.0) if args.muon_momentum_warmup_steps > 0 else 1.0
            for g in opt_muon.param_groups:
                g["momentum"] = (1 - frac) * args.muon_momentum_warmup_start + frac * args.muon_momentum

        # LR scheduling: every group scales its own base_lr by the shared multiplier.
        for opt in optimizers:
            for g in opt.param_groups:
                g["lr"] = g["base_lr"] * scale
            opt.step()
        zero_grad_all()
        step += 1
        approx_ms = training_time_ms + 1000.0 * (time.perf_counter() - t0)

        if args.train_log_every > 0 and step % args.train_log_every == 0:
            log0(f"step:{step}/{args.iterations} loss:{train_loss.item():.4f} t:{approx_ms:.0f}ms avg:{approx_ms/step:.1f}ms")
        if args.churn_log_every > 0 and step % args.churn_log_every == 0:
            log0(f"step:{step} churn:{churn_fn(base_model, args.bitnet_group_size):.4f} zero:{quin_stats(base_model, args.bitnet_group_size)['zero_frac']:.3f}")

        # Wallclock cap sync: all ranks agree (MAX-reduce) on stopping so no
        # rank blocks in a collective the others never enter.
        if stop_after_step is None and max_wallclock_ms is not None and step % 10 == 0:
            reached_cap = approx_ms >= max_wallclock_ms
            if distributed:
                cap_t = torch.tensor(int(reached_cap), device=device)
                dist.all_reduce(cap_t, op=dist.ReduceOp.MAX)
                reached_cap = bool(cap_t.item())
            if reached_cap:
                stop_after_step = step

    # --- Serialization (rank 0 only) ---
    if master_process and not args.skip_training:
        sd = base_model.state_dict()
        if base_model.tie_embeddings:
            # Tied head duplicates tok_emb.weight; never serialize it.
            sd.pop("lm_head.weight", None)

        log0("serialize: start")
        t_qsd = time.perf_counter()
        q_obj, q_stats = q_sd(sd, group_size=args.bitnet_group_size,
                              scale_quant_bits=args.scale_quant_bits)
        qsd_time = time.perf_counter() - t_qsd
        log0(f"serialize: q_sd in {qsd_time:.1f}s "
             f"(quinary={q_stats['quinary_params']/1e6:.2f}M params/{q_stats['quinary_bytes']/1e6:.2f}MB, "
             f"fp={q_stats['fp_params']/1e6:.2f}M params/{q_stats['fp_bytes']/1e6:.2f}MB)")

        # Raw torch.save size is logged only as the compression baseline.
        t_save = time.perf_counter()
        buf = io.BytesIO()
        torch.save(q_obj, buf)
        raw_bytes = len(buf.getvalue())
        log0(f"serialize: torch.save raw={raw_bytes/1e6:.2f}MB in {time.perf_counter()-t_save:.1f}s")

        t_compress = time.perf_counter()
        final_blob, per_stream_audit = _serialize_per_stream_v2(q_obj, level=9)
        compress_time = time.perf_counter() - t_compress
        artifact_bytes = len(final_blob)
        log0(per_stream_audit)
        log0(f"serialize: per_stream_v2 {raw_bytes/1e6:.2f}MB -> {artifact_bytes/1e6:.2f}MB "
             f"(ratio {artifact_bytes/raw_bytes:.1%}, saved {(raw_bytes-artifact_bytes)/1e6:.2f}MB) "
             f"in {compress_time:.1f}s")

        with open("final_model.quinary.ptz", "wb") as f:
            f.write(final_blob)

        code_bytes = len(code.encode("utf-8"))

        # Budget = compressed artifact + source code, against the 16 MB cap.
        total = artifact_bytes + code_bytes
        log0(f"artifact:{artifact_bytes/1e6:.2f}MB quinary:{q_stats['quinary_params']}({q_stats['quinary_bytes']}B) fp:{q_stats['fp_params']}({q_stats['fp_bytes']}B) code:{code_bytes}")
        log0(f"budget:{total}/{16000000} ({total/1e6:.2f}/{16.00:.2f}MB) {'FITS' if total <= 16000000 else 'OVER'}")

    # --- All ranks load roundtrip weights and evaluate ---
    if distributed:
        dist.barrier()

    with open("final_model.quinary.ptz", "rb") as f:
        loaded = _load_artifact(f.read())
    missing, unexpected = base_model.load_state_dict(deq_sd(loaded), strict=False)
    # The only "missing" key we expect is `lm_head.weight` when tied, since
    # q_sd drops it from the saved state-dict. Any other missing/unexpected
    # key is a serialization-roundtrip bug and should fail loudly — even
    # under `python -O`, hence explicit raises rather than asserts.
    expected_missing = {"lm_head.weight"} if base_model.tie_embeddings else set()
    if not set(missing) <= expected_missing:
        raise RuntimeError(f"unexpected missing keys after artifact load: {set(missing) - expected_missing}")
    if unexpected:
        raise RuntimeError(f"unexpected extra keys after artifact load: {unexpected}")
    torch._dynamo.reset()

    q_val_loss, q_val_bpb, q_tok_count, q_byte_count = eval_val(
        args, model, rank, world_size, device, grad_accum_steps,
        val_tokens, base_bytes_lut, has_leading_space_lut, is_boundary_token_lut)
    log0(f"final_quinary_roundtrip val_loss:{q_val_loss:.4f} val_bpb:{q_val_bpb:.4f} "
         f"eval_tokens:{q_tok_count} eval_bytes:{q_byte_count}")

    # --- Score-first chunk-based CTP TTT ---
    # Each chunk is SCORED with the current weights first, then TRAINED on,
    # so no token is ever scored by a model that has already seen it.
    if args.ttt_steps > 0:
        torch.cuda.synchronize()
        t_ttt = time.perf_counter()
        seq_len = args.train_seq_len
        # TTT_STRIDE: sliding-window step used to slice the val stream into TTT
        # examples. Hardcoded at 16 (canonical sp16384 quinary submission); a
        # smaller stride yields more overlapping windows but more compute.
        stride = 16
        ttt_chunk_tokens = args.ttt_tokens if args.ttt_tokens > 0 else 32768
        ttt_epochs = args.ttt_steps
        batch_seqs = 32
        total_tokens = val_tokens.numel() - 1

        # Select TTT params (CTP only — quinary weights are frozen)
        for p in base_model.parameters():
            p.requires_grad_(False)
        ttt_params = []
        for name, p in base_model.named_parameters():
            if any(pat in name for pat in CTP):
                p.requires_grad_(True)
                ttt_params.append(p)
        n_ttt = sum(p.numel() for p in ttt_params)
        ttt_opt = torch.optim.SGD(ttt_params, lr=args.ttt_lr, momentum=0.9)
        for pg in ttt_opt.param_groups:
            pg["initial_lr"] = pg["lr"]  # remember peak LR for the cosine decay
        log0(f"ttt: {n_ttt} CTP params, lr={args.ttt_lr}")

        # Assign sliding windows to chunks. Each window scores only its last
        # `stride` tokens (except the first, which scores all of them), so a
        # window belongs to the chunk containing its scored region.
        context_size = seq_len - stride
        window_starts = [ws for ws in range(0, total_tokens, stride)
                         if ws + context_size < total_tokens]
        num_chunks = (total_tokens + ttt_chunk_tokens - 1) // ttt_chunk_tokens
        chunk_windows = [[] for _ in range(num_chunks)]
        for ws in window_starts:
            s = 0 if ws == 0 else context_size
            ci = min((ws + s) // ttt_chunk_tokens, num_chunks - 1)
            chunk_windows[ci].append(ws)
        log0(f"ttt: {n_ttt} CTP params, {num_chunks} chunks, {ttt_epochs} epochs, lr={args.ttt_lr}")

        loss_sum = torch.zeros((), device=device, dtype=torch.float64)
        token_count = torch.zeros((), device=device, dtype=torch.float64)
        byte_count = torch.zeros((), device=device, dtype=torch.float64)

        # Recompile for TTT (find_unused_parameters needed with frozen params)
        torch._dynamo.reset()
        compiled_ttt = torch.compile(base_model, mode=args.compile_mode if args.compile_mode != "default" else None)

        for ci in range(num_chunks):
            windows = chunk_windows[ci]
            if not windows:
                continue
            chunk_start = ci * ttt_chunk_tokens
            chunk_end = min((ci + 1) * ttt_chunk_tokens, total_tokens)
            my_s = len(windows) * rank // world_size
            my_e = len(windows) * (rank + 1) // world_size
            my_windows = windows[my_s:my_e]

            # Phase 1: SCORE (no_grad, compiled)
            base_model.eval()
            with torch.no_grad():
                for bi in range(0, len(my_windows), batch_seqs):
                    batch_ws = my_windows[bi:bi + batch_seqs]
                    bsz = len(batch_ws)
                    # Zero-padded batches; only [:wlen] positions are scored.
                    x_batch = torch.zeros(bsz, seq_len, dtype=torch.int64, device=device)
                    y_batch = torch.zeros(bsz, seq_len, dtype=torch.int64, device=device)
                    wlens = []
                    for i, ws in enumerate(batch_ws):
                        we = min(ws + seq_len, total_tokens)
                        wlen = we - ws
                        wlens.append(wlen)
                        chunk_tok = val_tokens[ws:we + 1].to(dtype=torch.int64, device=device)
                        x_batch[i, :wlen] = chunk_tok[:-1]
                        y_batch[i, :wlen] = chunk_tok[1:]
                    with torch.autocast(device_type="cuda", dtype=torch.bfloat16):
                        nll = compiled_ttt(x_batch, y_batch, reduction="none").detach()
                    for i, ws in enumerate(batch_ws):
                        wlen = wlens[i]
                        s = 0 if ws == 0 else context_size
                        scored_nll = nll[i, s:wlen].to(torch.float64)
                        loss_sum += scored_nll.sum()
                        token_count += float(wlen - s)
                        # Byte accounting on exactly the scored slice, with the
                        # same leading-space rule as eval_val.
                        tgt, prev = y_batch[i, s:wlen], x_batch[i, s:wlen]
                        tb = base_bytes_lut[tgt].to(torch.float64)
                        tb += (has_leading_space_lut[tgt] & ~is_boundary_token_lut[prev]).to(torch.float64)
                        byte_count += tb.sum()

            # Phase 2: TRAIN on chunk (score-first: already scored). The last
            # chunk is never trained on — nothing after it will be scored.
            if ci < num_chunks - 1 and ttt_epochs > 0:
                base_model.train()
                chunk_seqs = (chunk_end - chunk_start) // seq_len
                if chunk_seqs > 0:
                    # Cosine LR decay across chunks, from initial_lr down to 0.
                    cos_mul = 0.5 * (1.0 + math.cos(math.pi * ci / max(num_chunks - 1, 1)))
                    for pg in ttt_opt.param_groups:
                        pg["lr"] = pg.get("initial_lr", pg["lr"]) * cos_mul
                    my_seq_s = chunk_seqs * rank // world_size
                    my_seq_e = chunk_seqs * (rank + 1) // world_size
                    for _ep in range(ttt_epochs):
                        for bs in range(0, my_seq_e - my_seq_s, batch_seqs):
                            be = min(bs + batch_seqs, my_seq_e - my_seq_s)
                            start_tok = chunk_start + (my_seq_s + bs) * seq_len
                            end_tok = chunk_start + (my_seq_s + be) * seq_len + 1
                            if end_tok > val_tokens.numel():
                                continue
                            local = val_tokens[start_tok:end_tok].to(device=device, dtype=torch.int64)
                            x = local[:-1].reshape(-1, seq_len)
                            y = local[1:].reshape(-1, seq_len)
                            ttt_opt.zero_grad(set_to_none=True)
                            with torch.autocast(device_type="cuda", dtype=torch.bfloat16):
                                loss = compiled_ttt(x, y)
                            loss.backward()
                            if distributed:
                                # Manual grad averaging (no DDP wrapper in TTT).
                                for p in ttt_params:
                                    if p.grad is not None:
                                        dist.all_reduce(p.grad, op=dist.ReduceOp.AVG)
                            torch.nn.utils.clip_grad_norm_(ttt_params, 1.0)
                            ttt_opt.step()

            if master_process and ci % max(1, num_chunks // 5) == 0:
                log0(f"ttt chunk:{ci+1}/{num_chunks}")

        if distributed:
            for t in (loss_sum, token_count, byte_count):
                dist.all_reduce(t, op=dist.ReduceOp.SUM)
        ttt_val_loss = (loss_sum / token_count).item()
        ttt_bpb = (ttt_val_loss / math.log(2.0)) * (token_count.item() / byte_count.item())
        for p in base_model.parameters():
            p.requires_grad_(True)
        torch._dynamo.reset()
        torch.cuda.synchronize()
        ttt_time_ms = 1000.0 * (time.perf_counter() - t_ttt)
        log0(f"ttt_eval val_loss:{ttt_val_loss:.4f} val_bpb:{ttt_bpb:.4f} "
             f"time:{ttt_time_ms:.0f}ms "
             f"eval_tokens:{int(token_count.item())} eval_bytes:{int(byte_count.item())}")

    if distributed:
        dist.destroy_process_group()


if __name__ == "__main__":
    main()
| (main, Apr 14 2026, 06:19:41) [GCC 14.3.0] +PyTorch 2.10.0+cu128 +--- Hyperparameters --- +activation_type=relu2 adam_eps=1e-08 adam_lr=0.05 adam_wd=0.05 batch_schedule_fraction=0.33 batch_tokens_start=0 beta1=0.9 beta2=0.95 bitnet_group_size=192 churn_log_every=0 compile_mode=default data_path=./data/canonical/datasets/fineweb10B_sp16384 embed_dim=380 embed_lr=0.6 fp_storage=True grad_clip_norm=0.0 head_lr=0.02 iterations=10000 logit_softcap=10.0 matrix_lr=0.035 matrix_optimizer=muon max_wallclock_seconds=599.0 min_lr=0.0 mlp_mult=4 model_dim=576 muon_backend_steps=3 muon_momentum=0.95 muon_momentum_warmup_start=0.85 muon_momentum_warmup_steps=500 muon_wd=0.0 num_heads=6 num_kv_heads=3 num_layers=10 qk_gain_init=5.0 rope_base=5000.0 rope_type=yarn run_id=quinary_seed7 scalar_lr=0.02 scale_quant_bits=5 seed=7 seq_len_start=0 seq_schedule_fraction=0.0 skip_training=False softcap_type=poly tie_embeddings=1 tied_embed_init_std=0.005 tied_embed_lr=0.02 tokenizer_path=./data/canonical/tokenizers/fineweb_16384_bpe.model train_batch_tokens=524288 train_log_every=1000 train_seq_len=1024 ttt_lr=0.005 ttt_steps=3 ttt_tokens=32768 val_batch_size=524288 val_loss_every=0 vocab_size=16384 warmdown_fraction=0.2 warmup_steps=5 yarn_max_len=2048 +params:52828668 L:10 d:576 h:6 kv:3 ws:8 ga:1 s:7 +warmup:1/5 +warmup:2/5 +warmup:3/5 +warmup:4/5 +warmup:5/5 +step:1000/10000 loss:3.8264 t:77231ms avg:77.2ms +step:2000/10000 loss:3.5207 t:154350ms avg:77.2ms +step:3000/10000 loss:3.4328 t:231252ms avg:77.1ms +step:4000/10000 loss:3.2871 t:307962ms avg:77.0ms +step:5000/10000 loss:3.5032 t:384654ms avg:76.9ms +step:6000/10000 loss:3.5300 t:461365ms avg:76.9ms +step:7000/10000 loss:3.4704 t:538006ms avg:76.9ms +step:7800/10000 val_loss:3.2666 val_bpb:1.1587 train_time:599456ms zero_frac:0.261 eval_tokens:37146624 eval_bytes:151078879 +stopping_early: wallclock_cap train_time:599456ms step:7800/10000 +serialize: start +serialize: q_sd in 0.1s (quinary=36.50M params/12.36MB, fp=6.90M 
params/7.11MB) +serialize: torch.save raw=19.53MB in 0.0s +per_stream_v2: meta=0.14MB, 63 bulk payloads (lzma=61, lrzip=2; raw=3, base5=2, base5_T=37, bitmask_T=21), qkv_splits=10, total=15.65MB +serialize: per_stream_v2 19.53MB -> 15.65MB (ratio 80.1%, saved 3.89MB) in 36.2s +artifact:15.65MB quinary:36495360(12355360B) fp:6896124(7114168B) code:79272 +budget:15724839/16000000 (15.72/16.00MB) FITS +final_quinary_roundtrip val_loss:3.2762 val_bpb:1.1622 eval_tokens:37146624 eval_bytes:151078879 +ttt: 42364 CTP params, lr=0.005 +ttt: 42364 CTP params, 1134 chunks, 3 epochs, lr=0.005 +ttt chunk:1/1134 +ttt chunk:227/1134 +ttt chunk:453/1134 +ttt chunk:679/1134 +ttt chunk:905/1134 +ttt chunk:1131/1134 +ttt_eval val_loss:3.2076 val_bpb:1.1378 time:214181ms eval_tokens:37146624 eval_bytes:151078879 diff --git a/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/requirements.txt b/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/requirements.txt new file mode 100644 index 0000000000..0c5eedce7b --- /dev/null +++ b/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/requirements.txt @@ -0,0 +1,10 @@ +numpy +tqdm +torch==2.10 +huggingface-hub +kernels +setuptools +typing-extensions==4.15.0 +datasets +tiktoken +sentencepiece \ No newline at end of file diff --git a/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/run.sh b/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/run.sh new file mode 100755 index 0000000000..067425a7a4 --- /dev/null +++ b/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/run.sh @@ -0,0 +1,74 @@ +# Quinary (5-level) Parameter Golf submission. 
# Launch script for the quinary (5-level) Parameter Golf submission.
#
# Every hyperparameter is an environment variable with a canonical default,
# so any single knob can be overridden without editing this file — e.g. the
# other seeds of the 3-seed record run as:
#   SEED=1337 bash run.sh
#
# Canonical configuration (the artifact in this folder): sp16384 vocab +
# tokenizer; 53M-param model (EMBED_DIM=380, MODEL_DIM=576, 10 layers,
# 6 heads / 3 KV heads); QK_GAIN_INIT=5.0, MATRIX_LR=0.035 (Muon);
# TTT_STEPS=3, TTT_LR=0.005, TTT_TOKENS=32768; SCALE_QUANT_BITS=5
# (per-group scale log-delta quant, ~141 KB saved at +2.1 mBPB TTT cost,
# net Pareto-positive). Serialization is the per-stream v2 archive (header
# byte 0x03): each bulk tensor gets its own compressed payload, four
# layouts {base5, base5_T, bitmask, bitmask_T} are screened by LZMA9 size,
# then LZMA9 vs lrzip-zpaq is tried only on the winning layout; c_qkv rows
# are split into independent Q/K/V sub-payloads. This is robust to the
# seed-dependent lrzip cliff (full-blob lrzip can go OVER on ~33% of
# seeds; per-stream v2 consistently FITS at ~15.64 MB).
#
# NOTE: the variable assignments below are one backslash-continued command
# prefix, so no comments may appear between them; they are grouped by
# ordering only. Assignment order is immaterial.
RUN_ID=${RUN_ID:-quinary_seed42} \
SEED=${SEED:-42} \
DATA_PATH=${DATA_PATH:-./data/canonical/datasets/fineweb10B_sp16384} \
TOKENIZER_PATH=${TOKENIZER_PATH:-./data/canonical/tokenizers/fineweb_16384_bpe.model} \
VOCAB_SIZE=${VOCAB_SIZE:-16384} \
EMBED_DIM=${EMBED_DIM:-380} \
MODEL_DIM=${MODEL_DIM:-576} \
NUM_LAYERS=${NUM_LAYERS:-10} \
NUM_HEADS=${NUM_HEADS:-6} \
NUM_KV_HEADS=${NUM_KV_HEADS:-3} \
MLP_MULT=${MLP_MULT:-4} \
ACTIVATION=${ACTIVATION:-relu2} \
BITNET_GROUP_SIZE=${BITNET_GROUP_SIZE:-192} \
TIE_EMBEDDINGS=${TIE_EMBEDDINGS:-1} \
LOGIT_SOFTCAP=${LOGIT_SOFTCAP:-10} \
SOFTCAP_TYPE=${SOFTCAP_TYPE:-poly} \
QK_GAIN_INIT=${QK_GAIN_INIT:-5.0} \
ROPE_TYPE=${ROPE_TYPE:-yarn} \
ROPE_BASE=${ROPE_BASE:-5000} \
YARN_MAX_LEN=${YARN_MAX_LEN:-2048} \
MATRIX_OPTIMIZER=${MATRIX_OPTIMIZER:-muon} \
MATRIX_LR=${MATRIX_LR:-0.035} \
MUON_BACKEND_STEPS=${MUON_BACKEND_STEPS:-3} \
MUON_MOMENTUM=${MUON_MOMENTUM:-0.95} \
MUON_MOMENTUM_WARMUP_START=${MUON_MOMENTUM_WARMUP_START:-0.85} \
MUON_MOMENTUM_WARMUP_STEPS=${MUON_MOMENTUM_WARMUP_STEPS:-500} \
MUON_WD=${MUON_WD:-0.0} \
ADAM_LR=${ADAM_LR:-0.05} \
ADAM_WD=${ADAM_WD:-0.05} \
SCALAR_LR=${SCALAR_LR:-0.02} \
TIED_EMBED_LR=${TIED_EMBED_LR:-0.02} \
HEAD_LR=${HEAD_LR:-0.02} \
WARMDOWN_FRACTION=${WARMDOWN_FRACTION:-0.2} \
TRAIN_SEQ_LEN=${TRAIN_SEQ_LEN:-1024} \
SEQ_LEN_START=${SEQ_LEN_START:-0} \
SEQ_SCHEDULE_FRACTION=${SEQ_SCHEDULE_FRACTION:-0.0} \
TRAIN_BATCH_TOKENS=${TRAIN_BATCH_TOKENS:-524288} \
BATCH_TOKENS_START=${BATCH_TOKENS_START:-0} \
BATCH_SCHEDULE_FRACTION=${BATCH_SCHEDULE_FRACTION:-0.33} \
ITERATIONS=${ITERATIONS:-10000} \
WARMUP_STEPS=${WARMUP_STEPS:-5} \
MAX_WALLCLOCK_SECONDS=${MAX_WALLCLOCK_SECONDS:-599} \
VAL_LOSS_EVERY=${VAL_LOSS_EVERY:-0} \
VAL_MAX_TOKENS=${VAL_MAX_TOKENS:-0} \
TRAIN_LOG_EVERY=${TRAIN_LOG_EVERY:-1000} \
CHURN_LOG_EVERY=${CHURN_LOG_EVERY:-0} \
TTT_STEPS=${TTT_STEPS:-3} \
TTT_LR=${TTT_LR:-0.005} \
TTT_TOKENS=${TTT_TOKENS:-32768} \
SCALE_QUANT_BITS=${SCALE_QUANT_BITS:-5} \
COMPILE_MODE=${COMPILE_MODE:-default} \
OMP_NUM_THREADS=${OMP_NUM_THREADS:-1} torchrun --standalone --nproc_per_node=8 train_gpt.py
+# +# After this finishes: +# - lrzip is installed (used by per-stream compression) +# - Python deps from requirements.txt are installed +# - FlashAttention-3 wheel is installed (Hopper-only) +# - sp16384 tokenizer + tokenized FineWeb shards are at ./data/ +# +# Total time on a fresh pod: ~10-25 min (mostly the ~23 GB HF download). +# ------------------------------------------------------------------------------- + +set -e + +echo "==============================================" +echo " Parameter Golf -- Quinary submission setup" +echo "==============================================" + +# -------------------------------------------------------------------- +# 1. System packages (lrzip; needed by per-stream artifact compression) +# -------------------------------------------------------------------- +echo "" +echo "[1/4] System packages (lrzip)..." + +if command -v lrzip >/dev/null 2>&1; then + echo " lrzip already installed -- skipping." +else + apt-get update -qq + apt-get install -y -qq lrzip + echo " Installed." +fi + +# -------------------------------------------------------------------- +# 2. Python requirements +# -------------------------------------------------------------------- +echo "" +echo "[2/4] Python requirements..." + +if python3 -c "import torch, sentencepiece, numpy, huggingface_hub" 2>/dev/null; then + echo " Core packages already installed -- skipping." +else + pip install --upgrade pip -q + pip install -r requirements.txt -q + echo " Installed." +fi + +# -------------------------------------------------------------------- +# 3. FlashAttention-3 (Hopper-specific wheel) +# -------------------------------------------------------------------- +echo "" +echo "[3/4] FlashAttention-3..." + +if python3 -c "import flash_attn_interface" 2>/dev/null; then + echo " Already installed -- skipping." +else + pip install --no-cache-dir \ + "https://download.pytorch.org/whl/cu128/flash_attn_3-3.0.0-cp39-abi3-manylinux_2_28_x86_64.whl" + echo " Installed." 
+fi + +# -------------------------------------------------------------------- +# 4. FineWeb dataset + sp16384 tokenizer (canonical/ subset only) +# -------------------------------------------------------------------- +echo "" +echo "[4/4] FineWeb sp16384 dataset + tokenizer..." + +if ls ./data/canonical/datasets/fineweb10B_sp16384/fineweb_val_*.bin 1>/dev/null 2>&1; then + echo " Already present at ./data/canonical/ -- skipping." +else + echo " Downloading from deniskurlov/parameter-golf-fineweb-sp16384 (canonical/ only, ~23 GB)..." + hf download deniskurlov/parameter-golf-fineweb-sp16384 \ + --include "canonical/**" \ + --local-dir ./data \ + --repo-type dataset + echo " Downloaded." +fi + +# -------------------------------------------------------------------- +# Verification +# -------------------------------------------------------------------- +echo "" +echo "==============================================" +echo " Verification" +echo "==============================================" + +python3 - << 'EOF' +import sys, glob +import torch, numpy as np + +print(f"Python : {sys.version.split()[0]}") +print(f"PyTorch : {torch.__version__}") +print(f"CUDA : {torch.cuda.is_available()}") +print(f"GPUs : {torch.cuda.device_count()}") +if torch.cuda.is_available(): + for i in range(torch.cuda.device_count()): + props = torch.cuda.get_device_properties(i) + print(f" GPU {i} : {props.name} ({props.total_memory // 1024**3} GB)") + +try: + import flash_attn_interface # noqa + print("FlashAttn3 : installed") +except ImportError: + print("FlashAttn3 : NOT found (required for training)") + +import sentencepiece as spm +sp_path = "./data/canonical/tokenizers/fineweb_16384_bpe.model" +sp = spm.SentencePieceProcessor(model_file=sp_path) +print(f"Tokenizer : {sp.vocab_size()}-vocab SentencePiece BPE @ {sp_path}") + +train = sorted(glob.glob("./data/canonical/datasets/fineweb10B_sp16384/fineweb_train_*.bin")) +val = 
sorted(glob.glob("./data/canonical/datasets/fineweb10B_sp16384/fineweb_val_*.bin")) +total_val = sum(int(np.fromfile(f, dtype="576d, GQA 8:4->6:3, embed bottleneck 254->380, group_size 128->192, tokenizer SP8192->SP16384, single-blob LZMA->layout-aware per-stream v2 archive (header 0x03; per-quinary-tensor LZMA-screened layout selection over {base5, base5_T, bitmask, bitmask_T} then LZMA-vs-lrzip on the winner — bounded heuristic, not exhaustive 4×2; c_qkv split into Q/K/V sub-payloads; structurally based on parameter-golf PR #1855), and stride-16 sliding eval -> score-first TTT (3 epochs, lr=0.005, adapting only the 42,364 fp16 calibration parameters — per-layer scales, residual mix, Q-gain, skip weights, vocab bias). 52.8M params in 15.72MB max-seed total. Trained 7,800 steps in ~599s on 8xH100 SXM. 3-seed validation (42, 1337, 7): TTT BPB 1.1384 ± 0.0009 std (-22 mBPB from quinary architectural change vs ternary RT 1.1842, -24 mBPB additional from TTT), all FITS with margin ~275 KB under the 16 MB cap. BPB denominator audited end-to-end: verify_bpb.py exact-eval-slice lut_bytes=151,078,879 matches the runtime eval_bytes:151,078,879 printed by train_gpt.py for every seed (delta=+0). Per-stream v2 also solves the seed-dependent lrzip cliff that forced earlier full-blob lrzip artifacts to OVER on ~33% of seeds (with the v2 archive seed=7 is now actually the best-fitting seed at 1.1378 TTT BPB).", + "date": "2026-05-01T22:00:00Z", + "val_loss": 3.2093, + "val_bpb": 1.1384, + "bytes_total": 15724839, + "bytes_total_note": "Max across the 3 verified seeds (seed=7); per-seed values in seed_results. Range across seeds: 15,714,938 - 15,724,839. 
Margin under 16 MB cap = 275,161 bytes.", + "bytes_code": 79272, + "seed_results": { + "42": { "val_loss": 3.2083, "val_bpb": 1.1381, "val_bpb_roundtrip": 1.1626, "bytes_total": 15714938, "steps_reached": 7800, "eval_tokens": 37146624, "eval_bytes": 151078879, "verified": true }, + "1337": { "val_loss": 3.2120, "val_bpb": 1.1394, "val_bpb_roundtrip": 1.1633, "bytes_total": 15721124, "steps_reached": 7800, "eval_tokens": 37146624, "eval_bytes": 151078879, "verified": true }, + "7": { "val_loss": 3.2076, "val_bpb": 1.1378, "val_bpb_roundtrip": 1.1622, "bytes_total": 15724839, "steps_reached": 7800, "eval_tokens": 37146624, "eval_bytes": 151078879, "verified": true } + }, + "seed_mean_val_bpb": 1.1384, + "seed_std_val_bpb": 0.00085, + "seed_stderr_val_bpb": 0.00049, + "seed_mean_val_bpb_roundtrip": 1.1627, + "seed_std_val_bpb_roundtrip": 0.00056, + "seed_stderr_val_bpb_roundtrip": 0.00032, + "n_params": 52828668, + "n_quinary_params": 36495360, + "n_fp_params": 6896124, + "training_seconds": 599, + "eval_seconds": 300, + "eval_seconds_breakdown": { + "load_artifact_and_decompress": 5, + "roundtrip_eval": 80, + "ttt_eval": 215 + }, + "track": "track_non_record_16mb" +} diff --git a/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/train_gpt.py b/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/train_gpt.py new file mode 100644 index 0000000000..446bde7cd8 --- /dev/null +++ b/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/train_gpt.py @@ -0,0 +1,1652 @@ +"Quinary training script for OpenAI's Parameter Golf Challenge. Based on ternary submission by Ciprian-Florin Ifrim (24 March 2026). Quinary: {-2,-1,0,+1,+2} with base-5 packing (3 quins/byte = 2.667 bpw vs ternary 1.6 bpw)." 

import copy
import glob
import io
import math
import os
import random
import subprocess
import sys
import tempfile
import time
import lzma
from pathlib import Path
import numpy as np
import sentencepiece as spm
import torch
import torch.distributed as dist
import torch.nn.functional as F
from torch import Tensor, nn
from torch.nn.parallel import DistributedDataParallel as DDP
from flash_attn_interface import flash_attn_func

def _e(k, d, t=str):
    # Read env var `k` with default `d`, coerced to type `t`.
    # bool routes through int so "0" parses as False (bool("0") would be True).
    v = os.environ.get(k, str(d))
    if t == bool: return bool(int(v))
    return t(v)

class Hyperparameters:
    # Defaults below match the canonical SP16384 quinary submission config —
    # i.e. exactly what `run.sh` passes through to torchrun. A bare
    # `torchrun --standalone --nproc_per_node=8 train_gpt.py` (no env vars)
    # therefore reproduces the submission. `run.sh` is still the
    # documentation-of-record for the canonical config; these defaults are
    # mirrored from it so the two never silently disagree.

    # --- data / tokenizer ---
    data_path = _e("DATA_PATH", "./data/canonical/datasets/fineweb10B_sp16384")
    # [0-9] prefix matches only NNNNNN-suffixed shard files, ignoring any
    # sibling parallel-array files that might share the prefix.
    train_files = os.path.join(data_path, "fineweb_train_[0-9]*.bin")
    val_files = os.path.join(data_path, "fineweb_val_[0-9]*.bin")
    tokenizer_path = _e("TOKENIZER_PATH", "./data/canonical/tokenizers/fineweb_16384_bpe.model")
    run_id = os.environ.get("RUN_ID", f"run_{int(time.time())}")
    seed = _e("SEED", 42, int)
    compile_mode = _e("COMPILE_MODE", "default")

    # --- training loop / schedule ---
    val_batch_size = _e("VAL_BATCH_SIZE", 524288, int)
    val_loss_every = _e("VAL_LOSS_EVERY", 0, int) # 0 = no in-training val
    train_log_every = _e("TRAIN_LOG_EVERY", 1000, int)
    iterations = _e("ITERATIONS", 10000, int)
    warmdown_fraction = _e("WARMDOWN_FRACTION", 0.2, float)
    min_lr = _e("MIN_LR", 0.0, float) # floor on the LR multiplier (fraction of base LR)
    warmup_steps = _e("WARMUP_STEPS", 5, int)
    train_batch_tokens = _e("TRAIN_BATCH_TOKENS", 524288, int)
    train_seq_len = _e("TRAIN_SEQ_LEN", 1024, int)
    max_wallclock_seconds = _e("MAX_WALLCLOCK_SECONDS", 599.0, float)

    # --- model architecture ---
    vocab_size = _e("VOCAB_SIZE", 16384, int)
    num_layers = _e("NUM_LAYERS", 10, int)
    num_kv_heads = _e("NUM_KV_HEADS", 3, int)
    model_dim = _e("MODEL_DIM", 576, int)
    num_heads = _e("NUM_HEADS", 6, int)
    mlp_mult = _e("MLP_MULT", 4, int)
    tie_embeddings = _e("TIE_EMBEDDINGS", 1, int)
    rope_base = _e("ROPE_BASE", 5000.0, float)
    rope_type = _e("ROPE_TYPE", "yarn")
    yarn_max_len = _e("YARN_MAX_LEN", 2048, int)
    logit_softcap = _e("LOGIT_SOFTCAP", 10.0, float)
    softcap_type = _e("SOFTCAP_TYPE", "poly")
    tied_embed_init_std = _e("TIED_EMBED_INIT_STD", 0.005, float)
    qk_gain_init = _e("QK_GAIN_INIT", 5.0, float)
    activation_type = _e("ACTIVATION", "relu2")
    embed_dim = _e("EMBED_DIM", 380, int)

    # --- optimizer ---
    embed_lr = _e("EMBED_LR", 0.6, float)
    head_lr = _e("HEAD_LR", 0.02, float)
    adam_lr = _e("ADAM_LR", 0.05, float)
    adam_wd = _e("ADAM_WD", 0.05, float)
    tied_embed_lr = _e("TIED_EMBED_LR", 0.02, float)
    seq_len_start = _e("SEQ_LEN_START", 0, int)
    seq_schedule_fraction = _e("SEQ_SCHEDULE_FRACTION", 0.0, float)
    batch_tokens_start = _e("BATCH_TOKENS_START", 0, int)
    batch_schedule_fraction = _e("BATCH_SCHEDULE_FRACTION", 0.33, float)
    churn_log_every = _e("CHURN_LOG_EVERY", 0, int)
    matrix_lr = _e("MATRIX_LR", 0.035, float)
    scalar_lr = _e("SCALAR_LR", 0.02, float)
    muon_momentum = _e("MUON_MOMENTUM", 0.95, float)
    muon_backend_steps = _e("MUON_BACKEND_STEPS", 3, int)
    muon_wd = _e("MUON_WD", 0.0, float)
    matrix_optimizer = _e("MATRIX_OPTIMIZER", "muon")
    muon_momentum_warmup_start = _e("MUON_MOMENTUM_WARMUP_START", 0.85, float)
    muon_momentum_warmup_steps = _e("MUON_MOMENTUM_WARMUP_STEPS", 500, int)
    beta1 = _e("BETA1", 0.9, float)
    beta2 = _e("BETA2", 0.95, float)
    adam_eps = _e("ADAM_EPS", 1e-8, float)
    grad_clip_norm = _e("GRAD_CLIP_NORM", 0.0, float)

    # --- quantization / TTT / artifact ---
    bitnet_group_size = _e("BITNET_GROUP_SIZE", 192, int)
    ttt_steps = _e("TTT_STEPS", 3, int)
    ttt_lr = _e("TTT_LR", 0.005, float)
    ttt_tokens = _e("TTT_TOKENS", 32768, int)
    skip_training = _e("SKIP_TRAINING", 0, bool)
    scale_quant_bits = _e("SCALE_QUANT_BITS", 5, int) # 5-bit log-delta scale quantization
    fp_storage = True # FP8 storage for non-quinary (small) tensors

# Calibration / fp16-stored scalar+vector params adapted by score-first TTT.
# `scale_correction` was previously listed here but its gradient is blocked by
# the STE detach in QuinaryLinear.forward, so including it as a TTT target was
# a no-op. Excluded from the CTP tuple so the TTT optimizer reflects what
# actually receives gradients (~42k params, not 232k). The parameter still
# exists in the state-dict and is stored fp16 in the artifact (via the ndim<2
# bucket); it is just not selected for TTT adaptation.
CTP = ("attn_scale","mlp_scale","resid_mix","q_gain","skip_weights","vocab_bias")

def pack_quinary(q: Tensor):
    """Base-5 pack quinary symbols {-2..2}: three symbols per byte.

    Returns (packed_bytes, n_symbols). Trailing padding symbols are zeros.
    The largest encoded byte is 4 + 4*5 + 4*25 = 124, so a triple always
    fits in a uint8.
    """
    digits = (q.reshape(-1).to(torch.int8) + 2).numpy()  # shift {-2..2} -> {0..4}
    n = len(digits)
    pad = -n % 3  # identical to (3 - n % 3) % 3
    if pad:
        digits = np.concatenate([digits, np.zeros(pad, dtype=np.int8)])
    triples = digits.reshape(-1, 3).astype(np.uint8)
    packed = triples[:, 0] + triples[:, 1] * 5 + triples[:, 2] * 25
    return packed.tobytes(), n

def unpack_quinary(data: bytes, n: int) -> Tensor:
    """Inverse of pack_quinary: first `n` symbols as an int8 tensor in {-2..2}."""
    v = np.frombuffer(data, dtype=np.uint8).astype(np.int16)
    digits = np.zeros((len(v), 3), dtype=np.int8)
    for place in range(3):
        digits[:, place] = v % 5
        v = v // 5
    return torch.from_numpy(digits.reshape(-1)[:n].astype(np.int8) - 2)


def pack_quinary_bitmask(q) -> tuple[bytes, int, int]:
    """Pack quinary symbols in {-2..2} as three concatenated bit-planes.

    Byte layout (MSB-first within each byte, via numpy.packbits):
      zero-plane : ceil(n_total / 8) bytes — one bit per symbol, 1 = zero
      sign-plane : ceil(n_nonzero / 8) bytes — one bit per nonzero, 1 = negative
      mag2-plane : ceil(n_nonzero / 8) bytes — one bit per nonzero, 1 = |symbol| == 2

    Returns (concatenated_bytes, n_total, n_nonzero). Each plane carries
    homogeneous bit statistics, so the downstream compressor can model the
    streams independently instead of fighting a multimodal mixture.
    """
    if isinstance(q, torch.Tensor):
        symbols = q.reshape(-1).to(torch.int8).numpy()
    else:
        symbols = np.asarray(q, dtype=np.int8).reshape(-1)
    n_total = symbols.size
    nonzero = symbols != 0
    n_nonzero = int(nonzero.sum())
    vals = symbols[nonzero]
    planes = [
        np.packbits((~nonzero).astype(np.uint8)),           # 1 bit means "this symbol is zero"
        np.packbits((vals < 0).astype(np.uint8)),           # 1 = negative
        np.packbits((np.abs(vals) == 2).astype(np.uint8)),  # 1 = magnitude two
    ]
    return b"".join(p.tobytes() for p in planes), n_total, n_nonzero


def unpack_quinary_bitmask(data: bytes, n_total: int, n_nonzero: int) -> Tensor:
    """Inverse of pack_quinary_bitmask. Returns int8 tensor of length n_total."""
    z_len = (n_total + 7) // 8
    nz_len = (n_nonzero + 7) // 8
    expected = z_len + 2 * nz_len
    if len(data) != expected:
        raise ValueError(f"bitmask data size {len(data)} != expected {expected} "
                         f"(n_total={n_total}, n_nonzero={n_nonzero})")
    buf = np.frombuffer(data, dtype=np.uint8)
    is_zero = np.unpackbits(buf[:z_len])[:n_total].astype(bool)
    negative = np.unpackbits(buf[z_len:z_len + nz_len])[:n_nonzero].astype(bool)
    mag_two = np.unpackbits(buf[z_len + nz_len:])[:n_nonzero].astype(bool)
    magnitudes = np.where(mag_two, 2, 1).astype(np.int8)
    restored = np.where(negative, -magnitudes, magnitudes)
    out = np.zeros(n_total, dtype=np.int8)
    out[~is_zero] = restored
    return torch.from_numpy(out)


# Artifact archive: layout-aware per-stream v2 (header byte 0x03).
#
# - For each quinary tensor: screen the 4 layouts {base5, base5_T,
#   bitmask, bitmask_T} by LZMA9-compressed size, then run LZMA9 vs
#   lrzip-zpaq -L9 only on the winning layout. Bounded heuristic
#   with an LZMA floor — *not* an exhaustive 4×2 search.
# - For c_qkv.weight: split rows into Q / K / V sub-payloads each chosen
#   independently (Q, K, V have different trained distributions).
# - For other bulk fields (FP8 embeddings, large fp16 tensors): torch.save
#   the value and compress with min(lzma, lrzip).
# - Robust to the seed-dependent lrzip cliff observed on full-blob
#   compression: even if lrzip ZPAQ underperforms on one tensor's bytes,
#   lzma takes over for that tensor specifically.

# Compressor method IDs. They are stored per payload; 0x03 doubles as the
# archive-format header byte for the per-stream v2 container.
_COMPRESSOR_LZMA = 0
_COMPRESSOR_LRZIP_ZPAQ = 1
_COMPRESSOR_PER_STREAM_V2 = 3

# Threshold for treating a value as "bulk" (compressed independently).
_PER_STREAM_BULK_BYTES = 64 * 1024

# v2 layout IDs — what kind of body is stored in a bulk payload.
_LAYOUT_RAW = 0 # opaque bytes (torch.save output for non-quinary fields)
_LAYOUT_Q_BASE5 = 1 # base-5 packed quinary symbols, canonical row-major order
_LAYOUT_Q_BASE5_T = 2 # base-5 packed quinary symbols, transposed (column-major)
_LAYOUT_Q_BITMASK = 3 # bitmask packed (zero|sign|mag2), canonical
_LAYOUT_Q_BITMASK_T = 4 # bitmask packed, transposed
_LAYOUT_NAMES = {
    _LAYOUT_RAW: "raw",
    _LAYOUT_Q_BASE5: "base5",
    _LAYOUT_Q_BASE5_T: "base5_T",
    _LAYOUT_Q_BITMASK: "bitmask",
    _LAYOUT_Q_BITMASK_T: "bitmask_T",
}

# v2 sentinel for metadata refs (distinct string from v1 to avoid version confusion).
_BULK_SENTINEL_V2 = "__BULK_REF_V2__"


def _lrzip_compress_bytes(data: bytes, level: int = 9) -> bytes:
    """Compress raw bytes via `lrzip -z` (ZPAQ). Returns body only (no header).

    lrzip only works on files, so `data` is staged through a NamedTemporaryFile;
    both temp files are unlinked in the finally block regardless of outcome.

    Raises RuntimeError if the lrzip binary is not installed.
    """
    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(data); in_path = f.name
    out_path = in_path + ".lrz"
    try:
        try:
            subprocess.run(["lrzip", "-z", "-L", str(level), "-q", "-f", in_path],
                           check=True, capture_output=True)
        except FileNotFoundError as e:
            raise RuntimeError("lrzip binary not found; run `apt-get install lrzip`") from e
        with open(out_path, "rb") as f:
            return f.read()
    finally:
        # Best-effort cleanup: out_path may not exist if lrzip failed.
        for p in (in_path, out_path):
            try: os.unlink(p)
            except FileNotFoundError: pass


def _lrzip_decompress_bytes(body: bytes) -> bytes:
    """Decompress an lrzip ZPAQ body (no header).

    Mirror of _lrzip_compress_bytes: stages the body as a `.lrz` temp file,
    runs `lrzip -d`, reads back the output (same path minus the suffix), and
    unlinks both files. Raises RuntimeError if lrzip is missing.
    """
    with tempfile.NamedTemporaryFile(delete=False, suffix=".lrz") as f:
        f.write(body); in_path = f.name
    out_path = in_path[:-4]
    try:
        try:
            subprocess.run(["lrzip", "-d", "-q", "-f", in_path],
                           check=True, capture_output=True)
        except FileNotFoundError as e:
            raise RuntimeError("lrzip binary not found; required to load this artifact") from e
        with open(out_path, "rb") as f:
            return f.read()
    finally:
        for p in (in_path, out_path):
            try: os.unlink(p)
            except FileNotFoundError: pass


def _pick_best_compressor(data: bytes, level: int = 9, has_lrzip: bool = True) -> tuple[int, bytes]:
    """Try lzma + (optionally) lrzip; return (method_id, body) for the smaller.

    This is the per-stream robustness: even if one compressor cliffs on a
    given byte distribution (the seed=7 issue), the other usually doesn't.
    Any failure (missing binary, runtime error, broken lrzip backend) on
    lrzip is silently skipped — lzma is the always-available floor.
    """
    candidates: list[tuple[int, bytes]] = [(_COMPRESSOR_LZMA, lzma.compress(data, preset=level))]
    if has_lrzip:
        try:
            candidates.append((_COMPRESSOR_LRZIP_ZPAQ, _lrzip_compress_bytes(data, level)))
        except (RuntimeError, subprocess.CalledProcessError, OSError):
            pass
    return min(candidates, key=lambda c: len(c[1]))


def _is_bulk(value) -> bool:
    """Decide whether a state_dict field should be peeled off for per-stream compression."""
    if isinstance(value, (bytes, bytearray)):
        return len(value) >= _PER_STREAM_BULK_BYTES
    if isinstance(value, torch.Tensor):
        return value.numel() * value.element_size() >= _PER_STREAM_BULK_BYTES
    return False


def _load_artifact(blob: bytes) -> dict:
    """Load a per-stream v2 archive (header byte 0x03) into a state_dict."""
    return _deserialize_per_stream_v2(blob)


# ---------------------------------------------------------------------------
# v2 archive: layout-aware per-stream compression
# ---------------------------------------------------------------------------

def _gen_quinary_layout_candidates(symbols_2d: np.ndarray) -> list[tuple[int, bytes, dict]]:
    """For a 2D int8 symbol matrix in {-2..2}, return a list of
    (layout_id, body_bytes, layout_meta) tuples — every supported layout.

    layout_meta carries the per-layout fields needed at deserialize time
    (n_quins, n_nonzero, etc.).
    """
    rows, cols = symbols_2d.shape
    flat = symbols_2d.reshape(-1)
    transposed = symbols_2d.T.reshape(-1).copy() # contiguous transpose
    # base-5 canonical
    body_b5, n_b5 = pack_quinary(torch.from_numpy(flat))
    # base-5 transposed
    body_b5_t, n_b5_t = pack_quinary(torch.from_numpy(transposed))
    # bitmask canonical
    body_bm, nt_bm, nz_bm = pack_quinary_bitmask(torch.from_numpy(flat))
    # bitmask transposed
    body_bm_t, nt_bm_t, nz_bm_t = pack_quinary_bitmask(torch.from_numpy(transposed))

    return [
        (_LAYOUT_Q_BASE5, body_b5, {"n_quins": n_b5}),
        (_LAYOUT_Q_BASE5_T, body_b5_t, {"n_quins": n_b5_t}),
        (_LAYOUT_Q_BITMASK, body_bm, {"n_total": nt_bm, "n_nonzero": nz_bm}),
        (_LAYOUT_Q_BITMASK_T, body_bm_t, {"n_total": nt_bm_t, "n_nonzero": nz_bm_t}),
    ]


def _layout_to_canonical_bytes(layout: int, body: bytes, rows: int, cols: int,
                               layout_meta: dict) -> bytes:
    """Inverse of _gen_quinary_layout_candidates: take a body in one of the
    layout encodings and return the canonical base-5-packed bytes that
    `deq_sd` expects in entry["packed"]."""
    if layout == _LAYOUT_Q_BASE5:
        # Already canonical.
        return body
    if layout == _LAYOUT_Q_BASE5_T:
        # Decode column-major symbols, un-transpose back to row-major, repack.
        symbols_t = unpack_quinary(body, layout_meta["n_quins"]).numpy()
        symbols = symbols_t.reshape(cols, rows).T.reshape(-1).copy()
        out, _ = pack_quinary(torch.from_numpy(symbols))
        return out
    if layout == _LAYOUT_Q_BITMASK:
        symbols = unpack_quinary_bitmask(body, layout_meta["n_total"], layout_meta["n_nonzero"]).numpy()
        out, _ = pack_quinary(torch.from_numpy(symbols))
        return out
    if layout == _LAYOUT_Q_BITMASK_T:
        symbols_t = unpack_quinary_bitmask(body, layout_meta["n_total"], layout_meta["n_nonzero"]).numpy()
        symbols = symbols_t.reshape(cols, rows).T.reshape(-1).copy()
        out, _ = pack_quinary(torch.from_numpy(symbols))
        return out
    raise ValueError(f"unknown layout id {layout}")


def _qkv_split_sizes(name: str, rows: int, cols: int) -> tuple[int, int] | None:
    """For a c_qkv weight, derive (q_rows, kv_rows) row-counts.

    Standard transformer convention: q_size = num_heads * head_dim = model_dim
    (the square attention assumption). So q_rows == cols. The remaining rows
    are split equally between K and V: kv_rows = (rows - cols) // 2.

    Returns None if name doesn't look like c_qkv or the ratio doesn't decompose
    cleanly (in which case we don't split, just compress as one tensor).

    NOTE(review): `cols` here is the possibly group-padded column count passed
    by the caller (entry["padded_cols"]); the q_rows == cols assumption holds
    only when padding is a no-op — the decomposition checks below return None
    otherwise, so the tensor is just not split.
    """
    if not name.endswith("c_qkv.weight"):
        return None
    if rows <= cols:
        return None # not a GQA shape; or single-head case where Q==K==V
    extra = rows - cols # 2 * kv_rows
    if extra % 2 != 0:
        return None
    kv_rows = extra // 2
    q_rows = cols
    if q_rows + 2 * kv_rows != rows:
        return None
    return q_rows, kv_rows


def _serialize_per_stream_v2(state_dict: dict, level: int = 9) -> tuple[bytes, str]:
    """v2 archive: layout-aware per-stream compression.

    For each quinary entry, generate up to 4 layout candidates
    (base5, base5_T, bitmask, bitmask_T), screen them by LZMA9-compressed
    size, then compress the winning layout with min(LZMA9, lrzip-zpaq).
    For c_qkv weights, the row-block is split into Q/K/V sub-payloads
    handled independently. All other bulk fields use the v1 raw-bytes path.

    This is a bounded-cost heuristic with an LZMA floor — *not* an
    exhaustive 4×2 search. It can in principle miss a (layout, compressor)
    pair where the LZMA-screen-loser would have won under lrzip; in
    practice this is rare on this stack and the LZMA floor caps the
    worst case at the canonical base5+LZMA encoding.

    Returns (archive_bytes, audit_string) so the caller can route the
    audit line through the run's logger.
    """
    from shutil import which
    has_lrzip = which("lrzip") is not None

    bulk_bodies: list[tuple[int, bytes]] = [] # (compressor_method, compressed_body)
    metadata: dict = {}
    audit_lines: list[str] = []
    layout_counts = {n: 0 for n in _LAYOUT_NAMES.values()}
    method_counts = {"lzma": 0, "lrzip": 0}
    qkv_split_count = 0

    # NOTE(review): `_store` is never called in this function —
    # `_best_quinary_layout` and the raw-bulk path below inline the same
    # append/count bookkeeping. Dead code; candidate for removal or for
    # routing both paths through it.
    def _store(layout: int, body: bytes) -> tuple[int, int, int]:
        """Compress and store; return (payload_idx, method_id, compressed_size)."""
        method, comp = _pick_best_compressor(body, level, has_lrzip)
        bulk_bodies.append((method, comp))
        method_counts["lzma" if method == _COMPRESSOR_LZMA else "lrzip"] += 1
        layout_counts[_LAYOUT_NAMES[layout]] += 1
        return len(bulk_bodies) - 1, method, len(comp)

    def _best_quinary_layout(symbols_2d: np.ndarray) -> tuple[int, int, int, dict, int]:
        """Generate all layout candidates, screen with lzma to pick best layout,
        then compress winner with min(lzma, lrzip) for the final body.

        Cheap-screen + refine cuts serialize compute from O(layouts*compressors)
        to O(layouts) lzma + 1 lrzip, keeping serialize time bounded even when
        lrzip is slow on bad streams.

        Returns (chosen_layout, payload_idx, compressed_size, layout_meta, n_candidates_evaluated).
        """
        cands = _gen_quinary_layout_candidates(symbols_2d)
        best_layout, best_body, best_meta = None, None, None
        best_lzma_size = None
        for layout, body, meta in cands:
            lzma_size = len(lzma.compress(body, preset=level))
            if best_lzma_size is None or lzma_size < best_lzma_size:
                best_lzma_size = lzma_size
                best_layout, best_body, best_meta = layout, body, meta
        # Now compress the winning layout with min(lzma, lrzip) for the actual stored body.
        comp_method, comp_body = _pick_best_compressor(best_body, level, has_lrzip)
        bulk_bodies.append((comp_method, comp_body))
        method_counts["lzma" if comp_method == _COMPRESSOR_LZMA else "lrzip"] += 1
        layout_counts[_LAYOUT_NAMES[best_layout]] += 1
        return best_layout, len(bulk_bodies) - 1, len(comp_body), best_meta, len(cands)

    for name, entry in state_dict.items():
        if not isinstance(entry, dict):
            # Plain (non-tensor-entry) fields ride along in the metadata blob.
            metadata[name] = entry
            continue
        new_entry = dict(entry) # shallow copy

        if entry.get("type") == "quinary" and "packed" in entry:
            # Reconstruct full symbol matrix from canonical packed bytes.
            rows, cols = entry["shape"][0], entry["padded_cols"]
            symbols_full = unpack_quinary(entry["packed"], entry["n_quins"]).numpy()
            symbols_2d = symbols_full[:rows * cols].reshape(rows, cols)

            split = _qkv_split_sizes(name, rows, cols)
            if split is not None:
                q_rows, kv_rows = split
                qkv_split_count += 1
                refs = []
                # Row order in the fused weight is [Q | K | V]; each slice is
                # layout-selected and compressed on its own.
                for sub_name, sub_2d in (
                    ("Q", symbols_2d[:q_rows]),
                    ("K", symbols_2d[q_rows:q_rows + kv_rows]),
                    ("V", symbols_2d[q_rows + kv_rows:]),
                ):
                    layout, idx, sz, meta, _ = _best_quinary_layout(sub_2d.copy())
                    sub_rows, sub_cols = sub_2d.shape
                    refs.append({
                        "_v2": _BULK_SENTINEL_V2, "idx": idx, "layout": layout,
                        "rows": sub_rows, "cols": sub_cols, "meta": meta,
                    })
                new_entry["packed"] = ("__QKV_SPLIT__", refs)
                audit_lines.append(f" {name}: qkv_split layouts={[_LAYOUT_NAMES[r['layout']] for r in refs]}")
            else:
                layout, idx, sz, meta, _ = _best_quinary_layout(symbols_2d.copy())
                new_entry["packed"] = {
                    "_v2": _BULK_SENTINEL_V2, "idx": idx, "layout": layout,
                    "rows": rows, "cols": cols, "meta": meta,
                }
                audit_lines.append(f" {name}: {_LAYOUT_NAMES[layout]} ({sz/1e6:.3f}MB)")

        # Other bulk fields (fp8 data, fp16 large tensors, scale_delta tensors,
        # etc.) — fall back to v1's torch.save+pick path.
        for key, value in list(new_entry.items()):
            if key == "packed": # already handled
                continue
            if _is_bulk(value):
                buf = io.BytesIO(); torch.save(value, buf)
                method, comp = _pick_best_compressor(buf.getvalue(), level, has_lrzip)
                bulk_bodies.append((method, comp))
                method_counts["lzma" if method == _COMPRESSOR_LZMA else "lrzip"] += 1
                layout_counts["raw"] += 1
                new_entry[key] = {
                    "_v2": _BULK_SENTINEL_V2, "idx": len(bulk_bodies) - 1,
                    "layout": _LAYOUT_RAW,
                }
        metadata[name] = new_entry

    # Compress metadata.
    meta_buf = io.BytesIO(); torch.save(metadata, meta_buf)
    meta_compressed = lzma.compress(meta_buf.getvalue(), preset=level)

    # Assemble archive (header 0x03).
+ out = bytearray() + out.append(_COMPRESSOR_PER_STREAM_V2) + out.extend(len(meta_compressed).to_bytes(4, "little")) + out.extend(meta_compressed) + out.extend(len(bulk_bodies).to_bytes(4, "little")) + for method, body in bulk_bodies: + out.append(method) + out.extend(len(body).to_bytes(4, "little")) + out.extend(body) + + layout_summary = ", ".join(f"{n}={c}" for n, c in layout_counts.items() if c) + audit = (f"per_stream_v2: meta={len(meta_compressed)/1e6:.2f}MB, " + f"{len(bulk_bodies)} bulk payloads " + f"(lzma={method_counts['lzma']}, lrzip={method_counts['lrzip']}; " + f"{layout_summary}), qkv_splits={qkv_split_count}, " + f"total={len(out)/1e6:.2f}MB") + return bytes(out), audit + + +def _deserialize_per_stream_v2(blob: bytes) -> dict: + """Inverse of _serialize_per_stream_v2. Returns a state_dict whose quinary + entries have canonical entry["packed"] bytes — `deq_sd` is unaware of the + archive layer.""" + cursor = 0 + assert blob[cursor] == _COMPRESSOR_PER_STREAM_V2, \ + f"expected v2 header 0x03, got {blob[0]:#x}" + cursor += 1 + meta_size = int.from_bytes(blob[cursor:cursor + 4], "little"); cursor += 4 + meta_blob = blob[cursor:cursor + meta_size]; cursor += meta_size + metadata = torch.load(io.BytesIO(lzma.decompress(meta_blob)), + map_location="cpu", weights_only=False) + n_payloads = int.from_bytes(blob[cursor:cursor + 4], "little"); cursor += 4 + decompressed: list[bytes] = [] + for _ in range(n_payloads): + method = blob[cursor]; cursor += 1 + size = int.from_bytes(blob[cursor:cursor + 4], "little"); cursor += 4 + body = blob[cursor:cursor + size]; cursor += size + if method == _COMPRESSOR_LZMA: + decompressed.append(lzma.decompress(body)) + elif method == _COMPRESSOR_LRZIP_ZPAQ: + decompressed.append(_lrzip_decompress_bytes(body)) + else: + raise ValueError(f"unknown compressor method {method:#x}") + + def _is_v2_ref(v): + return isinstance(v, dict) and v.get("_v2") == _BULK_SENTINEL_V2 + + for name, entry in metadata.items(): + if not 
isinstance(entry, dict): + continue + for key, value in list(entry.items()): + if _is_v2_ref(value): + if value["layout"] == _LAYOUT_RAW: + entry[key] = torch.load(io.BytesIO(decompressed[value["idx"]]), + map_location="cpu", weights_only=False) + else: + entry[key] = _layout_to_canonical_bytes( + value["layout"], decompressed[value["idx"]], + value["rows"], value["cols"], value["meta"]) + elif (isinstance(value, tuple) and len(value) == 2 + and value[0] == "__QKV_SPLIT__"): + # Reconstruct each part's symbol matrix, concat along row dim, + # repack as canonical base-5. + refs = value[1] + sub_symbols = [] + for ref in refs: + canonical_body = _layout_to_canonical_bytes( + ref["layout"], decompressed[ref["idx"]], + ref["rows"], ref["cols"], ref["meta"]) + n = ref["rows"] * ref["cols"] + sub_syms = unpack_quinary(canonical_body, n).numpy()[:n] + sub_symbols.append(sub_syms.reshape(ref["rows"], ref["cols"])) + full = np.concatenate(sub_symbols, axis=0).reshape(-1).copy() + entry[key], _ = pack_quinary(torch.from_numpy(full)) + return metadata + + +def _quant_scale(scale_fp16: Tensor, bits: int) -> dict: + """Quantize per-group scales to bits-bit log-deltas. 
    Returns dict with
    `scale_anchor` (fp16), `scale_step` (fp16), `scale_delta` (packed bytes
    or int8 tensor), plus `scale_bits` to dispatch in deq_sd."""
    scale_f = scale_fp16.float().reshape(-1)
    n_groups = scale_f.numel()
    # Work in log2 space: scales are positive and span orders of magnitude.
    log_scale = torch.log2(scale_f.clamp(min=1e-12))
    lo, hi = float(log_scale.min().item()), float(log_scale.max().item())
    anchor = (lo + hi) / 2.0
    spread = hi - lo
    levels = 1 << bits
    half = levels // 2
    step = max(spread / max(levels - 1, 1), 1e-9)
    delta = ((log_scale - anchor) / step).round().clamp(-half, half - 1).to(torch.int32)
    if bits == 4:
        # Two 4-bit codes per byte, low nibble first (mirrored in
        # _dequant_scale); pad to an even count before packing.
        d = (delta + half).to(torch.uint8).numpy()
        if len(d) % 2:
            d = np.concatenate([d, np.zeros(1, dtype=np.uint8)])
        packed = (d[::2] | (d[1::2] << 4)).astype(np.uint8)
        delta_stored = torch.from_numpy(packed.copy())
    elif bits == 5:
        # 5-bit codes stored one per byte (no bit packing).
        delta_stored = (delta + half).to(torch.uint8)
    else:  # 8
        delta_stored = delta.to(torch.int8)
    return {
        "scale_anchor": torch.tensor(anchor, dtype=torch.float16),
        "scale_step": torch.tensor(step, dtype=torch.float16),
        "scale_delta": delta_stored,
        "scale_n_groups": n_groups,
        "scale_bits": bits,
    }

def _dequant_scale(entry: dict) -> Tensor:
    """Reconstruct fp16 per-group scales from the quantized representation."""
    bits = int(entry["scale_bits"])
    half = (1 << bits) // 2
    n_groups = int(entry["scale_n_groups"])
    if bits == 4:
        packed = entry["scale_delta"].to(torch.uint8).numpy()
        low = packed & 0x0F
        high = (packed >> 4) & 0x0F
        d = np.empty(packed.size * 2, dtype=np.uint8)
        d[0::2] = low
        d[1::2] = high
        # Drop the possible pad nibble via [:n_groups].
        delta = torch.from_numpy(d[:n_groups].astype(np.int32) - half)
    elif bits == 5:
        delta = entry["scale_delta"].to(torch.int32) - half
    else:  # 8
        delta = entry["scale_delta"].to(torch.int32)
    anchor = entry["scale_anchor"].float()
    step = entry["scale_step"].float()
    return (2.0 ** (anchor + delta.float() * step)).to(torch.float16)


def q_sd(state_dict: dict, group_size: int = 64,
         scale_quant_bits: int = 0) -> tuple[dict, dict]:
    """Quantize a state_dict for serialization.

    Large 2-D matrices (numel > 65_536, excluding embedding/head/proj tensors
    by name) become grouped quinary codes {-2..+2} with per-group fp16 scales
    (optionally log-quantized to 4/5/8 bits via _quant_scale); other 2-D
    tensors become fp8 e4m3; everything else fp16. 3-D tensors are flattened
    to 2-D first and their original shape kept in "orig_shape".
    Returns (quantized_dict, byte/param stats dict).
    """
    quantized = {}
    stats = {"quinary_params": 0, "quinary_bytes": 0, "fp_params": 0, "fp_bytes": 0}
    for name, tensor in state_dict.items():
        t = tensor.detach().cpu().float().contiguous()
        t_orig_shape = list(t.shape)
        if t.ndim == 3:
            t = t.reshape(t.shape[0], -1)
        is_quantized_candidate = (
            t.ndim == 2 and t.numel() > 65_536
            and "tok_emb" not in name and "lm_head" not in name and "embed_proj" not in name
        )
        if is_quantized_candidate:
            # Pad columns to a multiple of group_size; padding is stripped in
            # deq_sd via "shape".
            pad = (group_size - t.shape[1] % group_size) % group_size
            t_padded = F.pad(t, (0, pad)) if pad > 0 else t
            t_grouped = t_padded.reshape(-1, group_size)
            # .half().float() round-trips the scale through fp16 so the stored
            # and applied scales match exactly.
            scale = t_grouped.abs().mean(-1, keepdim=True).clamp(min=1e-8).half().float()
            q = (t_grouped / scale).round().clamp(-2, 2).to(torch.int8)

            packed_bytes, n_packed = pack_quinary(q)
            stats["quinary_params"] += t.numel()

            entry = {
                "type": "quinary", "packed": packed_bytes,
                "shape": list(t.shape), "padded_cols": t_padded.shape[1],
                "group_size": group_size, "n_quins": n_packed,
                "orig_shape": t_orig_shape,
            }
            if scale_quant_bits and scale_quant_bits in (4, 5, 8):
                sq = _quant_scale(scale.half().squeeze(-1), scale_quant_bits)
                entry.update(sq)
                stats["quinary_bytes"] += len(packed_bytes) + sq["scale_delta"].numel() + 4  # +4 for anchor+step
            else:
                entry["scale"] = scale.half().squeeze(-1)
                stats["quinary_bytes"] += len(packed_bytes) + scale.numel() * 2
            quantized[name] = entry
        elif t.ndim == 2:
            quantized[name] = {"type": "fp8", "data": t.to(torch.float8_e4m3fn)}
            stats["fp_params"] += t.numel()
            stats["fp_bytes"] += t.numel()
        else:
            quantized[name] = {"type": "fp16", "data": t.half()}
            stats["fp_params"] += t.numel()
            stats["fp_bytes"] += t.numel() * 2
    return quantized, stats

def deq_sd(quantized: dict, target_dtype=torch.bfloat16):
    """Inverse of q_sd: rebuild a dense state_dict in `target_dtype`."""
    out = {}
    for name, entry in quantized.items():
        if entry["type"] == "quinary":
            q = unpack_quinary(entry["packed"], entry["n_quins"])
            q = q.float().reshape(-1,
entry["group_size"]) + if "scale_bits" in entry: + scale = _dequant_scale(entry).float().unsqueeze(-1) + else: + scale = entry["scale"].float().unsqueeze(-1) + q_absmean = q.abs().mean(-1, keepdim=True).clamp(min=1e-8) + t = (q * (scale / q_absmean)).reshape(-1, entry["padded_cols"]) + shape = entry["shape"] + result = t[:shape[0], :shape[1]].to(target_dtype) + orig = entry.get("orig_shape") + out[name] = result.reshape(orig).contiguous() if orig and orig != shape else result.contiguous() + elif entry["type"] == "fp8": + out[name] = entry["data"].to(torch.float32).to(target_dtype).contiguous() + else: + out[name] = entry["data"].to(target_dtype).contiguous() + return out + +def quin_stats(model: nn.Module, group_size: int = 64): + total = zeros = 0 + with torch.no_grad(): + for name, p in model.named_parameters(): + if p.ndim == 2 and ("weight" in name or "prototypes" in name) and p.shape[0] > 1 and p.numel() % group_size == 0: + w = p.detach().float().reshape(-1, group_size) + scale = w.abs().mean(-1, keepdim=True).clamp(min=1e-8).half().float() + q = (w / scale).round().clamp(-2, 2) + zeros += int((q == 0).sum().item()) + total += int(q.numel()) + return {"zero_frac": zeros / max(total, 1), "total_weights": total} + +_prev_committed: dict = {} + +def churn_fn(model: nn.Module, group_size: int = 64): + global _prev_committed + total = flipped = 0 + with torch.no_grad(): + for name, p in model.named_parameters(): + if p.ndim == 2 and ("weight" in name or "prototypes" in name) and p.shape[0] > 1 and p.numel() % group_size == 0: + w = p.detach().float().reshape(-1, group_size) + scale = w.abs().mean(-1, keepdim=True).clamp(min=1e-8).half().float() + q = (w / scale).round().clamp(-2, 2).cpu().numpy() + if name in _prev_committed: + flipped += int(np.sum(q != _prev_committed[name])) + total += q.size + _prev_committed[name] = q + return flipped / max(total, 1) + +def ns_orth(G: Tensor, steps: int = 10, eps: float = 1e-7) -> Tensor: + a, b, c = (3.4445, -4.7750, 2.0315) 
    X = G.bfloat16()
    X /= X.norm() + eps
    # Iterate on the wide orientation (rows <= cols) for efficiency.
    transposed = G.size(0) > G.size(1)
    if transposed:
        X = X.T
    for _ in range(steps):
        A = X @ X.T
        B = b * A + c * A @ A
        X = a * X + B @ X
    return X.T if transposed else X

class Muon(torch.optim.Optimizer):
    """Momentum + Newton–Schulz orthogonalized updates for 2-D matrices.

    Each rank owns every world_size-th parameter (i % world_size == rank),
    computes its orthogonalized update into a shared flat buffer, and a SUM
    all-reduce assembles the full update on every rank.
    """
    def __init__(self, params, lr: float, momentum: float, backend_steps: int, nesterov: bool = True, wd: float = 0.0):
        super().__init__(params, dict(lr=lr, momentum=momentum, backend_steps=backend_steps, nesterov=nesterov, wd=wd))

    @torch.no_grad()
    def step(self, closure=None):
        loss = None
        if closure is not None:
            with torch.enable_grad():
                loss = closure()
        distributed = dist.is_available() and dist.is_initialized()
        world_size = dist.get_world_size() if distributed else 1
        rank = dist.get_rank() if distributed else 0
        for group in self.param_groups:
            params = group["params"]
            if not params:
                continue
            lr, momentum = group["lr"], group["momentum"]
            backend_steps, nesterov = group["backend_steps"], group["nesterov"]
            total_params = sum(int(p.numel()) for p in params)
            # Zeros everywhere except this rank's slots; all_reduce(SUM) then
            # yields the complete update vector on every rank.
            updates_flat = torch.zeros(total_params, device=params[0].device, dtype=torch.bfloat16)
            curr = 0
            for i, p in enumerate(params):
                if i % world_size == rank and p.grad is not None:
                    g = p.grad
                    state = self.state[p]
                    if "momentum_buffer" not in state:
                        state["momentum_buffer"] = torch.zeros_like(g)
                    buf = state["momentum_buffer"]
                    buf.mul_(momentum).add_(g)
                    if nesterov:
                        g = g.add(buf, alpha=momentum)
                    g = F.rms_norm(g.float(), (g.size(-1),)).bfloat16()
                    g = ns_orth(g, steps=backend_steps)
                    # Scale tall matrices up by sqrt(rows/cols).
                    g *= max(1, g.size(0) / g.size(1)) ** 0.5
                    updates_flat[curr:curr + p.numel()] = g.reshape(-1)
                # curr advances for every param, owned or not, so offsets agree
                # across ranks.
                curr += p.numel()
            if distributed:
                dist.all_reduce(updates_flat, op=dist.ReduceOp.SUM)
            wd = group.get("wd", 0.0)
            curr = 0
            for p in params:
                g = updates_flat[curr : curr + p.numel()].view_as(p).to(dtype=p.dtype)
                if wd > 0:
                    # Decoupled weight decay (AdamW-style shrink before step).
                    p.mul_(1 - lr * wd)
                p.add_(g, alpha=-lr)
                curr += p.numel()
        return loss

def ld_shard(file: Path) -> Tensor:
    # NOTE(review): the patch text of this region is corrupted as captured
    # here — everything between 'np.dtype("' and the '-> Tensor:' fragment
    # below (the remainder of ld_shard, the TokenStream class header,
    # __init__ and _advance_file, and the signature of TokenStream.take) was
    # lost in extraction, most likely because angle-bracketed dtype strings
    # (e.g. "<u2") were eaten by the capture. The surviving fragments are
    # reproduced verbatim; restore this span from the records folder before
    # editing it.
    header_bytes = 256 * np.dtype("
 Tensor:
        chunks = []
        remaining = n
        while remaining > 0:
            avail = self.tokens.numel() - self.pos
            if avail <= 0:
                # Current shard exhausted: advance to the next token file.
                self._advance_file()
                continue
            k = min(remaining, avail)
            chunks.append(self.tokens[self.pos:self.pos + k])
            self.pos += k
            remaining -= k
        return chunks[0] if len(chunks) == 1 else torch.cat(chunks)

class DistributedTokenLoader:
    """Hands each rank a disjoint contiguous span of the shared token stream."""
    def __init__(self, pattern: str, rank: int, world_size: int, device: torch.device):
        self.rank, self.world_size, self.device = rank, world_size, device
        self.stream = TokenStream(pattern)

    def next_batch(self, global_tokens: int, seq_len: int, grad_accum_steps: int) -> tuple[Tensor, Tensor]:
        local_tokens = global_tokens // (self.world_size * grad_accum_steps)
        # +1 so each rank has the extra trailing token needed for targets.
        per_rank_span = local_tokens + 1
        chunk = self.stream.take(per_rank_span * self.world_size)
        start = self.rank * per_rank_span
        local = chunk[start:start + per_rank_span].pin_memory().to(self.device, non_blocking=True).to(torch.int64)
        x = local[:-1].reshape(-1, seq_len)
        y = local[1:].reshape(-1, seq_len)
        return x, y

class RMSNorm(nn.Module):
    """Parameter-free RMS norm (thin wrapper over F.rms_norm)."""
    def __init__(self, eps: float | None = None):
        super().__init__()
        self.eps = eps

    def forward(self, x: Tensor) -> Tensor:
        return F.rms_norm(x, (x.size(-1),), eps=self.eps)

def apply_fp8_ste(w: Tensor) -> Tensor:
    """Fake-quantize w through fp8 e4m3 with a straight-through estimator."""
    w_sim = w.to(torch.float8_e4m3fn).to(w.dtype)
    # Forward sees w_sim; backward sees identity (STE).
    return (w_sim - w).detach() + w

class QATLinear(nn.Linear):
    """nn.Linear with fp8 fake-quantized weights (QAT)."""
    def forward(self, x: Tensor) -> Tensor:
        w_qat = apply_fp8_ste(self.weight)
        return F.linear(x, w_qat.to(x.dtype), self.bias.to(x.dtype) if self.bias is not None else None)

class QATEmbedding(nn.Embedding):
    """nn.Embedding with fp8 fake-quantized weights (QAT)."""
    def forward(self, input: Tensor) -> Tensor:
        w_qat = apply_fp8_ste(self.weight)
        return F.embedding(input, w_qat, self.padding_idx, self.max_norm,
                           self.norm_type, self.scale_grad_by_freq, self.sparse)

class QuinaryLinear(nn.Linear):
    def __init__(self, in_features, out_features,
                 bias=False, group_size=64):
        super().__init__(in_features, out_features, bias=bias)
        self.group_size = group_size
        num_groups = (in_features * out_features) // group_size
        # Inert by design: the STE detach below blocks gradients to
        # `scale_correction`. Kept as a fp32 buffer at value 1.0 for
        # backwards-compatibility with the ternary-base state-dict layout.
        # An attempt to fix the STE so this parameter receives gradients
        # was tested 2026-05-01 (commit b9c…) and showed a small
        # training-time regression with no TTT benefit, so reverted.
        self.scale_correction = nn.Parameter(torch.ones(num_groups, dtype=torch.float32))

    def forward(self, x: Tensor) -> Tensor:
        # QAT forward: fake-quantize weights to {-2,-1,0,+1,+2} x per-group
        # scale, straight-through gradient to the latent fp weights.
        w = self.weight.bfloat16()
        g = self.group_size
        w_g = w.reshape(-1, g)
        scale = w_g.abs().mean(-1, keepdim=True).clamp(min=1e-8) * self.scale_correction.to(w.dtype).unsqueeze(-1)
        q = (w_g / scale).round().clamp(-2, 2)
        w_quantized = w + ((q * scale).reshape(w.shape) - w).detach()
        return F.linear(x, w_quantized,
                        self.bias.to(x.dtype) if self.bias is not None else None)


class NormedQuinaryLinear(QuinaryLinear):
    """QuinaryLinear that RMS-normalizes its input first."""
    def forward(self, x: Tensor) -> Tensor:
        return super().forward(F.rms_norm(x, (x.size(-1),)))

def restore_low_dim_params_to_fp32(module: nn.Module) -> None:
    """Force <2-D params (and names matched by CTP) back to fp32 in place."""
    with torch.no_grad():
        for name, param in module.named_parameters():
            if (param.ndim < 2 or any(p in name for p in CTP)) and param.dtype != torch.float32:
                param.data = param.data.float()

class Rotary(nn.Module):
    """Rotary position embeddings with optional YaRN frequency interpolation
    and an optional cos/sin cache (disabled under cudagraph/reduce-overhead
    compilation via no_cache)."""
    def __init__(self, dim: int, base: float = 10000.0, no_cache: bool = False,
                 rope_type: str = "rope", yarn_max_len: int = 4096, train_seq_len: int = 1024):
        super().__init__()
        self.no_cache = no_cache
        inv_freq = 1.0 / (base ** (torch.arange(0, dim, 2, dtype=torch.float32) / dim))
        if rope_type == "yarn":
            # YaRN: interpolate low frequencies by train_seq_len/yarn_max_len,
            # leave high frequencies intact, linear ramp in between.
            scale = train_seq_len / yarn_max_len
            freq_idx = torch.arange(0, dim, 2, dtype=torch.float32)
            ramp = torch.clamp((freq_idx / dim - 0.25) / 0.75, 0.0, 1.0)
            inv_freq = inv_freq / (ramp * (1.0 / scale - 1.0) + 1.0)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self._seq_len_cached = 0
        self._cos_cached: Tensor | None = None
        self._sin_cached: Tensor | None = None

    def forward(self, seq_len, device, dtype):
        if self.no_cache:
            # Recompute every call — keeps the op graph capture-safe.
            t = torch.arange(seq_len, device=device, dtype=self.inv_freq.dtype)
            freqs = torch.outer(t, self.inv_freq.to(device))
            return freqs.cos()[None, :, None, :].to(dtype=dtype), freqs.sin()[None, :, None, :].to(dtype=dtype)
        if (
            self._cos_cached is None
            or self._sin_cached is None
            or self._seq_len_cached != seq_len
            or self._cos_cached.device != device
        ):
            t = torch.arange(seq_len, device=device, dtype=self.inv_freq.dtype)
            freqs = torch.outer(t, self.inv_freq.to(device))
            self._cos_cached = freqs.cos()[None, :, None, :]
            self._sin_cached = freqs.sin()[None, :, None, :]
            self._seq_len_cached = seq_len
        return self._cos_cached.to(dtype=dtype), self._sin_cached.to(dtype=dtype)

def apply_rotary_emb(x: Tensor, cos: Tensor, sin: Tensor) -> Tensor:
    """Apply RoPE in the rotate-half convention to the last dim of x."""
    half = x.size(-1) // 2
    x1, x2 = x[..., :half], x[..., half:]
    return torch.cat((x1 * cos + x2 * sin, x1 * (-sin) + x2 * cos), dim=-1)

class CausalSelfAttention(nn.Module):
    def __init__(self, dim, num_heads, num_kv_heads, rope_base, qk_gain_init,
                 group_size=64, no_cache=False, rope_type="rope",
                 yarn_max_len=4096, train_seq_len=1024):
        super().__init__()
        self.num_heads, self.num_kv_heads = num_heads, num_kv_heads
        self.head_dim = dim // num_heads
        self.q_size = self.num_heads * self.head_dim
        self.kv_size = self.num_kv_heads * self.head_dim

        # Fused QKV projection; GQA when num_kv_heads < num_heads.
        self.c_qkv = QuinaryLinear(dim, self.q_size + 2 * self.kv_size, bias=False, group_size=group_size)
        self.proj = NormedQuinaryLinear(dim, dim, bias=False, group_size=group_size)
        self.proj._zero_init = True
        self.q_gain = nn.Parameter(torch.full((num_heads,), qk_gain_init, dtype=torch.float32))
        self.rotary = Rotary(self.head_dim, base=rope_base, no_cache=no_cache,
rope_type=rope_type, yarn_max_len=yarn_max_len, + train_seq_len=train_seq_len) + + def forward(self, x: Tensor) -> Tensor: + bsz, seqlen, dim = x.shape + qkv_out = self.c_qkv(x) + q_out, k_out, v_out = qkv_out.split([self.q_size, self.kv_size, self.kv_size], dim=-1) + q = q_out.reshape(bsz, seqlen, self.num_heads, self.head_dim) + k = k_out.reshape(bsz, seqlen, self.num_kv_heads, self.head_dim) + v = v_out.reshape(bsz, seqlen, self.num_kv_heads, self.head_dim) + q, k = F.rms_norm(q, (q.size(-1),)), F.rms_norm(k, (k.size(-1),)) + cos, sin = self.rotary(seqlen, x.device, q.dtype) + q, k = apply_rotary_emb(q, cos, sin), apply_rotary_emb(k, cos, sin) + q = q * self.q_gain.to(dtype=q.dtype)[None, None, :, None] + y = flash_attn_func(q.contiguous(), k.contiguous(), v.contiguous(), causal=True) + y = y.reshape(bsz, seqlen, dim) + return self.proj(y) + +class MLP(nn.Module): + def __init__(self, dim, mlp_mult, group_size=64, activation="swiglu"): + super().__init__() + hidden = mlp_mult * dim + self.activation = activation + if activation == "swiglu": + self.gate_up = QuinaryLinear(dim, hidden * 2, bias=False, group_size=group_size) + else: + self.fc = QuinaryLinear(dim, hidden, bias=False, group_size=group_size) + self.proj = NormedQuinaryLinear(hidden, dim, bias=False, group_size=group_size) + self.proj._zero_init = True + + def forward(self, x: Tensor) -> Tensor: + if self.activation == "swiglu": + gu = self.gate_up(x) + gate, up = gu.chunk(2, dim=-1) + return self.proj(F.silu(gate) * up) + elif self.activation == "relu": + return self.proj(torch.relu(self.fc(x))) + elif self.activation == "leaky_relu": + return self.proj(F.leaky_relu(self.fc(x), negative_slope=0.01)) + elif self.activation == "leaky_relu2": + return self.proj(F.leaky_relu(self.fc(x), negative_slope=0.5).square()) + else: # relu2 + return self.proj(torch.relu(self.fc(x)).square()) + +class Block(nn.Module): + def __init__(self, dim: int, num_heads: int, num_kv_heads: int, mlp_mult: int, + rope_base: 
float, qk_gain_init: float, group_size: int=64, + activation: str="swiglu", no_cache: bool=False, + rope_type: str="rope", yarn_max_len: int=4096, + train_seq_len: int=1024): + super().__init__() + self.attn_norm = RMSNorm() + self.mlp_norm = RMSNorm() + self.attn = CausalSelfAttention(dim, num_heads, num_kv_heads, rope_base, qk_gain_init, + group_size, no_cache, rope_type, yarn_max_len, train_seq_len) + self.mlp = MLP(dim, mlp_mult, group_size, activation) + self.attn_scale = nn.Parameter(torch.ones(dim, dtype=torch.float32)) + self.mlp_scale = nn.Parameter(torch.ones(dim, dtype=torch.float32)) + self.resid_mix = nn.Parameter(torch.stack((torch.ones(dim), torch.zeros(dim))).float()) + + def forward(self, x: Tensor, x0: Tensor) -> Tensor: + mix = self.resid_mix.to(dtype=x.dtype) + x_in = mix[0] * x + mix[1] * x0 + n = self.attn_norm(x_in) + x = x_in + self.attn_scale.to(dtype=x_in.dtype) * self.attn(n) + x = x + self.mlp_scale.to(dtype=x.dtype) * self.mlp(self.mlp_norm(x)) + return x + +class GPT(nn.Module): + def __init__(self, vocab_size, num_layers, model_dim, num_heads, num_kv_heads, mlp_mult, + tie_embeddings, tied_embed_init_std, logit_softcap, rope_base, qk_gain_init, + group_size: int = 64, activation: str = "swiglu", + embed_dim: int = 0, softcap_type: str="poly", no_cache: bool=False, + rope_type: str="rope", yarn_max_len: int=4096, train_seq_len: int=1024): + super().__init__() + self.tie_embeddings = tie_embeddings + self.logit_softcap = logit_softcap + self.softcap_type = softcap_type + self.embed_dim = embed_dim if embed_dim > 0 else model_dim + self.tok_emb = QATEmbedding(vocab_size, self.embed_dim) + self.embed_proj = QATLinear(self.embed_dim, model_dim, bias=False) if self.embed_dim != model_dim else None + self.embed_proj_rev = QATLinear(model_dim, self.embed_dim, bias=False) if self.embed_dim != model_dim else None + + self.blocks = nn.ModuleList([ + Block(model_dim, num_heads, num_kv_heads, mlp_mult, rope_base, qk_gain_init, + group_size, 
                  activation, no_cache, rope_type, yarn_max_len, train_seq_len)
            for _ in range(num_layers)
        ])

        # U-Net split: first half encoder, second half decoder, decoder layers
        # add a learned-weighted skip from the symmetric encoder layer.
        self.num_encoder_layers = num_layers // 2
        self.num_decoder_layers = num_layers - self.num_encoder_layers
        self.num_skip_weights = min(self.num_encoder_layers, self.num_decoder_layers)
        self.skip_weights = nn.Parameter(torch.ones(self.num_skip_weights, model_dim, dtype=torch.float32))

        self.final_norm = RMSNorm()
        self.lm_head = QATLinear(model_dim, vocab_size, bias=False)
        self.lm_head._zero_init = True
        if tie_embeddings:
            # With tying, logits come from tok_emb.weight; lm_head stays
            # frozen (and is dropped from the serialized state_dict).
            self.lm_head.weight.requires_grad_(False)

        self.vocab_bias = nn.Parameter(torch.zeros(vocab_size, dtype=torch.float32))
        self._init_weights(tied_embed_init_std)

    def _init_weights(self, tied_embed_init_std: float) -> None:
        if self.tie_embeddings:
            nn.init.normal_(self.tok_emb.weight, mean=0.0, std=tied_embed_init_std)
        for module in self.modules():
            if isinstance(module, QuinaryLinear) and not getattr(module, "_zero_init", False):
                nn.init.normal_(module.weight, mean=0.0, std=0.02)
            elif isinstance(module, nn.Linear) and getattr(module, "_zero_init", False):
                nn.init.zeros_(module.weight)

    def _compute_logits(self, x: Tensor) -> Tensor:
        if self.tie_embeddings:
            # Tied path: project back to embed_dim (if factored) and score
            # against the embedding matrix.
            proj = self.embed_proj_rev(x) if self.embed_proj_rev is not None else x
            logits_raw = F.linear(proj, self.tok_emb.weight.to(x.dtype))
        else:
            logits_raw = self.lm_head(x)
        return logits_raw + self.vocab_bias.to(x.dtype)

    def _softcap(self, logits: Tensor) -> Tensor:
        s = self.logit_softcap
        if self.softcap_type == "tanh":
            return s * torch.tanh(logits / s)
        # "poly" path: clamped odd polynomial x - x^3/3 + x^5/15 as a cheap
        # tanh-like squash. NOTE(review): the true tanh Taylor term is
        # 2x^5/15, not x^5/15 — presumably a deliberate softcap shape rather
        # than a tanh approximation; confirm against the records README.
        x_sc = torch.clamp(logits / s, -2.0, 2.0)
        x2 = x_sc * x_sc
        return s * torch.clamp(x_sc * (1.0 - x2 / 3.0 + x2 * x2 / 15.0), -1.0, 1.0)

    def forward(self, input_ids: Tensor, target_ids: Tensor, reduction: str = "mean") -> Tensor:
        x = self.tok_emb(input_ids).float()
        if self.embed_proj is not None:
            x = self.embed_proj(x)
        x = F.rms_norm(x, (x.size(-1),))
        x0 = x

        # U-Net encoder/decoder with skip connections.
        skips = []
        for vi in range(self.num_encoder_layers):
            x = self.blocks[vi](x, x0)
            skips.append(x)
        for vi in range(self.num_decoder_layers):
            if vi < self.num_skip_weights:
                # LIFO pop pairs decoder layer vi with its mirror encoder layer.
                x = x + self.skip_weights[vi].to(dtype=x.dtype) * skips.pop()
            x = self.blocks[self.num_encoder_layers + vi](x, x0)

        x_normed = self.final_norm(x)
        x_flat = x_normed.reshape(-1, x_normed.size(-1))
        targets = target_ids.reshape(-1)
        logits = self._softcap(self._compute_logits(x_flat))

        if reduction == "none":
            return F.cross_entropy(logits.float(), targets, reduction="none").reshape(input_ids.shape)

        # Fused CE: single logsumexp; z-loss only during training (not eval BPB).
        logits_f = logits.float()
        lse = torch.logsumexp(logits_f, dim=-1)
        target_logits = logits_f.gather(1, targets.unsqueeze(1)).squeeze(1)
        main_loss = (lse - target_logits).mean()
        if self.training:
            main_loss = main_loss + 1e-4 * (lse ** 2).mean()
        return main_loss


def build_luts(sp, vocab_size: int, device: torch.device):
    """Build per-token byte-count LUTs for the BPB denominator.

    Returns (base_bytes, has_leading_space, is_boundary_token) tensors indexed
    by token id: UTF-8 byte length of the piece without its leading "▁", a
    flag for pieces that start with "▁" (decoded as a space except after a
    boundary token), and a flag marking control/unknown/unused ids.
    """
    sp_vocab_size = int(sp.vocab_size())
    table_size = max(sp_vocab_size, vocab_size)
    base_bytes_np = np.zeros((table_size,), dtype=np.int16)
    has_leading_space_np = np.zeros((table_size,), dtype=np.bool_)
    is_boundary_token_np = np.ones((table_size,), dtype=np.bool_)
    for token_id in range(sp_vocab_size):
        if sp.is_control(token_id) or sp.is_unknown(token_id) or sp.is_unused(token_id):
            continue
        is_boundary_token_np[token_id] = False
        if sp.is_byte(token_id):
            # Byte-fallback pieces decode to exactly one byte.
            base_bytes_np[token_id] = 1
            continue
        piece = sp.id_to_piece(token_id)
        if piece.startswith("\u2581"):
            has_leading_space_np[token_id] = True
            piece = piece[1:]
        base_bytes_np[token_id] = len(piece.encode("utf-8"))
    return (
        torch.tensor(base_bytes_np, dtype=torch.int16, device=device),
        torch.tensor(has_leading_space_np, dtype=torch.bool, device=device),
        torch.tensor(is_boundary_token_np, dtype=torch.bool, device=device),
    )

def ld_val(pattern, seq_len, max_tok=int(os.environ.get("VAL_MAX_TOKENS", 0))):
    """Load and concatenate validation shards, truncated to a whole number of
    seq_len sequences (+1 trailing target token).

    NOTE: max_tok's default reads VAL_MAX_TOKENS once at import time, not per
    call — intentional for a single-run script, but worth knowing.
    """
    files = sorted(glob.glob(pattern))
    assert files, f"No files: {pattern}"
    tok = torch.cat([ld_shard(Path(p)) for p in files]).contiguous()
    if max_tok > 0: tok = tok[:max_tok + 1]
    u = ((tok.numel() - 1) // seq_len) * seq_len
    return tok[:u + 1]


def eval_val(args, model, rank, world_size, device, grad_accum_steps, val_tokens,
             base_bytes_lut, has_leading_space_lut, is_boundary_token_lut):
    """Evaluate mean CE and bits-per-byte on val_tokens, sharded across ranks.

    Returns (val_loss, bpb, token_count, byte_count); the byte_count printed
    from here is the BPB denominator audited against verify_bpb.py.
    """
    local_batch_tokens = args.val_batch_size // (world_size * grad_accum_steps)
    local_batch_seqs = max(1, local_batch_tokens // args.train_seq_len)
    total_seqs = (val_tokens.numel() - 1) // args.train_seq_len
    # Contiguous, non-overlapping sequence range per rank.
    seq_start = (total_seqs * rank) // world_size
    seq_end = (total_seqs * (rank + 1)) // world_size
    loss_sum = torch.zeros((), device=device, dtype=torch.float64)
    token_count = torch.zeros((), device=device, dtype=torch.float64)
    byte_count = torch.zeros((), device=device, dtype=torch.float64)
    model.eval()
    with torch.no_grad():
        for batch_start in range(seq_start, seq_end, local_batch_seqs):
            batch_end = min(batch_start + local_batch_seqs, seq_end)
            raw_start = batch_start * args.train_seq_len
            raw_end = batch_end * args.train_seq_len + 1
            local = val_tokens[raw_start:raw_end].to(device=device, dtype=torch.int64)
            x, y = local[:-1].reshape(-1, args.train_seq_len), local[1:].reshape(-1, args.train_seq_len)
            with torch.autocast(device_type="cuda", dtype=torch.bfloat16):
                batch_loss = model(x, y).detach()
            n = float(y.numel())
            loss_sum += batch_loss.to(torch.float64) * n
            token_count += n
            # Decoded-byte accounting: base bytes of each target piece, plus
            # one space byte when the piece carries "▁" and its predecessor is
            # not a boundary token (matching SentencePiece decoding).
            prev_ids, tgt_ids = x.reshape(-1), y.reshape(-1)
            tok_bytes = base_bytes_lut[tgt_ids].to(torch.int16)
            tok_bytes += (has_leading_space_lut[tgt_ids] & ~is_boundary_token_lut[prev_ids]).to(torch.int16)
            byte_count += tok_bytes.to(torch.float64).sum()
    if dist.is_available() and dist.is_initialized():
        for t in (loss_sum, token_count, byte_count):
            dist.all_reduce(t, op=dist.ReduceOp.SUM)
    val_loss = loss_sum / token_count
    # bpb = (total nats / ln 2) / total decoded bytes.
    bpb = (val_loss.item() / math.log(2.0)) * (token_count.item() / byte_count.item())
    model.train()
    return float(val_loss.item()), float(bpb), int(token_count.item()), int(byte_count.item())

def main() -> None:
    args = Hyperparameters()
    # Full source text is logged below for reproducibility.
    code = Path(__file__).read_text(encoding="utf-8")

    if args.matrix_optimizer != "adamw":
        global ns_orth
        ns_orth = torch.compile(ns_orth)

    distributed = "RANK" in os.environ and "WORLD_SIZE" in os.environ
    rank = int(os.environ.get("RANK", "0"))
    world_size = int(os.environ.get("WORLD_SIZE", "1"))
    local_rank = int(os.environ.get("LOCAL_RANK", "0"))
    # Global batch is split over ranks first, then gradient accumulation.
    grad_accum_steps = max(1, 8 // world_size)
    grad_scale = 1.0 / grad_accum_steps

    if not torch.cuda.is_available():
        raise RuntimeError("CUDA is required")
    device = torch.device("cuda", local_rank)
    torch.cuda.set_device(device)
    if distributed:
        dist.init_process_group(backend="nccl", device_id=device)
        dist.barrier()
    master_process = rank == 0
    torch.backends.cuda.matmul.allow_tf32 = True
    torch.backends.cudnn.allow_tf32 = True

    os.makedirs("logs/cuda/", exist_ok=True)
    logfile = f"logs/cuda/{args.run_id}.txt" if master_process else None
    if master_process:
        print(logfile)
    # Truncate any pre-existing logfile from a previous run with the same
    # RUN_ID so the per-RUN_ID log file isn't append-stacked across reruns.
+ if logfile: + open(logfile, "w", encoding="utf-8").close() + + def log0(msg: str, console: bool = True) -> None: + if not master_process: + return + if console: + print(msg) + if logfile: + with open(logfile, "a", encoding="utf-8") as f: + print(msg, file=f) + + log0(code, console=False) + log0("=" * 100, console=False) + + log0(f"Python {sys.version}", console=False) + log0(f"PyTorch {torch.__version__}", console=False) + + random.seed(args.seed) + np.random.seed(args.seed) + torch.manual_seed(args.seed) + torch.cuda.manual_seed_all(args.seed) + + sp = spm.SentencePieceProcessor(model_file=args.tokenizer_path) + # Guard against an obvious tokenizer/vocab-size mismatch that would + # silently produce a wrong byte-count LUT (and therefore a wrong BPB + # denominator) without crashing. + assert sp.vocab_size() == args.vocab_size, ( + f"tokenizer vocab_size ({sp.vocab_size()}) != args.vocab_size " + f"({args.vocab_size}); check TOKENIZER_PATH / VOCAB_SIZE") + val_tokens = ld_val(args.val_files, args.train_seq_len) + base_bytes_lut, has_leading_space_lut, is_boundary_token_lut = build_luts( + sp, args.vocab_size, device) + + # --- Model --- + base_model = GPT( + vocab_size=args.vocab_size, num_layers=args.num_layers, model_dim=args.model_dim, + num_heads=args.num_heads, num_kv_heads=args.num_kv_heads, mlp_mult=args.mlp_mult, + tie_embeddings=args.tie_embeddings, tied_embed_init_std=args.tied_embed_init_std, + logit_softcap=args.logit_softcap, rope_base=args.rope_base, qk_gain_init=args.qk_gain_init, + group_size=args.bitnet_group_size, activation=args.activation_type, + embed_dim=args.embed_dim, + softcap_type=args.softcap_type, no_cache=(args.compile_mode == "reduce-overhead"), + rope_type=args.rope_type, yarn_max_len=args.yarn_max_len, train_seq_len=args.train_seq_len, + ).to(device).bfloat16() + + for module in base_model.modules(): + if isinstance(module, nn.Linear): + module.float() + restore_low_dim_params_to_fp32(base_model) + if args.tie_embeddings: + 
base_model.lm_head.weight.requires_grad_(False) + + torch._dynamo.config.optimize_ddp = False + + compiled_model = torch.compile(base_model, mode=args.compile_mode if args.compile_mode != "default" else None) + use_find_unused = not args.tie_embeddings + model = DDP(compiled_model, device_ids=[local_rank], broadcast_buffers=False, + find_unused_parameters=use_find_unused, + static_graph=not use_find_unused, + gradient_as_bucket_view=True) if distributed else compiled_model + + # --- Optimizers --- + _excl = {"tok_emb.weight", "lm_head.weight"} + all_other_params = [(n, p) for n, p in base_model.named_parameters() + if not any(eh in n for eh in _excl)] + matrix_params = [p for n, p in all_other_params + if p.ndim == 2 and not any(pat in n for pat in CTP)] + scalar_params = [p for n, p in all_other_params + if p.ndim < 2 or any(pat in n for pat in CTP)] + + token_lr = args.tied_embed_lr if args.tie_embeddings else args.embed_lr + opt_tok = torch.optim.Adam( + [{"params": [base_model.tok_emb.weight], "lr": token_lr, "base_lr": token_lr}], + betas=(args.beta1, args.beta2), eps=args.adam_eps, fused=True) + if args.matrix_optimizer == "adamw": + opt_muon = torch.optim.AdamW( + [{"params": matrix_params, "lr": args.adam_lr, "base_lr": args.adam_lr}], + betas=(args.beta1, args.beta2), eps=args.adam_eps, weight_decay=args.adam_wd, fused=True) + else: + opt_muon = Muon(matrix_params, lr=args.matrix_lr, momentum=args.muon_momentum, + backend_steps=args.muon_backend_steps, wd=args.muon_wd) + for g in opt_muon.param_groups: + g["base_lr"] = args.matrix_lr + opt_scalar = torch.optim.Adam( + [{"params": scalar_params, "lr": args.scalar_lr, "base_lr": args.scalar_lr}], + betas=(args.beta1, args.beta2), eps=args.adam_eps, fused=True) + opt_head = torch.optim.Adam( + [{"params": [base_model.lm_head.weight], "lr": 0.0, "base_lr": 0.0}], + betas=(args.beta1, args.beta2), eps=args.adam_eps, fused=True) + + optimizers = [opt_tok, opt_muon, opt_scalar, opt_head] + + # --- Log all 
hyperparameters --- + log0("--- Hyperparameters ---", console=False) + log0(" ".join(f"{a}={getattr(args,a)}" for a in sorted(dir(args)) if not a.startswith("_") and a not in ("train_files","val_files") and not callable(getattr(args,a))), console=False) + n_params = sum(p.numel() for p in base_model.parameters()) + log0(f"params:{n_params} L:{args.num_layers} d:{args.model_dim} h:{args.num_heads} kv:{args.num_kv_heads} ws:{world_size} ga:{grad_accum_steps} s:{args.seed}") + + # --- Data loader & helpers --- + train_loader = DistributedTokenLoader(args.train_files, rank, world_size, device) + + def zero_grad_all(): + for opt in optimizers: + opt.zero_grad(set_to_none=True) + + max_wallclock_ms = 1000.0 * args.max_wallclock_seconds if args.max_wallclock_seconds > 0 else None + + def lr_mul(step: int, elapsed_ms: float): + if args.warmdown_fraction <= 0: + return 1.0 + if max_wallclock_ms is None: + warmdown_start = int(args.iterations * (1.0 - args.warmdown_fraction)) + ratio = max((args.iterations - step) / max(args.iterations * args.warmdown_fraction, 1), 0.0) if step >= warmdown_start else 1.0 + else: + warmdown_ms = max_wallclock_ms * args.warmdown_fraction + remaining_ms = max(max_wallclock_ms - elapsed_ms, 0.0) + ratio = remaining_ms / max(warmdown_ms, 1e-9) if remaining_ms <= warmdown_ms else 1.0 + return max(ratio, args.min_lr) + + _seq_switched = False + _batch_switched = False + active_seq_len = args.seq_len_start if args.seq_len_start > 0 else args.train_seq_len + active_batch_tokens = args.batch_tokens_start if args.batch_tokens_start > 0 else args.train_batch_tokens + + if args.skip_training: + log0("skip_training=1, using existing artifact final_model.quinary.ptz") + + # --- Compiler warmup --- + if args.warmup_steps > 0 and not args.skip_training: + _ms = {n: t.detach().cpu().clone() for n, t in base_model.state_dict().items()} + _os = [copy.deepcopy(o.state_dict()) for o in optimizers] + model.train() + for ws in range(args.warmup_steps): + 
zero_grad_all() + for mi in range(grad_accum_steps): + if distributed: model.require_backward_grad_sync = mi == grad_accum_steps - 1 + x, y = train_loader.next_batch(active_batch_tokens, active_seq_len, grad_accum_steps) + torch.compiler.cudagraph_mark_step_begin() + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): loss = model(x, y) + (loss * grad_scale).backward() + for o in optimizers: o.step() + zero_grad_all() + log0(f"warmup:{ws+1}/{args.warmup_steps}") + base_model.load_state_dict(_ms, strict=True) + for o, s in zip(optimizers, _os): o.load_state_dict(s) + zero_grad_all() + train_loader = DistributedTokenLoader(args.train_files, rank, world_size, device) + + # --- Main training loop --- + training_time_ms = 0.0 + stop_after_step: int | None = None + train_loss = torch.zeros((), device=device) + torch.cuda.synchronize() + t0 = time.perf_counter() + step = 0 + + while not args.skip_training: + last_step = step == args.iterations or (stop_after_step is not None and step >= stop_after_step) + + if last_step or (args.val_loss_every > 0 and step % args.val_loss_every == 0): + torch.cuda.synchronize() + training_time_ms += 1000.0 * (time.perf_counter() - t0) + val_loss, val_bpb, val_tok_count, val_byte_count = eval_val( + args, model, rank, world_size, device, grad_accum_steps, + val_tokens, base_bytes_lut, has_leading_space_lut, is_boundary_token_lut) + tstats = quin_stats(base_model, group_size=args.bitnet_group_size) + log0(f"step:{step}/{args.iterations} val_loss:{val_loss:.4f} val_bpb:{val_bpb:.4f} " + f"train_time:{training_time_ms:.0f}ms zero_frac:{tstats['zero_frac']:.3f} " + f"eval_tokens:{val_tok_count} eval_bytes:{val_byte_count}") + torch.cuda.synchronize() + t0 = time.perf_counter() + + if last_step: + if stop_after_step is not None and step < args.iterations: + log0(f"stopping_early: wallclock_cap train_time:{training_time_ms:.0f}ms step:{step}/{args.iterations}") + break + + elapsed_ms = training_time_ms + 1000.0 * (time.perf_counter() 
- t0) + scale = lr_mul(step, elapsed_ms) + + # Sequence length schedule + if args.seq_len_start > 0 and not _seq_switched: + if max_wallclock_ms is not None: + should_switch_seq = elapsed_ms >= args.seq_schedule_fraction * max_wallclock_ms + else: + should_switch_seq = step >= int(args.iterations * args.seq_schedule_fraction) + if should_switch_seq: + active_seq_len = args.train_seq_len + _seq_switched = True + torch._dynamo.reset() + train_loader = DistributedTokenLoader(args.train_files, rank, world_size, device) + log0(f"step:{step} seq_len_switch:{args.seq_len_start}->{active_seq_len}") + + # Batch size schedule + if args.batch_tokens_start > 0 and not _batch_switched: + if max_wallclock_ms is not None: + should_switch_batch = elapsed_ms >= args.batch_schedule_fraction * max_wallclock_ms + else: + should_switch_batch = step >= int(args.iterations * args.batch_schedule_fraction) + if should_switch_batch: + active_batch_tokens = args.train_batch_tokens + _batch_switched = True + log0(f"step:{step} batch_switch:{args.batch_tokens_start}->{active_batch_tokens}") + + zero_grad_all() + train_loss.zero_() + + for micro in range(grad_accum_steps): + if distributed: + model.require_backward_grad_sync = micro == grad_accum_steps - 1 + x, y = train_loader.next_batch(active_batch_tokens, active_seq_len, grad_accum_steps) + torch.compiler.cudagraph_mark_step_begin() + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + loss = model(x, y) + train_loss.add_(loss.detach()) + (loss * grad_scale).backward() + train_loss /= grad_accum_steps + + # Muon momentum warmup (skip when AdamW is the matrix optimizer) + if args.matrix_optimizer != "adamw": + frac = min(step / args.muon_momentum_warmup_steps, 1.0) if args.muon_momentum_warmup_steps > 0 else 1.0 + for g in opt_muon.param_groups: + g["momentum"] = (1 - frac) * args.muon_momentum_warmup_start + frac * args.muon_momentum + + # LR scheduling + for opt in optimizers: + for g in opt.param_groups: + g["lr"] = 
g["base_lr"] * scale + opt.step() + zero_grad_all() + step += 1 + approx_ms = training_time_ms + 1000.0 * (time.perf_counter() - t0) + + if args.train_log_every > 0 and step % args.train_log_every == 0: + log0(f"step:{step}/{args.iterations} loss:{train_loss.item():.4f} t:{approx_ms:.0f}ms avg:{approx_ms/step:.1f}ms") + if args.churn_log_every > 0 and step % args.churn_log_every == 0: + log0(f"step:{step} churn:{churn_fn(base_model, args.bitnet_group_size):.4f} zero:{quin_stats(base_model, args.bitnet_group_size)['zero_frac']:.3f}") + + # Wallclock cap sync + if stop_after_step is None and max_wallclock_ms is not None and step % 10 == 0: + reached_cap = approx_ms >= max_wallclock_ms + if distributed: + cap_t = torch.tensor(int(reached_cap), device=device) + dist.all_reduce(cap_t, op=dist.ReduceOp.MAX) + reached_cap = bool(cap_t.item()) + if reached_cap: + stop_after_step = step + + # --- Serialization --- + if master_process and not args.skip_training: + sd = base_model.state_dict() + if base_model.tie_embeddings: + sd.pop("lm_head.weight", None) + + log0("serialize: start") + t_qsd = time.perf_counter() + q_obj, q_stats = q_sd(sd, group_size=args.bitnet_group_size, + scale_quant_bits=args.scale_quant_bits) + qsd_time = time.perf_counter() - t_qsd + log0(f"serialize: q_sd in {qsd_time:.1f}s " + f"(quinary={q_stats['quinary_params']/1e6:.2f}M params/{q_stats['quinary_bytes']/1e6:.2f}MB, " + f"fp={q_stats['fp_params']/1e6:.2f}M params/{q_stats['fp_bytes']/1e6:.2f}MB)") + + t_save = time.perf_counter() + buf = io.BytesIO() + torch.save(q_obj, buf) + raw_bytes = len(buf.getvalue()) + log0(f"serialize: torch.save raw={raw_bytes/1e6:.2f}MB in {time.perf_counter()-t_save:.1f}s") + + t_compress = time.perf_counter() + final_blob, per_stream_audit = _serialize_per_stream_v2(q_obj, level=9) + compress_time = time.perf_counter() - t_compress + artifact_bytes = len(final_blob) + log0(per_stream_audit) + log0(f"serialize: per_stream_v2 {raw_bytes/1e6:.2f}MB -> 
{artifact_bytes/1e6:.2f}MB " + f"(ratio {artifact_bytes/raw_bytes:.1%}, saved {(raw_bytes-artifact_bytes)/1e6:.2f}MB) " + f"in {compress_time:.1f}s") + + with open("final_model.quinary.ptz", "wb") as f: + f.write(final_blob) + + code_bytes = len(code.encode("utf-8")) + + total = artifact_bytes + code_bytes + log0(f"artifact:{artifact_bytes/1e6:.2f}MB quinary:{q_stats['quinary_params']}({q_stats['quinary_bytes']}B) fp:{q_stats['fp_params']}({q_stats['fp_bytes']}B) code:{code_bytes}") + log0(f"budget:{total}/{16000000} ({total/1e6:.2f}/{16.00:.2f}MB) {'FITS' if total <= 16000000 else 'OVER'}") + + # --- All ranks load roundtrip weights and evaluate --- + if distributed: + dist.barrier() + + with open("final_model.quinary.ptz", "rb") as f: + loaded = _load_artifact(f.read()) + missing, unexpected = base_model.load_state_dict(deq_sd(loaded), strict=False) + # The only "missing" key we expect is `lm_head.weight` when tied, since + # q_sd drops it from the saved state-dict. Any other missing/unexpected + # key is a serialization-roundtrip bug and should fail loudly. + expected_missing = {"lm_head.weight"} if base_model.tie_embeddings else set() + assert set(missing) <= expected_missing, f"unexpected missing keys after artifact load: {set(missing) - expected_missing}" + assert not unexpected, f"unexpected extra keys after artifact load: {unexpected}" + torch._dynamo.reset() + + q_val_loss, q_val_bpb, q_tok_count, q_byte_count = eval_val( + args, model, rank, world_size, device, grad_accum_steps, + val_tokens, base_bytes_lut, has_leading_space_lut, is_boundary_token_lut) + log0(f"final_quinary_roundtrip val_loss:{q_val_loss:.4f} val_bpb:{q_val_bpb:.4f} " + f"eval_tokens:{q_tok_count} eval_bytes:{q_byte_count}") + + # --- Score-first chunk-based CTP TTT --- + if args.ttt_steps > 0: + torch.cuda.synchronize() + t_ttt = time.perf_counter() + seq_len = args.train_seq_len + # TTT_STRIDE: sliding-window step used to slice the val stream into TTT + # examples. 
Hardcoded at 16 (canonical sp16384 quinary submission); a + # smaller stride yields more overlapping windows but more compute. + stride = 16 + ttt_chunk_tokens = args.ttt_tokens if args.ttt_tokens > 0 else 32768 + ttt_epochs = args.ttt_steps + batch_seqs = 32 + total_tokens = val_tokens.numel() - 1 + + # Select TTT params (CTP only — quinary weights are frozen) + for p in base_model.parameters(): + p.requires_grad_(False) + ttt_params = [] + for name, p in base_model.named_parameters(): + if any(pat in name for pat in CTP): + p.requires_grad_(True) + ttt_params.append(p) + n_ttt = sum(p.numel() for p in ttt_params) + ttt_opt = torch.optim.SGD(ttt_params, lr=args.ttt_lr, momentum=0.9) + for pg in ttt_opt.param_groups: + pg["initial_lr"] = pg["lr"] + log0(f"ttt: {n_ttt} CTP params, lr={args.ttt_lr}") + + # Assign sliding windows to chunks + context_size = seq_len - stride + window_starts = [ws for ws in range(0, total_tokens, stride) + if ws + context_size < total_tokens] + num_chunks = (total_tokens + ttt_chunk_tokens - 1) // ttt_chunk_tokens + chunk_windows = [[] for _ in range(num_chunks)] + for ws in window_starts: + s = 0 if ws == 0 else context_size + ci = min((ws + s) // ttt_chunk_tokens, num_chunks - 1) + chunk_windows[ci].append(ws) + log0(f"ttt: {n_ttt} CTP params, {num_chunks} chunks, {ttt_epochs} epochs, lr={args.ttt_lr}") + + loss_sum = torch.zeros((), device=device, dtype=torch.float64) + token_count = torch.zeros((), device=device, dtype=torch.float64) + byte_count = torch.zeros((), device=device, dtype=torch.float64) + + # Recompile for TTT (find_unused_parameters needed with frozen params) + torch._dynamo.reset() + compiled_ttt = torch.compile(base_model, mode=args.compile_mode if args.compile_mode != "default" else None) + + for ci in range(num_chunks): + windows = chunk_windows[ci] + if not windows: + continue + chunk_start = ci * ttt_chunk_tokens + chunk_end = min((ci + 1) * ttt_chunk_tokens, total_tokens) + my_s = len(windows) * rank // 
world_size + my_e = len(windows) * (rank + 1) // world_size + my_windows = windows[my_s:my_e] + + # Phase 1: SCORE (no_grad, compiled) + base_model.eval() + with torch.no_grad(): + for bi in range(0, len(my_windows), batch_seqs): + batch_ws = my_windows[bi:bi + batch_seqs] + bsz = len(batch_ws) + x_batch = torch.zeros(bsz, seq_len, dtype=torch.int64, device=device) + y_batch = torch.zeros(bsz, seq_len, dtype=torch.int64, device=device) + wlens = [] + for i, ws in enumerate(batch_ws): + we = min(ws + seq_len, total_tokens) + wlen = we - ws + wlens.append(wlen) + chunk_tok = val_tokens[ws:we + 1].to(dtype=torch.int64, device=device) + x_batch[i, :wlen] = chunk_tok[:-1] + y_batch[i, :wlen] = chunk_tok[1:] + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + nll = compiled_ttt(x_batch, y_batch, reduction="none").detach() + for i, ws in enumerate(batch_ws): + wlen = wlens[i] + s = 0 if ws == 0 else context_size + scored_nll = nll[i, s:wlen].to(torch.float64) + loss_sum += scored_nll.sum() + token_count += float(wlen - s) + tgt, prev = y_batch[i, s:wlen], x_batch[i, s:wlen] + tb = base_bytes_lut[tgt].to(torch.float64) + tb += (has_leading_space_lut[tgt] & ~is_boundary_token_lut[prev]).to(torch.float64) + byte_count += tb.sum() + + # Phase 2: TRAIN on chunk (score-first: already scored) + if ci < num_chunks - 1 and ttt_epochs > 0: + base_model.train() + chunk_seqs = (chunk_end - chunk_start) // seq_len + if chunk_seqs > 0: + cos_mul = 0.5 * (1.0 + math.cos(math.pi * ci / max(num_chunks - 1, 1))) + for pg in ttt_opt.param_groups: + pg["lr"] = pg.get("initial_lr", pg["lr"]) * cos_mul + my_seq_s = chunk_seqs * rank // world_size + my_seq_e = chunk_seqs * (rank + 1) // world_size + for _ep in range(ttt_epochs): + for bs in range(0, my_seq_e - my_seq_s, batch_seqs): + be = min(bs + batch_seqs, my_seq_e - my_seq_s) + start_tok = chunk_start + (my_seq_s + bs) * seq_len + end_tok = chunk_start + (my_seq_s + be) * seq_len + 1 + if end_tok > val_tokens.numel(): + 
continue + local = val_tokens[start_tok:end_tok].to(device=device, dtype=torch.int64) + x = local[:-1].reshape(-1, seq_len) + y = local[1:].reshape(-1, seq_len) + ttt_opt.zero_grad(set_to_none=True) + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + loss = compiled_ttt(x, y) + loss.backward() + if distributed: + for p in ttt_params: + if p.grad is not None: + dist.all_reduce(p.grad, op=dist.ReduceOp.AVG) + torch.nn.utils.clip_grad_norm_(ttt_params, 1.0) + ttt_opt.step() + + if master_process and ci % max(1, num_chunks // 5) == 0: + log0(f"ttt chunk:{ci+1}/{num_chunks}") + + if distributed: + for t in (loss_sum, token_count, byte_count): + dist.all_reduce(t, op=dist.ReduceOp.SUM) + ttt_val_loss = (loss_sum / token_count).item() + ttt_bpb = (ttt_val_loss / math.log(2.0)) * (token_count.item() / byte_count.item()) + for p in base_model.parameters(): + p.requires_grad_(True) + torch._dynamo.reset() + torch.cuda.synchronize() + ttt_time_ms = 1000.0 * (time.perf_counter() - t_ttt) + log0(f"ttt_eval val_loss:{ttt_val_loss:.4f} val_bpb:{ttt_bpb:.4f} " + f"time:{ttt_time_ms:.0f}ms " + f"eval_tokens:{int(token_count.item())} eval_bytes:{int(byte_count.item())}") + + if distributed: + dist.destroy_process_group() + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/verify_bpb.py b/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/verify_bpb.py new file mode 100644 index 0000000000..2dcb034429 --- /dev/null +++ b/records/track_non_record_16mb/2026-04-30_Quinary_53M_10L_576d_SP16384_TTT/verify_bpb.py @@ -0,0 +1,295 @@ +"""Standalone BPB-denominator correctness check. + +Verifies that train_gpt.py's per-token byte-count LUT matches independent +SentencePiece decoding on the *same token stream and same scored slice* used by +training/evaluation. 
def lut_byte_count(
    tokens: np.ndarray,
    base_bytes: np.ndarray,
    has_leading_space: np.ndarray,
    is_boundary_token: np.ndarray,
) -> int:
    """Sum eval bytes over tokens[1:], with tokens[:-1] as previous context.

    Each scored target token contributes its base byte count, plus one extra
    byte when the target carries a leading space AND the preceding token is
    not a boundary token (boundary tokens are presumed to absorb the space —
    confirm against build_luts, which constructs these tables).
    """
    if tokens.size < 2:
        # No scored targets at all -> zero bytes. An empty fancy-index would
        # sum to 0 anyway; the guard just makes the contract explicit.
        return 0
    tgt = tokens[1:]
    prev = tokens[:-1]
    base = base_bytes[tgt].astype(np.int64)
    extra = (has_leading_space[tgt] & ~is_boundary_token[prev]).astype(np.int64)
    return int((base + extra).sum())


def bos_positions(tokens: np.ndarray, bos_id: int) -> np.ndarray:
    """Return the ascending indices of every BOS token in *tokens*."""
    return np.flatnonzero(tokens == bos_id)
If it does not, + we fall back to a prefix-subtraction check for the initial partial segment. + """ + if len(tokens) < 2: + return 0 + + bpos = bos_positions(tokens, bos_id) + total = 0 + + # Initial partial segment before the first BOS in this slice. This should not + # happen for the exact eval slice; if it does, compare by prefix subtraction. + first_bos = int(bpos[0]) if len(bpos) else len(tokens) + if first_bos > 0: + initial = tokens[:first_bos] + full = sp.decode(initial.tolist()).encode("utf-8") + prefix = sp.decode(initial[:1].tolist()).encode("utf-8") + total += len(full) - len(prefix) + + # BOS-delimited document segments. For a BOS at position s, target bytes start + # at s+1 and continue until the next BOS or end of slice. + for i, s0 in enumerate(bpos): + s = int(s0) + e = int(bpos[i + 1]) if i + 1 < len(bpos) else len(tokens) + if e > s + 1: + total += len(sp.decode(tokens[s + 1 : e].tolist()).encode("utf-8")) + # If e == s+1, the document is empty or the slice ends at BOS; emits 0. 
def check_slice(
    label: str,
    tokens: np.ndarray,
    sp: spm.SentencePieceProcessor,
    base_bytes: np.ndarray,
    has_leading_space: np.ndarray,
    is_boundary_token: np.ndarray,
    bos_id: int,
) -> bool:
    """Compare LUT-accumulated bytes against SentencePiece-decoded bytes.

    Prints one PASS/FAIL line for *label* and returns True iff the two byte
    counts agree exactly on this token slice.
    """
    lut_total = lut_byte_count(tokens, base_bytes, has_leading_space, is_boundary_token)
    sp_total = decoded_bytes_docwise(tokens, sp, bos_id, is_boundary_token)
    diff = lut_total - sp_total
    if diff == 0:
        status = "PASS"
    else:
        status = "FAIL"
    # Whether the slice opens on a boundary token is printed purely as a
    # diagnostic hint for failures at slice edges.
    first_is_boundary = bool(is_boundary_token[int(tokens[0])])
    print(
        f" {status} {label:36s} targets={len(tokens)-1:>10,d} "
        f"lut_bytes={lut_total:>12,d} decoded_bytes={sp_total:>12,d} "
        f"delta={diff:+d} start_boundary={first_is_boundary}"
    )
    return diff == 0
def main() -> int:
    """Run all LUT-vs-decoder parity checks.

    Returns 0 when every slice check passes and no UNK token appears,
    2 when required inputs (tokenizer model / val shards) are missing,
    and 1 otherwise.
    """
    print("=" * 78)
    print("BPB byte-count LUT verification")
    print("=" * 78)

    sp_path = Path(SP_PATH)
    if not sp_path.exists():
        print(f"Tokenizer not found at {sp_path}; run setup.sh first.")
        return 2
    sp = spm.SentencePieceProcessor(model_file=str(sp_path))
    print(f"tokenizer : {sp_path} (vocab={sp.vocab_size()})")
    print(f"tokenizer sha: {sha256_file(sp_path)}")

    # eos is fetched only for the diagnostic print below; it plays no role in
    # the byte accounting itself.
    bos = sp.bos_id()
    eos = sp.eos_id()
    unk = sp.unk_id()
    print(f"BOS id={bos} ({sp.id_to_piece(bos)!r}) EOS id={eos} ({sp.id_to_piece(eos)!r}) UNK id={unk}")

    val_files = sorted(glob.glob(VAL_GLOB))
    if not val_files:
        print(f"No val shards matched {VAL_GLOB}")
        return 2
    print(f"val shards : {len(val_files)} ({val_files[0]} ... )")

    base_bytes, has_leading_space, is_boundary_token = build_luts(sp, VOCAB_SIZE)

    # LUT sanity statistics: how many tokens fall into each accounting class.
    n_byte = sum(1 for tid in range(sp.vocab_size()) if sp.is_byte(tid))
    n_control = sum(
        1
        for tid in range(sp.vocab_size())
        if sp.is_control(tid) or sp.is_unknown(tid) or sp.is_unused(tid)
    )
    n_lead = int(has_leading_space[: sp.vocab_size()].sum())
    n_bound = int(is_boundary_token[: sp.vocab_size()].sum())
    print(
        f"LUT stats : byte-fallback={n_byte} control/unknown/unused={n_control} "
        f"with-leading-space={n_lead} boundary={n_bound}"
    )

    # Concatenate shards in sorted filename order — this must match the order
    # train_gpt.py consumes them in for the slice comparison to be meaningful.
    tokens_full = np.concatenate([ld_shard(Path(f)) for f in val_files])
    print(f"shard tokens : {tokens_full.size:,}")
    if tokens_full.size < 2:
        print("Too few tokens")
        return 2

    bpos = bos_positions(tokens_full, bos)
    # "expect 50,000 docs" reflects the canonical FineWeb val split — a
    # different count here is a red flag about the inputs, not this script.
    print(
        f"BOS positions: {len(bpos):,}; first={int(bpos[0]) if len(bpos) else 'NONE'} "
        f"(expect 50,000 docs and normally first=0)"
    )
    # unk < 0 means the tokenizer defines no UNK piece; treat as zero UNKs.
    unk_count = int((tokens_full == unk).sum()) if unk >= 0 else 0
    print(f"UNK count : {unk_count}")

    all_pass = True
    print("\nPer-slice LUT-vs-decode parity:")

    # Exact scored slice used by train_gpt.py: truncate to a TRAIN_SEQ_LEN target multiple.
    # Targets are tokens[1:], hence the -1 before rounding and the +1 when slicing.
    u = ((tokens_full.size - 1) // TRAIN_SEQ_LEN) * TRAIN_SEQ_LEN
    tokens_eval = tokens_full[: u + 1]
    omitted_targets = (tokens_full.size - 1) - u
    print(
        f"eval slice : train_seq_len={TRAIN_SEQ_LEN} target_count={u:,} "
        f"omitted_tail_targets={omitted_targets:,}"
    )
    all_pass &= check_slice(
        "exact eval slice",
        tokens_eval,
        sp,
        base_bytes,
        has_leading_space,
        is_boundary_token,
        bos,
    )

    # Whole untruncated stream.
    all_pass &= check_slice(
        "full untruncated stream",
        tokens_full,
        sp,
        base_bytes,
        has_leading_space,
        is_boundary_token,
        bos,
    )

    # BOS-delimited prefix document checks: progressively larger document
    # prefixes localize a mismatch to an early document if one exists.
    if len(bpos) >= 2:
        all_pass &= check_slice(
            "doc at first BOS",
            tokens_full[int(bpos[0]) : int(bpos[1])],
            sp,
            base_bytes,
            has_leading_space,
            is_boundary_token,
            bos,
        )
        for n_docs in (10, 100, 1000):
            if len(bpos) >= n_docs + 1:
                all_pass &= check_slice(
                    f"first {n_docs} BOS docs",
                    tokens_full[int(bpos[0]) : int(bpos[n_docs])],
                    sp,
                    base_bytes,
                    has_leading_space,
                    is_boundary_token,
                    bos,
                )

    print()
    # UNK tokens are counted as zero bytes by the LUT, which would silently
    # deflate the BPB denominator — so their presence fails the audit even if
    # every parity check passed.
    if all_pass and unk_count == 0:
        print("ALL CHECKS PASS — LUT bytes match SentencePiece decoder bytes on the eval slice.")
        return 0
    if unk_count != 0:
        print("FAIL — validation contains UNK tokens; zero-byte UNK accounting is unsafe.")
    else:
        print("AT LEAST ONE CHECK FAILED — investigate before trusting BPB scores.")
    return 1


if __name__ == "__main__":
    sys.exit(main())