diff --git a/scripts/nn/optim/shampoo.dml b/scripts/nn/optim/shampoo.dml new file mode 100644 index 00000000000..e8832bdaf9a --- /dev/null +++ b/scripts/nn/optim/shampoo.dml @@ -0,0 +1,499 @@ +#------------------------------------------------------------- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +#------------------------------------------------------------- + +/* + * Shampoo optimizer. + * + * Implementation of the Shampoo optimizer as described in: + * + * Gupta et al., "Shampoo: Preconditioned Stochastic Tensor Optimization" + * https://arxiv.org/abs/1802.09568 + * + * Shampoo is a second-order optimization method that preconditions + * gradients using estimates of the row- and column-wise covariance + * of the gradients. Compared to first-order optimizers (SGD, Adam), + * Shampoo can converge faster but is significantly more memory-intensive. + * + * This implementation supports: + * - Full-matrix Shampoo (exact preconditioning) + * - Diagonal Shampoo (memory-efficient approximation) + * + * The choice between the two modes is determined by the shape of X + * and the preconditioner initialization. 
+ */ + +update = function(matrix[double] X, matrix[double] dX, double lr, + matrix[double] preconL, matrix[double] preconR, boolean useDiag) + return(matrix[double] X, matrix[double] preconL, matrix[double] preconR){ + /* + * Performs one optimization step using the Shampoo update rule. + * + * + * Inputs: + * - X: Parameter matrix to be updated (n × m) + * - dX: Gradient of the loss w.r.t. X (n × m) + * - lr: Learning rate. + * - preconL: Left (row) preconditioner + * - Full: (n × n) + * - Diagonal: (n × 1) + * - preconR: Right (column) preconditioner + * - Full: (m × m) + * - Diagonal: (1 × m) + * - useDiag: Boolean flag indicating whether diagonal Shampoo is used + * + * Outputs: + * - X: Updated parameter matrix (n × m) + * - preconL: Updated left preconditioner + * - Full: (n × n) + * - Diagonal: (n × 1) + * - preconR: Updated right preconditioner + * - Full: (m × m) + * - Diagonal: (1 × m) + */ + + # Full-matrix Shampoo: + # Only used if both dimensions are small enough + if(useDiag==FALSE){ + + preconL = preconL + dX %*% t(dX) + preconR = preconR + t(dX) %*% dX + + [LEigenvalue, LEigenvector] = eigen(preconL) + preconLInvPowerRoot = LEigenvector %*% diag(LEigenvalue^(-0.25)) %*% t(LEigenvector) + + [REigenvalue, REigenvector] = eigen(preconR) + preconRInvPowerRoot = REigenvector %*% diag(REigenvalue^(-0.25)) %*% t(REigenvector) + + X = X - lr * preconLInvPowerRoot %*% dX %*% preconRInvPowerRoot + + # Diagonal Shampoo: + # Memory-efficient approximation for large parameter matrices + } else{ + n = nrow(dX) + m = ncol(dX) + + preconL = preconL + rowSums(dX^2) + preconR = preconR + colSums(dX^2) + + preconLScale = preconL^(-0.25) + preconRScale = preconR^(-0.25) + + preconLMatrix = preconLScale %*% matrix(1, rows=1, cols=m) + preconRMatrix = matrix(1, rows=n, cols=1) %*% preconRScale + + scaledGrad = dX * preconLMatrix; + scaledGrad = scaledGrad * preconRMatrix; + + X = X - lr * scaledGrad; + + } +} + +init = function(matrix[double] X, double epsilon, int 
useDiagThreshold) + return (matrix[double] preconL, matrix[double] preconR, boolean useDiag) { + /* + * Initializes the Shampoo preconditioners for a given parameter matrix. + * + * Depending on the size of X, this function initializes either: + * - Full identity matrices (exact Shampoo), or + * - Diagonal vectors (approximate Shampoo) + * + * This threshold is crucial to avoid excessive memory usage, + * as full Shampoo requires O(n^2 + m^2) memory per parameter matrix. + * + * Inputs: + * - X: Parameter matrix to be optimized (n, m) + * - epsilon: Numerical stability constant + * - useDiagThreshold: Dimension threshold above which diagonal + * preconditioning is used + * + * Outputs: + * - preconL: Initial left preconditioner + * - Full: (n × n) identity scaled by epsilon + * - Diagonal: (n × 1) filled with epsilon + * - preconR: Initial right preconditioner + * - Full: (m × m) identity scaled by epsilon + * - Diagonal: (1 × m) filled with epsilon + * - useDiag: Boolean flag indicating whether diagonal Shampoo is used + */ + + # Use diagonal Shampoo if parameter matrix is too large + if((nrow(X) > useDiagThreshold) | (ncol(X) > useDiagThreshold)){ + preconL = matrix(epsilon, rows=nrow(X), cols=1); + preconR = matrix(epsilon, rows=1, cols=ncol(X)); + useDiag = TRUE + + # Use full Shampoo if parameter matrix is small enough + } else { + preconL = matrix(0, rows=nrow(X), cols=nrow(X)); + index = 1; + while (index <= nrow(X)){ + preconL[index, index] = epsilon * 1 + index = index + 1 + } + preconR = matrix(0, rows=ncol(X), cols=ncol(X)); + index = 1; + while (index <= ncol(X)){ + preconR[index, index] = epsilon * 1 + index = index + 1 + } + useDiag = FALSE + } +} + +update_momentum = function(matrix[double] X, matrix[double] dX, double lr, + matrix[double] preconL, matrix[double] preconR, + matrix[double] momentum, boolean useDiag) + return(matrix[double] X, matrix[double] preconL, matrix[double] preconR, + matrix[double] momentum){ + /* + * Performs one optimization 
step using the Shampoo update rule, while using momentum. + * + * + * Inputs: + * - X: Parameter matrix to be updated (n × m) + * - dX: Gradient of the loss w.r.t. X (n × m) + * - lr: Learning rate. + * - preconL: Left (row) preconditioner + * - Full: (n × n) + * - Diagonal: (n × 1) + * - preconR: Right (column) preconditioner + * - Full: (m × m) + * - Diagonal: (1 × m) + * - momentum: momentum (n × m) + * - useDiag: Boolean flag indicating whether diagonal Shampoo is used + * + * Outputs: + * - X: Updated parameter matrix (n × m) + * - preconL: Updated left preconditioner + * - Full: (n × n) + * - Diagonal: (n × 1) + * - preconR: Updated right preconditioner + * - Full: (m × m) + * - Diagonal: (1 × m) + * - momentum: Updated momentum (n × m) + */ + + # calculating the updated momentum + momentum = 0.9 * momentum + (0.1)*dX + + # Full-matrix Shampoo: + # Only used if both dimensions are small enough + if(useDiag==FALSE){ + + preconL = preconL + dX %*% t(dX) + preconR = preconR + t(dX) %*% dX + + [LEigenvalue, LEigenvector] = eigen(preconL) + preconLInvPowerRoot = LEigenvector %*% diag(LEigenvalue^(-0.25)) %*% t(LEigenvector) + + [REigenvalue, REigenvector] = eigen(preconR) + preconRInvPowerRoot = REigenvector %*% diag(REigenvalue^(-0.25)) %*% t(REigenvector) + + X = X - lr * preconLInvPowerRoot %*% momentum %*% preconRInvPowerRoot + + # Diagonal Shampoo: + # Memory-efficient approximation for large parameter matrices + } else{ + n = nrow(dX) + m = ncol(dX) + + preconL = preconL + rowSums(dX ^ 2) + preconR = preconR + colSums(dX ^ 2) + + preconLScale = preconL^(-0.25) + preconRScale = preconR^(-0.25) + + preconLMatrix = preconLScale %*% matrix(1, rows=1, cols=m) + preconRMatrix = matrix(1, rows=n, cols=1) %*% preconRScale + + scaledGrad = momentum * preconLMatrix + scaledGrad = scaledGrad * preconRMatrix + + X = X - lr * scaledGrad + } +} + +init_momentum = function(matrix[double] X, double epsilon, int useDiagThreshold) + return (matrix[double] preconL, 
matrix[double] preconR, + matrix[double] momentum, boolean useDiag) { + /* + * Initializes the Shampoo preconditioners and momentum for a given parameter matrix. + * + * Depending on the size of X, this function initializes either: + * - Full identity matrices (exact Shampoo), or + * - Diagonal vectors (approximate Shampoo) + * + * This threshold is crucial to avoid excessive memory usage, + * as full Shampoo requires O(n² + m²) memory per parameter matrix. + * + * Inputs: + * - X: Parameter matrix to be optimized (n, m) + * - epsilon: Numerical stability constant + * - useDiagThreshold: Dimension threshold above which diagonal + * preconditioning is used + * + * Outputs: + * - preconL: Initial left preconditioner + * - Full: (n × n) identity scaled by epsilon + * - Diagonal: (n × 1) filled with epsilon + * - preconR: Initial right preconditioner + * - Full: (m × m) identity scaled by epsilon + * - Diagonal: (1 × m) filled with epsilon + * - momentum: Initial momentum (n × m), initialized to zeros + * - useDiag: Boolean flag indicating whether diagonal Shampoo is used + */ + + # Use diagonal Shampoo if parameter matrix is too large + if((nrow(X) > useDiagThreshold) | (ncol(X) > useDiagThreshold)){ + preconL = matrix(epsilon, rows=nrow(X), cols=1); + preconR = matrix(epsilon, rows=1, cols=ncol(X)); + useDiag = TRUE + + # Use full Shampoo if parameter matrix is small enough + } else { + preconL = matrix(0, rows=nrow(X), cols=nrow(X)); + index = 1; + while (index <= nrow(X)){ + preconL[index, index] = epsilon * 1 + index = index + 1 + } + preconR = matrix(0, rows=ncol(X), cols=ncol(X)); + index = 1; + while (index <= ncol(X)){ + preconR[index, index] = epsilon * 1 + index = index + 1 + } + useDiag = FALSE + } + momentum = X * 0 +} + +update_heuristic = function(matrix[double] X, matrix[double] dX, double lr, + matrix[double] preconL, matrix[double] preconR, matrix[double] momentum, + int stepCounter, int rootEvery, int preconEvery, matrix[double] bufferL, + 
matrix[double] bufferR, matrix[double] preconLInvPowerRoot,
    matrix[double] preconRInvPowerRoot, boolean useDiag)
    return (matrix[double] X, matrix[double] preconL, matrix[double] preconR,
    matrix[double] momentum, int stepCounter, matrix[double] bufferL,
    matrix[double] bufferR, matrix[double] preconLInvPowerRoot,
    matrix[double] preconRInvPowerRoot)
{
  /*
   * Performs one Shampoo optimization step with momentum, plus two
   * runtime heuristics: curvature statistics are accumulated into
   * buffers and folded into the preconditioners only every
   * preconEvery steps, and the inverse fourth roots are recomputed
   * only every rootEvery steps (the cached preconLInvPowerRoot /
   * preconRInvPowerRoot are reused in between).
   *
   * Inputs:
   *  - X:          Parameter matrix to be updated (n x m)
   *  - dX:         Gradient of the loss w.r.t. X (n x m)
   *  - lr:         Learning rate
   *  - preconL:    Left preconditioner, full (n x n) or diagonal (n x 1)
   *  - preconR:    Right preconditioner, full (m x m) or diagonal (1 x m)
   *  - momentum:   Momentum buffer (n x m)
   *  - stepCounter: Step counter, incremented each call
   *  - rootEvery:   Frequency for recomputing the inverse roots
   *  - preconEvery: Frequency for applying the buffered updates
   *  - bufferL:    Buffered left curvature updates, same shape as preconL
   *  - bufferR:    Buffered right curvature updates, same shape as preconR
   *  - preconLInvPowerRoot: Cached preconL^(-1/4), same shape as preconL
   *  - preconRInvPowerRoot: Cached preconR^(-1/4), same shape as preconR
   *  - useDiag:    TRUE -> memory-efficient diagonal Shampoo
   *
   * Outputs:
   *  - X, preconL, preconR, momentum, stepCounter, bufferL, bufferR,
   *    preconLInvPowerRoot, preconRInvPowerRoot: updated versions of
   *    the inputs (buffers are reset to zero when applied)
   */

  # Exponential moving average of the gradients
  momentum = 0.9 * momentum + 0.1 * dX;

  # Accumulate the curvature statistics of this step into the buffers
  if (!useDiag) {
    bufferL = bufferL + dX %*% t(dX);
    bufferR = bufferR + t(dX) %*% dX;
  }
  else {
    bufferL = bufferL + rowSums(dX^2);
    bufferR = bufferR + colSums(dX^2);
  }

  # Periodically fold the buffered statistics into the preconditioners
  # (identical for both variants, hence hoisted out of the branches)
  if ((stepCounter > 0) & (stepCounter %% preconEvery == 0)) {
    preconL = preconL + bufferL;
    preconR = preconR + bufferR;
    bufferL = bufferL * 0;
    bufferR = bufferR * 0;
  }

  # Full-matrix Shampoo: only used if both dimensions are small enough
  if (!useDiag) {
    # Periodically refresh the cached inverse fourth roots via eigen
    if ((stepCounter > 0) & (stepCounter %% rootEvery == 0)) {
      [eigValL, eigVecL] = eigen(preconL);
      preconLInvPowerRoot = eigVecL %*% diag(eigValL^(-0.25)) %*% t(eigVecL);
      [eigValR, eigVecR] = eigen(preconR);
      preconRInvPowerRoot = eigVecR %*% diag(eigValR^(-0.25)) %*% t(eigVecR);
    }
    X = X - lr * preconLInvPowerRoot %*% momentum %*% preconRInvPowerRoot;
  }
  # Diagonal Shampoo: memory-efficient approximation for large matrices
  else {
    # Periodically refresh the cached elementwise inverse fourth roots
    if ((stepCounter > 0) & (stepCounter %% rootEvery == 0)) {
      preconLInvPowerRoot = preconL^(-0.25);
      preconRInvPowerRoot = preconR^(-0.25);
    }
    numRows = nrow(dX);
    numCols = ncol(dX);
    scaledDirection = momentum * (preconLInvPowerRoot %*% matrix(1, rows=1, cols=numCols));
    scaledDirection = scaledDirection * (matrix(1, rows=numRows, cols=1) %*% preconRInvPowerRoot);
    X = X - lr * scaledDirection;
  }

  stepCounter = stepCounter + 1;
}



init_heuristic =
function(matrix[double] X, double epsilon, int useDiagThreshold) + return (matrix[double] preconL, matrix[double] preconR, int stepCounter, + matrix[double] bufferL, matrix[double] bufferR, matrix[double] momentum, + matrix[double] preconLInvPowerRoot, matrix[double] preconRInvPowerRoot, + boolean useDiag) { + /* + * Initializes Shampoo preconditioners, buffers, cached inverse roots, + * and momentum for the heuristic variant. + * + * Depending on the size of X, this function initializes either: + * - Full identity matrices (exact Shampoo), or + * - Diagonal vectors (approximate Shampoo) + * + * This threshold is crucial to avoid excessive memory usage, + * as full Shampoo requires O(n^2 + m^2) memory per parameter matrix. + * + * Inputs: + * - X: Parameter matrix to be optimized (n, m) + * - epsilon: Numerical stability constant + * - useDiagThreshold: Dimension threshold above which diagonal + * preconditioning is used + * + * Outputs: + * - preconL: Initial left preconditioner (n × n) or (n × 1) + * - preconR: Initial right preconditioner (m × m) or (1 × m) + * - stepCounter: Initialized to 0 + * - bufferL: Initialized to zeros, same shape as preconL + * - bufferR: Initialized to zeros, same shape as preconR + * - momentum: Initialized to zeros, same shape as X (n × m) + * - preconLInvPowerRoot: Cached inverse fourth root of preconL + * - Full: initialized to epsilon^{-1/4} * I (n × n) + * - Diagonal: initialized to preconL^{-1/4} (n × 1) + * - preconRInvPowerRoot: Cached inverse fourth root of preconR + * - Full: initialized to epsilon^{-1/4} * I (m × m) + * - Diagonal: initialized to preconR^{-1/4} (1 × m) + * - useDiag: Boolean flag indicating whether diagonal Shampoo is used + */ + + # Use diagonal Shampoo if parameter matrix is too large + if((nrow(X) > useDiagThreshold) | (ncol(X) > useDiagThreshold)){ + preconL = matrix(epsilon, rows=nrow(X), cols=1); + preconR = matrix(epsilon, rows=1, cols=ncol(X)); + preconLInvPowerRoot = preconL^(-0.25) + 
preconRInvPowerRoot = preconR^(-0.25) + useDiag = TRUE + + # Use full Shampoo if parameter matrix is small enough + } else { + preconL = matrix(0, rows=nrow(X), cols=nrow(X)); + index = 1; + while (index <= nrow(X)){ + preconL[index, index] = epsilon * 1 + index = index + 1 + } + preconR = matrix(0, rows=ncol(X), cols=ncol(X)); + index = 1; + while (index <= ncol(X)){ + preconR[index, index] = epsilon * 1 + index = index + 1 + } + + preconLInvPowerRoot = preconL + i = 1 + while(i <= nrow(preconLInvPowerRoot)) { + preconLInvPowerRoot[i,i] = epsilon^(-0.25) + i = i + 1 + } + + preconRInvPowerRoot = preconR + j = 1 + while(j <= nrow(preconRInvPowerRoot)) { + preconRInvPowerRoot[j,j] = epsilon^(-0.25) + j = j + 1 + } + + useDiag = FALSE + } + bufferR = preconR * 0 + bufferL = preconL * 0 + stepCounter = 0 + momentum = X * 0 +} \ No newline at end of file diff --git a/scripts/staging/shampoo_optimizer/diagram_creation.ipynb b/scripts/staging/shampoo_optimizer/diagram_creation.ipynb new file mode 100644 index 00000000000..251f7e6a4f3 --- /dev/null +++ b/scripts/staging/shampoo_optimizer/diagram_creation.ipynb @@ -0,0 +1,461 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "id": "400c10c6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " epochs train_losses train_accuracies val_losses val_accuracies \\\n", + "0 1.0 1.529923 0.615625 0.416351 0.873 \n", + "1 2.0 0.250601 0.927000 0.229524 0.916 \n", + "2 3.0 0.154800 0.954250 0.162009 0.940 \n", + "3 4.0 0.113356 0.964625 0.135608 0.955 \n", + "4 5.0 0.090430 0.971625 0.123288 0.963 \n", + "5 6.0 0.076361 0.976625 0.114561 0.968 \n", + "6 7.0 0.066914 0.979625 0.109109 0.969 \n", + "7 8.0 0.059269 0.982500 0.105038 0.971 \n", + "8 9.0 0.053242 0.983750 0.101710 0.971 \n", + "9 10.0 0.048538 0.985125 0.099977 0.971 \n", + "10 11.0 0.044405 0.985750 0.098380 0.971 \n", + "11 12.0 0.040600 0.987250 0.097781 0.972 \n", + "12 13.0 0.037342 0.988250 
0.097132 0.974 \n", + "13 14.0 0.034315 0.988750 0.096922 0.974 \n", + "14 15.0 0.031616 0.989750 0.097324 0.976 \n", + "15 16.0 0.029328 0.990125 0.096786 0.974 \n", + "16 17.0 0.027317 0.990875 0.096020 0.975 \n", + "17 18.0 0.025400 0.991250 0.094932 0.975 \n", + "18 19.0 0.023850 0.992375 0.094315 0.975 \n", + "19 20.0 0.022360 0.992875 0.094448 0.975 \n", + "20 21.0 0.021041 0.993375 0.094068 0.976 \n", + "21 22.0 0.019816 0.993750 0.093970 0.976 \n", + "22 23.0 0.018703 0.994375 0.093682 0.976 \n", + "23 24.0 0.017558 0.995000 0.093167 0.976 \n", + "24 25.0 0.016504 0.995375 0.093128 0.976 \n", + "25 26.0 0.015547 0.996000 0.093137 0.976 \n", + "26 27.0 0.014641 0.996250 0.092622 0.975 \n", + "27 28.0 0.013850 0.996375 0.092436 0.975 \n", + "28 29.0 0.013045 0.996750 0.091737 0.974 \n", + "29 30.0 0.012308 0.996875 0.091197 0.974 \n", + "30 31.0 0.011605 0.997375 0.090616 0.973 \n", + "31 32.0 0.010950 0.997625 0.089798 0.973 \n", + "32 33.0 0.010404 0.997625 0.089589 0.973 \n", + "33 34.0 0.009935 0.997750 0.089403 0.973 \n", + "34 35.0 0.009448 0.997750 0.088787 0.973 \n", + "35 36.0 0.008966 0.997875 0.088232 0.974 \n", + "36 37.0 0.008522 0.998250 0.088025 0.973 \n", + "37 38.0 0.008150 0.998375 0.088028 0.973 \n", + "38 39.0 0.007784 0.998500 0.087388 0.973 \n", + "39 40.0 0.007437 0.998500 0.087394 0.973 \n", + "40 41.0 0.007119 0.998625 0.087162 0.973 \n", + "41 42.0 0.006813 0.998625 0.086621 0.974 \n", + "42 43.0 0.006518 0.998750 0.086615 0.975 \n", + "43 44.0 0.006219 0.999000 0.086452 0.973 \n", + "44 45.0 0.005936 0.999250 0.086182 0.973 \n", + "45 46.0 0.005689 0.999250 0.085994 0.973 \n", + "46 47.0 0.005452 0.999250 0.086067 0.973 \n", + "47 48.0 0.005212 0.999250 0.085938 0.973 \n", + "48 49.0 0.004984 0.999250 0.085857 0.973 \n", + "49 50.0 0.004777 0.999375 0.085784 0.973 \n", + "50 51.0 0.004596 0.999375 0.086051 0.973 \n", + "51 52.0 0.004398 0.999375 0.085878 0.974 \n", + "52 53.0 0.004203 0.999500 0.085930 0.974 \n", + "53 54.0 0.004026 
0.999500 0.085631 0.974 \n", + "54 55.0 0.003847 0.999625 0.085347 0.975 \n", + "55 56.0 0.003690 0.999750 0.085356 0.975 \n", + "56 57.0 0.003528 0.999875 0.085206 0.975 \n", + "57 58.0 0.003375 1.000000 0.085179 0.976 \n", + "58 59.0 0.003247 1.000000 0.085019 0.976 \n", + "59 60.0 0.003124 1.000000 0.084813 0.977 \n", + "\n", + " optimizer \n", + "0 shampoo \n", + "1 shampoo \n", + "2 shampoo \n", + "3 shampoo \n", + "4 shampoo \n", + "5 shampoo \n", + "6 shampoo \n", + "7 shampoo \n", + "8 shampoo \n", + "9 shampoo \n", + "10 shampoo \n", + "11 shampoo \n", + "12 shampoo \n", + "13 shampoo \n", + "14 shampoo \n", + "15 shampoo \n", + "16 shampoo \n", + "17 shampoo \n", + "18 shampoo \n", + "19 shampoo \n", + "20 shampoo \n", + "21 shampoo \n", + "22 shampoo \n", + "23 shampoo \n", + "24 shampoo \n", + "25 shampoo \n", + "26 shampoo \n", + "27 shampoo \n", + "28 shampoo \n", + "29 shampoo \n", + "30 shampoo \n", + "31 shampoo \n", + "32 shampoo \n", + "33 shampoo \n", + "34 shampoo \n", + "35 shampoo \n", + "36 shampoo \n", + "37 shampoo \n", + "38 shampoo \n", + "39 shampoo \n", + "40 shampoo \n", + "41 shampoo \n", + "42 shampoo \n", + "43 shampoo \n", + "44 shampoo \n", + "45 shampoo \n", + "46 shampoo \n", + "47 shampoo \n", + "48 shampoo \n", + "49 shampoo \n", + "50 shampoo \n", + "51 shampoo \n", + "52 shampoo \n", + "53 shampoo \n", + "54 shampoo \n", + "55 shampoo \n", + "56 shampoo \n", + "57 shampoo \n", + "58 shampoo \n", + "59 shampoo \n", + " epochs train_losses train_accuracies val_losses val_accuracies \\\n", + "0 1.0 0.639533 0.801500 0.327239 0.902 \n", + "1 2.0 0.184752 0.945375 0.221243 0.937 \n", + "2 3.0 0.134242 0.959750 0.188511 0.941 \n", + "3 4.0 0.100303 0.970375 0.166702 0.951 \n", + "4 5.0 0.079379 0.976000 0.156010 0.957 \n", + "5 6.0 0.069202 0.979125 0.164982 0.957 \n", + "6 7.0 0.060592 0.981250 0.171788 0.958 \n", + "7 8.0 0.054129 0.982750 0.163366 0.962 \n", + "8 9.0 0.048112 0.985375 0.149752 0.963 \n", + "9 10.0 0.042714 
0.987625 0.135282 0.964 \n", + "10 11.0 0.037989 0.989000 0.122572 0.966 \n", + "11 12.0 0.033369 0.990125 0.114374 0.968 \n", + "12 13.0 0.029285 0.991125 0.106545 0.973 \n", + "13 14.0 0.025636 0.992250 0.100020 0.971 \n", + "14 15.0 0.022314 0.993375 0.093786 0.970 \n", + "15 16.0 0.019570 0.994750 0.088490 0.973 \n", + "16 17.0 0.017252 0.996125 0.084034 0.976 \n", + "17 18.0 0.015548 0.996250 0.082103 0.976 \n", + "18 19.0 0.014139 0.996875 0.080609 0.976 \n", + "19 20.0 0.012960 0.997750 0.078689 0.976 \n", + "20 21.0 0.012004 0.997500 0.077240 0.977 \n", + "21 22.0 0.011114 0.997500 0.076127 0.977 \n", + "22 23.0 0.010281 0.998000 0.075846 0.976 \n", + "23 24.0 0.009678 0.998000 0.074256 0.976 \n", + "24 25.0 0.008997 0.998000 0.073674 0.977 \n", + "25 26.0 0.008524 0.998000 0.072962 0.977 \n", + "26 27.0 0.007974 0.998250 0.072336 0.976 \n", + "27 28.0 0.007519 0.998625 0.071772 0.975 \n", + "28 29.0 0.007067 0.998750 0.070253 0.974 \n", + "29 30.0 0.006566 0.999000 0.069656 0.974 \n", + "30 31.0 0.006089 0.999000 0.068200 0.977 \n", + "31 32.0 0.005608 0.999125 0.066946 0.977 \n", + "32 33.0 0.005141 0.999250 0.065950 0.978 \n", + "33 34.0 0.004759 0.999250 0.064440 0.978 \n", + "34 35.0 0.004381 0.999375 0.063538 0.978 \n", + "35 36.0 0.004047 0.999750 0.062533 0.979 \n", + "36 37.0 0.003787 0.999750 0.061590 0.979 \n", + "37 38.0 0.003497 1.000000 0.060816 0.979 \n", + "38 39.0 0.003270 1.000000 0.060142 0.979 \n", + "39 40.0 0.003096 1.000000 0.059449 0.979 \n", + "40 41.0 0.002915 1.000000 0.059109 0.979 \n", + "41 42.0 0.002747 1.000000 0.058858 0.980 \n", + "42 43.0 0.002602 1.000000 0.058758 0.980 \n", + "43 44.0 0.002453 1.000000 0.058751 0.980 \n", + "44 45.0 0.002325 1.000000 0.058833 0.980 \n", + "45 46.0 0.002200 1.000000 0.059313 0.980 \n", + "46 47.0 0.002075 1.000000 0.059344 0.980 \n", + "47 48.0 0.001972 1.000000 0.059808 0.980 \n", + "48 49.0 0.001866 1.000000 0.060284 0.980 \n", + "49 50.0 0.001785 1.000000 0.060728 0.980 \n", + "50 51.0 
0.001703 1.000000 0.061150 0.980 \n", + "51 52.0 0.001626 1.000000 0.061556 0.979 \n", + "52 53.0 0.001560 1.000000 0.061923 0.979 \n", + "53 54.0 0.001492 1.000000 0.062277 0.979 \n", + "54 55.0 0.001441 1.000000 0.062653 0.979 \n", + "55 56.0 0.001381 1.000000 0.063042 0.979 \n", + "56 57.0 0.001333 1.000000 0.063426 0.979 \n", + "57 58.0 0.001282 1.000000 0.063787 0.979 \n", + "58 59.0 0.001239 1.000000 0.064086 0.979 \n", + "59 60.0 0.001195 1.000000 0.064313 0.980 \n", + "\n", + " optimizer \n", + "0 shampoo_momentum \n", + "1 shampoo_momentum \n", + "2 shampoo_momentum \n", + "3 shampoo_momentum \n", + "4 shampoo_momentum \n", + "5 shampoo_momentum \n", + "6 shampoo_momentum \n", + "7 shampoo_momentum \n", + "8 shampoo_momentum \n", + "9 shampoo_momentum \n", + "10 shampoo_momentum \n", + "11 shampoo_momentum \n", + "12 shampoo_momentum \n", + "13 shampoo_momentum \n", + "14 shampoo_momentum \n", + "15 shampoo_momentum \n", + "16 shampoo_momentum \n", + "17 shampoo_momentum \n", + "18 shampoo_momentum \n", + "19 shampoo_momentum \n", + "20 shampoo_momentum \n", + "21 shampoo_momentum \n", + "22 shampoo_momentum \n", + "23 shampoo_momentum \n", + "24 shampoo_momentum \n", + "25 shampoo_momentum \n", + "26 shampoo_momentum \n", + "27 shampoo_momentum \n", + "28 shampoo_momentum \n", + "29 shampoo_momentum \n", + "30 shampoo_momentum \n", + "31 shampoo_momentum \n", + "32 shampoo_momentum \n", + "33 shampoo_momentum \n", + "34 shampoo_momentum \n", + "35 shampoo_momentum \n", + "36 shampoo_momentum \n", + "37 shampoo_momentum \n", + "38 shampoo_momentum \n", + "39 shampoo_momentum \n", + "40 shampoo_momentum \n", + "41 shampoo_momentum \n", + "42 shampoo_momentum \n", + "43 shampoo_momentum \n", + "44 shampoo_momentum \n", + "45 shampoo_momentum \n", + "46 shampoo_momentum \n", + "47 shampoo_momentum \n", + "48 shampoo_momentum \n", + "49 shampoo_momentum \n", + "50 shampoo_momentum \n", + "51 shampoo_momentum \n", + "52 shampoo_momentum \n", + "53 
shampoo_momentum \n", + "54 shampoo_momentum \n", + "55 shampoo_momentum \n", + "56 shampoo_momentum \n", + "57 shampoo_momentum \n", + "58 shampoo_momentum \n", + "59 shampoo_momentum \n", + " epochs train_losses train_accuracies val_losses val_accuracies \\\n", + "0 1.0 0.660083 0.791992 0.458462 0.844 \n", + "1 2.0 0.256014 0.923000 0.306052 0.909 \n", + "2 3.0 0.179122 0.947125 0.213270 0.933 \n", + "3 4.0 0.139948 0.957750 0.173651 0.942 \n", + "4 5.0 0.107507 0.967500 0.150592 0.952 \n", + "5 6.0 0.090241 0.971875 0.152722 0.949 \n", + "6 7.0 0.081093 0.974000 0.182535 0.943 \n", + "7 8.0 0.073433 0.976500 0.196313 0.936 \n", + "8 9.0 0.064927 0.979250 0.161399 0.948 \n", + "9 10.0 0.055647 0.982500 0.158130 0.949 \n", + "10 11.0 0.048851 0.984625 0.136853 0.958 \n", + "11 12.0 0.043453 0.986000 0.129564 0.963 \n", + "12 13.0 0.037899 0.988125 0.128100 0.964 \n", + "13 14.0 0.034403 0.989750 0.120332 0.967 \n", + "14 15.0 0.031140 0.990500 0.144659 0.956 \n", + "15 16.0 0.027824 0.992000 0.139351 0.959 \n", + "16 17.0 0.025904 0.992875 0.138057 0.960 \n", + "17 18.0 0.023049 0.994000 0.139043 0.958 \n", + "18 19.0 0.021960 0.993750 0.145674 0.953 \n", + "19 20.0 0.022683 0.992875 0.131017 0.962 \n", + "20 21.0 0.019511 0.993750 0.163879 0.953 \n", + "21 22.0 0.020723 0.993375 0.131990 0.963 \n", + "22 23.0 0.015733 0.995500 0.115951 0.966 \n", + "23 24.0 0.016863 0.994625 0.124647 0.964 \n", + "24 25.0 0.011906 0.996750 0.095556 0.975 \n", + "25 26.0 0.013877 0.995000 0.075135 0.973 \n", + "26 27.0 0.014348 0.995250 0.097998 0.973 \n", + "27 28.0 0.014791 0.995000 0.099138 0.970 \n", + "28 29.0 0.017454 0.995000 0.108553 0.967 \n", + "29 30.0 0.016810 0.994625 0.091436 0.972 \n", + "30 31.0 0.013777 0.995125 0.099723 0.973 \n", + "31 32.0 0.012082 0.995875 0.104254 0.971 \n", + "32 33.0 0.009674 0.997000 0.081663 0.976 \n", + "33 34.0 0.010131 0.997750 0.083600 0.975 \n", + "34 35.0 0.009486 0.996375 0.094651 0.969 \n", + "35 36.0 0.011784 0.996498 0.122534 
0.964 \n", + "36 37.0 0.012858 0.995500 0.077891 0.973 \n", + "37 38.0 0.005532 0.998750 0.085279 0.978 \n", + "38 39.0 0.004174 0.999250 0.062567 0.980 \n", + "39 40.0 0.003725 0.999375 0.065963 0.977 \n", + "40 41.0 0.003737 0.999000 0.106305 0.975 \n", + "41 42.0 0.004466 0.998750 0.069676 0.983 \n", + "42 43.0 0.003353 0.999125 0.063304 0.984 \n", + "43 44.0 0.002823 0.999375 0.082333 0.979 \n", + "44 45.0 0.002750 0.999750 0.069678 0.983 \n", + "45 46.0 0.002190 0.999500 0.122289 0.971 \n", + "46 47.0 0.014585 0.994625 0.117514 0.973 \n", + "47 48.0 0.003940 0.999125 0.072986 0.982 \n", + "48 49.0 0.002838 0.999375 0.057956 0.986 \n", + "49 50.0 0.002429 0.999250 0.071051 0.980 \n", + "50 51.0 0.000884 1.000000 0.063533 0.984 \n", + "51 52.0 0.000639 1.000000 0.062502 0.986 \n", + "52 53.0 0.001002 0.999750 0.085928 0.977 \n", + "53 54.0 0.003981 0.998750 0.142024 0.964 \n", + "54 55.0 0.009453 0.997250 0.112729 0.969 \n", + "55 56.0 0.020304 0.994000 0.221027 0.961 \n", + "56 57.0 0.020196 0.992625 0.074609 0.980 \n", + "57 58.0 0.012778 0.996250 0.084888 0.980 \n", + "58 59.0 0.005098 0.998625 0.070511 0.983 \n", + "59 60.0 0.002206 0.999500 0.067209 0.985 \n", + "\n", + " optimizer \n", + "0 adam \n", + "1 adam \n", + "2 adam \n", + "3 adam \n", + "4 adam \n", + "5 adam \n", + "6 adam \n", + "7 adam \n", + "8 adam \n", + "9 adam \n", + "10 adam \n", + "11 adam \n", + "12 adam \n", + "13 adam \n", + "14 adam \n", + "15 adam \n", + "16 adam \n", + "17 adam \n", + "18 adam \n", + "19 adam \n", + "20 adam \n", + "21 adam \n", + "22 adam \n", + "23 adam \n", + "24 adam \n", + "25 adam \n", + "26 adam \n", + "27 adam \n", + "28 adam \n", + "29 adam \n", + "30 adam \n", + "31 adam \n", + "32 adam \n", + "33 adam \n", + "34 adam \n", + "35 adam \n", + "36 adam \n", + "37 adam \n", + "38 adam \n", + "39 adam \n", + "40 adam \n", + "41 adam \n", + "42 adam \n", + "43 adam \n", + "44 adam \n", + "45 adam \n", + "46 adam \n", + "47 adam \n", + "48 adam \n", + "49 adam 
\n", + "50 adam \n", + "51 adam \n", + "52 adam \n", + "53 adam \n", + "54 adam \n", + "55 adam \n", + "56 adam \n", + "57 adam \n", + "58 adam \n", + "59 adam \n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "C:\\Users\\nicol\\AppData\\Local\\Temp\\ipykernel_9880\\3072135520.py:12: FutureWarning: The behavior of DataFrame concatenation with empty or all-NA entries is deprecated. In a future version, this will no longer exclude empty or all-NA columns when determining the result dtypes. To retain the old behavior, exclude the relevant entries before the concat operation.\n", + " collected_data = pd.concat([collected_data, df], axis=0)\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAsEAAAK9CAYAAADFUbHOAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAD4BElEQVR4nOzdd3iTZdsG8PNJ2ibpLi1dUFto2VCKIAiIoIyCbAcgKuNVUJFPkBdRHMhQmQIKKIKCiAPE9aIiIBWUJShbNqWlrA4K3Tu5vz/SPCRtWpo2adL2/B1HD9pn5U6acXH1uq9bEkIIEBERERHVIQp7D4CIiIiIqLoxCCYiIiKiOodBMBERERHVOQyCiYiIiKjOYRBMRERERHUOg2AiIiIiqnMYBBMRERFRncMgmIiIiIjqHAbBRERERFTnMAgmE5IkYebMmfYehllhYWEYMGCAvYdBZQgLC8OYMWMqdGyPHj3Qo0ePSt3OmDFjEBYWVqlz7a0mj72iFi5ciMaNG0OpVCIqKsri83ft2gVJkvDtt99af3AWmjlzJiRJwo0bN+w9FALw2WefQZIkxMfHW+2aht9xXWN4ne3atcumt2OL35k1MQguITY2Fs8++ywaN24MtVoNT09PdO3aFe+//z5yc3PtPTyyopycHMycOdPmbwJ11alTpzBz5kyHffOrrfbt24eZM2ciLS2t2m97+/btmDZtGrp27Yq1a9fi3XffLfPYr776CkuXLq2+wVGN8e677+LHH3+09zCoDnCy9wAcyS+//ILHHnsMKpUKo0aNQuvWrVFQUIA9e/bg5ZdfxsmTJ7Fq1Sp7D9OmcnNz4eRUN54WOTk5mDVrFgBUOitJZTt16hRmzZqFHj16lMp+bt++3T6DsrPVq1dDp9PZ9Db27duHWbNmYcyYMfD29rbpbZX0+++/Q6FQ4NNPP4WLi0u5x3711Vf4999/MXny5OoZHNUY7777Lh599FEMGTLEZPtTTz2FESNGQKVSWe223njjDbz66qtWux7VLHUj2qmAuLg4jBgxAqGhofj9998RFBQk73vhhRdw4cIF/PLLL3Ycoe3odDoUFBRArVZDrVbbezhUB9wpQKptsrOz4ebmBmdnZ3sPxaaSk5Oh0Wjq3O+XqodSqYRSqbTqNZ2cnKo18WP8eUv2x3KIYgsWLEBWVhY+/fRTkwDYICIiApMmTZJ/Lioqwpw5cxAeHg6VSoWwsDC89tpryM/PNznPUMe6a9cudOjQARqNBm3atJH/BP/999+jTZs2UKvVaN++PY4c
OWJy/pgxY+Du7o6LFy8iOjoabm5uCA4OxuzZsyGEMDl20aJF6NKlC3x9faHRaNC+fXuzdXWSJGHixIn48ssv0apVK6hUKmzdulXeZ1wTnJmZicmTJyMsLAwqlQr+/v7o3bs3Dh8+bHLNTZs2oX379tBoNPDz88OTTz6Jq1evmr0vV69exZAhQ+Du7o769etj6tSp0Gq1ZfxmStu+fTuioqKgVqvRsmVLfP/996WOSUtLw+TJkxESEgKVSoWIiAjMnz9fzsLFx8ejfv36AIBZs2ZBkiT5vm/evBmSJOH48ePy9b777jtIkoSHH37Y5HZatGiB4cOHm2z74osv5MeiXr16GDFiBC5fvlxqjAcOHEDfvn3h5eUFV1dXdO/eHXv37jU5xlCvduHCBTmz5+XlhbFjxyInJ+eOj1WPHj3QunVrHD9+HN27d4erqysiIiLk58Uff/yBTp06QaPRoFmzZtixY4fJ+WXVsN6pju6zzz7DY489BgB44IEH5MfX8LwvWRNsqE/buHEjXnvtNQQGBsLNzQ2DBg0y+9iVpNPpsHTpUrRq1QpqtRoBAQF49tlncevWrXLPW7RoESRJwqVLl0rtmz59OlxcXORr7N69G4899hjuuusuqFQqhISE4KWXXipVJmV4nsfGxuKhhx6Ch4cHnnjiCXlfycfT0tftjz/+iNatW0OlUqFVq1byaxfQ/15efvllAECjRo3kx91QkvLbb7/hvvvug7e3N9zd3dGsWTO89tpr5T+4qNj7nSRJWLt2LbKzs+Xb/eyzz8xer0ePHvjll19w6dIl+diSj4tOp8M777yDhg0bQq1Wo2fPnrhw4UKpa1XkdVSWZcuWoVWrVnB1dYWPjw86dOiAr776qtRxaWlpd3z9rV27Fg8++CD8/f2hUqnQsmVLfPTRR6WuVZ2fCdnZ2fjvf/8rvw82a9YMixYtKnVcRT/PyvL777+jW7ducHNzg7e3NwYPHozTp0+bHGN4zzhz5gyGDRsGT09P+Pr6YtKkScjLy5OPkyQJ2dnZWLdunfzcMMw1MFdfWtXHs+R72ZgxY+TbLfll/NmYn5+Pt956CxEREfL7wbRp00o9ZuV93przv//9D/3790dwcDBUKhXCw8MxZ86cUp+Rhvf2U6dO4YEHHoCrqysaNGiABQsWlLrmlStXMGTIELi5ucHf3x8vvfRShX+3ly5dwoQJE9CsWTNoNBr4+vriscceM1vmdvLkSTz44IPQaDRo2LAh3n77bbN/+bL0Plb286tCBAkhhGjQoIFo3LhxhY8fPXq0ACAeffRRsWLFCjFq1CgBQAwZMsTkuNDQUNGsWTMRFBQkZs6cKZYsWSIaNGgg3N3dxRdffCHuuusuMW/ePDFv3jzh5eUlIiIihFarNbkdtVotmjRpIp566imxfPlyMWDAAAFAvPnmmya31bBhQzFhwgSxfPlysXjxYtGxY0cBQPz8888mxwEQLVq0EPXr1xezZs0SK1asEEeOHJH3vfXWW/KxI0eOFC4uLmLKlCnik08+EfPnzxcDBw4UX3zxhXzM2rVrBQBxzz33iCVLlohXX31VaDQaERYWJm7dulXqvrRq1Ur85z//ER999JF45JFHBADx4Ycf3vExDw0NFU2bNhXe3t7i1VdfFYsXLxZt2rQRCoVCbN++XT4uOztbREZGCl9fX/Haa6+JlStXilGjRglJksSkSZOEEEJkZWWJjz76SAAQQ4cOFevXrxfr168Xx44dE6mpqUKSJLFs2TL5mpMmTRIKhULUr19f3pacnCwAiOXLl8vb3n77bSFJkhg+fLj48MMPxaxZs4Sfn1+pxyImJka4uLiIzp07i/fee08sWbJEREZGChcXF3HgwAH5uLfeeksAEO3atRMPP/yw+PDDD8UzzzwjAIhp06bd8THr3r27CA4OFiEhIeLll18Wy5YtEy1bthRKpVJs2LBBBAYGipkzZ4qlS5eKBg0aCC8vL5GRkWHyOwsNDS11XcO4Sv5+
Ro8eLYQQIjY2Vrz44osCgHjttdfkxzcxMVEeV/fu3eVzd+7cKQCINm3aiMjISLF48WLx6quvCrVaLZo2bSpycnLKHdMzzzwjnJycxLhx48TKlSvFK6+8Itzc3MQ999wjCgoKynx8Ll26JCRJEgsWLCi1r3HjxqJ///7yz//3f/8nHnroIfHuu++Kjz/+WDz99NNCqVSKRx991OS80aNHC5VKJcLDw8Xo0aPFypUrxeeff17m2C153bZt21YEBQWJOXPmiKVLl4rGjRsLV1dXcePGDSGEEMeOHROPP/64ACCWLFkiP+5ZWVni33//FS4uLqJDhw7i/fffFytXrhRTp04V999/f5mPj/F9utP73fr160W3bt2ESqWSbzc2Ntbs9bZv3y6ioqKEn5+ffOwPP/wghLj9XGjXrp1o3769WLJkiZg5c6ZwdXUVHTt2NLlORV9H5qxatUq+Tx9//LF4//33xdNPPy1efPFF+RhLXn/33HOPGDNmjFiyZIlYtmyZ6NOnT6n3ByGq7zNBp9OJBx98UEiSJJ555hmxfPlyMXDgQAFATJ482eLfb1l+++034eTkJJo2bSoWLFggv+f5+PiIuLi4Uo9lmzZtxMCBA8Xy5cvFk08+KQCIp556Sj5u/fr1QqVSiW7dusnPjX379gkhbn/WGF+3qo9nyfeyffv2ybdr+HriiScEALFixQohhBBarVb06dNHuLq6ismTJ4uPP/5YTJw4UTg5OYnBgwebPD7lfd6aM2TIEDFs2DCxcOFC8dFHH4nHHntMABBTp041Oc74vX3SpEniww8/FA8++KAAILZs2SIfl5OTI5o2bSrUarWYNm2aWLp0qWjfvr2IjIwUAMTOnTvL/f1u2rRJtG3bVsyYMUOsWrVKvPbaa8LHx0eEhoaK7Oxs+bjr16+L+vXrCx8fHzFz5kyxcOFC0aRJE/l2jH9nlbmPlfn8qggGwUKI9PR0AaDUk7csR48eFQDEM888Y7J96tSpAoD4/fff5W2hoaECgPwiFkKIbdu2CQBCo9GIS5cuyds//vjjUk9Kw5vT//3f/8nbdDqd6N+/v3BxcREpKSnyduNAQQghCgoKROvWrcWDDz5osh2AUCgU4uTJk6XuW8kg2MvLS7zwwgtlPhYFBQXC399ftG7dWuTm5srbf/75ZwFAzJgxo9R9mT17tsk1DB92d2J4LL/77jt5W3p6uggKChLt2rWTt82ZM0e4ubmJc+fOmZz/6quvCqVSKRISEoQQQqSkpJS6vwatWrUSw4YNk3++++675Rfq6dOnhRBCfP/99wKAOHbsmBBCiPj4eKFUKsU777xjcq0TJ04IJycnebtOpxNNmjQR0dHRQqfTycfl5OSIRo0aid69e8vbDG/Q//nPf0yuOXToUOHr63vHx6x79+4CgPjqq6/kbWfOnJGfA3/99Ze83fC8XLt2rbytskGwEPo3z7LeZMsKghs0aGDyJvbNN98IAOL9998vc0y7d+8WAMSXX35pchtbt241u72kzp07l3r+HTx4UACQg1chSr++hBBi7ty5QpIkk9ex4Xn+6quvljre3ONpyevWxcVFXLhwQd527NgxAcDkP2wLFy4s9aEjhBBLliwRAEzeMyrCkve70aNHCzc3twpdt3///mafW4bnQosWLUR+fr68/f333xcAxIkTJ4QQlr2OzBk8eLBo1apVucdY8voz9/yIjo4ulVyprs+EH3/8UQAQb7/9tsntP/roo0KSJPl5ZMnv15yoqCjh7+8vUlNT5W3Hjh0TCoVCjBo1St5meCwHDRpkcv6ECRNM3keFEMLNzc3kvcSgrCC4Ko+nufcyY+fPnxdeXl6id+/eoqioSAihD9QVCoXYvXu3ybErV64UAMTevXvlbeV93ppj7nn07LPPCldXV5GXlydvM7y3G79H5efni8DAQPHII4/I25YuXSoAiG+++Ubelp2dLSIiIioUBJsbz/79+0vd9uTJkwUAk/98JicnCy8vr1K/
M0vvY2U/vyqC5RAAMjIyAAAeHh4VOn7Lli0AgClTpphs/+9//wsApWqHW7Zsic6dO8s/d+rUCQDw4IMP4q677iq1/eLFi6Vuc+LEifL3hj+vFBQUmKT/NRqN/P2tW7eQnp6Obt26lSpdAIDu3bujZcuWd7ingLe3Nw4cOIBr166Z3f/PP/8gOTkZEyZMMKlx6t+/P5o3b262jvq5554z+blbt25m77M5wcHBGDp0qPyzp6cnRo0ahSNHjiAxMRGAvjSjW7du8PHxwY0bN+SvXr16QavV4s8//7zj7XTr1g27d+8GoC8JOXbsGMaPHw8/Pz95++7du+Ht7Y3WrVsD0P/ZTafTYdiwYSa3GxgYiCZNmmDnzp0AgKNHj+L8+fMYOXIkUlNT5eOys7PRs2dP/Pnnn6X+hGTuMUtNTZWfu+Vxd3fHiBEj5J+bNWsGb29vtGjRQn7OAeU//6rLqFGjTF6Hjz76KIKCguTXnDmbNm2Cl5cXevfubfK4t2/fHu7u7vLjXpbhw4fj0KFDiI2Nlbdt3LgRKpUKgwcPlrcZv76ys7Nx48YNdOnSBUKIUn9iBYDnn3++QvfZktdtr169EB4eLv8cGRkJT0/PCv3ODJPk/ve//1k0Oc/S9ztrGTt2rEltcbdu3QDcfn5W5nVkzNvbG1euXMHff/99x7FU5PVn/HtMT0/HjRs30L17d1y8eBHp6ekm51fHZ8KWLVugVCrx4osvmpz33//+F0II/Prrr/JxQOV+v9evX8fRo0cxZswY1KtXT94eGRmJ3r17m33dvvDCCyY//9///Z/JOCrDGo+nOdnZ2Rg6dCh8fHzw9ddfy/XImzZtQosWLdC8eXOT95wHH3wQAEq951T08xYwfR5lZmbixo0b6NatG3JycnDmzBmTY93d3fHkk0/KP7u4uKBjx44m92/Lli0ICgrCo48+Km9zdXXF+PHjLR5PYWEhUlNTERERAW9vb5P3qC1btuDee+9Fx44d5W3169eXS8Gqch9t+fnFiXHQB1KA/pdREZcuXYJCoUBERITJ9sDAQHh7e5eqLzR+EQKAl5cXACAkJMTs9pJ1jAqFAo0bNzbZ1rRpUwAwqcv5+eef8fbbb+Po0aOlavVKatSoUZn3z9iCBQswevRohISEoH379njooYcwatQoeTyG+9qsWbNS5zZv3hx79uwx2aZWq+VaXAMfH5871m4aRERElLo/xo9FYGAgzp8/j+PHj5e6HYPk5OQ73k63bt2wcuVKXLhwAbGxsZAkCZ07d5aD43HjxmH37t3o2rUrFAr9/yXPnz8PIQSaNGli9pqGSVHnz58HAIwePbrM209PT4ePj4/8c8nnkGHfrVu35OdvWRo2bFjqMfPy8qrw8686lXzsJElCREREuW3Wzp8/j/T0dPj7+5vdf6ff92OPPYYpU6bI9chCCGzatAn9+vUzeWwTEhIwY8YMbN68udRjVDLIcXJyQsOGDcu9XQNLXrclnwdAxV8/w4cPxyeffIJnnnkGr776Knr27ImHH34Yjz76qPwcNsfS9ztrKe85D1TudWTslVdewY4dO9CxY0dERESgT58+GDlyJLp27WrRWAzPkb179+Ktt97C/v37S9ULp6eny68vc9ezxWfCpUuXEBwcXCq506JFC3m/4d/K/n7Le/9v0aIFtm3bJk8KNSj5Gg8PD4dCoahSK8WqPp5lGTduHGJjY7Fv3z74+vrK28+fP4/Tp09X+DOmop+3gL6u9o033sDvv/9eKslR8n3G3Hu7j4+PyXyWS5cumf3cNPc7Myc3Nxdz587F2rVrcfXqVZN6cuPxXLp0ySQoLe92qnofrfn5xSAY+iA4ODgY//77r0XnVbTBdlmzWcvabvwkq6jdu3dj0KBBuP/++/Hhhx8iKCgIzs7OWLt2rdmJHsb/EyvPsGHD0K1bN/zwww/Yvn07Fi5ciPnz5+P7779Hv379LB6ntWf2mqPT6dC7d29MmzbN7H7Dh0V57rvvPgDA
n3/+iYsXL+Luu++Gm5sbunXrhg8++ABZWVk4cuQI3nnnHZPblSQJv/76q9n76e7uLh8H6BcVKGsxAcOxBlV5rlTl+VfWc9ySiYy2ptPp4O/vjy+//NLs/rI+qAyCg4PRrVs3fPPNN3jttdfw119/ISEhAfPnz5eP0Wq16N27N27evIlXXnkFzZs3h5ubG65evYoxY8aUyjiqVKpyA0sDS1+3VXkeaDQa/Pnnn9i5cyd++eUXbN26FRs3bsSDDz6I7du33/G1Wd0LCtzpvlbmdWSsRYsWOHv2LH7++Wds3boV3333HT788EPMmDFDbp1Y0bHExsaiZ8+eaN68ORYvXoyQkBC4uLhgy5YtWLJkSannR3V8JljKXgtGWON2bfF4vv/++/j666/xxRdflHp+6XQ6tGnTBosXLzZ7bskAraKft2lpaejevTs8PT0xe/ZshIeHQ61W4/Dhw3jllVcq/Dyy5vPl//7v/7B27VpMnjwZnTt3hpeXFyRJwogRIyrV7tFa99Fa951BcLEBAwZg1apV2L9/v8mfVcwJDQ2FTqfD+fPn5f9VA0BSUhLS0tIQGhpq1bHpdDpcvHjRJHg7d+4cAMgzqr/77juo1Wps27bNpIfi2rVrq3z7QUFBmDBhAiZMmIDk5GTcfffdeOedd9CvXz/5vp49e1b+U5DB2bNnrf5YXLhwAUIIkzfOko9FeHg4srKy0KtXr3KvVd6b71133YW77roLu3fvxsWLF+U/xd5///2YMmUKNm3aBK1Wi/vvv18+Jzw8HEIINGrUqNxA2/DnbE9PzzuO0d58fHzMLrpQkexfZT7cDNk9AyEELly4gMjIyDLPCQ8Px44dO9C1a9cKf9iUNHz4cEyYMAFnz57Fxo0b4erqioEDB8r7T5w4gXPnzmHdunUYNWqUvP23336r1O0Z2OJ1W97jrlAo0LNnT/Ts2ROLFy/Gu+++i9dffx07d+4s87loq/e7qgY/1ngdubm5Yfjw4Rg+fDgKCgrw8MMP45133sH06dMtamH1008/IT8/H5s3bzbJSt6pFKeyKvKZEBoaih07diAzM9MkG2z4c7Ph91aV36/x+39JZ86cgZ+fn0kWGNC/xo0zoxcuXIBOpzPpDmLvFdx2796NqVOnYvLkyWb/nB8eHo5jx46hZ8+eVh3rrl27kJqaiu+//97ksyUuLq7S1wwNDcW///5b6nPT3O/MnG+//RajR4/Ge++9J2/Ly8sr9bkQGhpa6v3b3O3Y4j5WBWuCi02bNg1ubm545plnkJSUVGp/bGws3n//fQDAQw89BAClVjsy/K+wf//+Vh/f8uXL5e+FEFi+fDmcnZ3Rs2dPAPr/FUmSZJKhi4+Pr9KqO1qtttSfJvz9/REcHCz/2bZDhw7w9/fHypUrTf6U++uvv+L06dNWfyyuXbuGH374Qf45IyMDn3/+OaKiohAYGAhAn73ev38/tm3bVur8tLQ0FBUVAdDXRRm2mdOtWzf8/vvvOHjwoBwER0VFwcPDA/PmzZPbWRk8/PDDUCqVmDVrVqn/jQohkJqaCgBo3749wsPDsWjRImRlZZW63ZSUlIo+HDYXHh6O9PR0kz+vXb9+3eR3UBbDh58lK5d9/vnnJmVJ3377La5fv17uXx2GDRsGrVaLOXPmlNpXVFRUodt/5JFHoFQq8fXXX2PTpk0YMGCAyYe3Ietg/HsVQsjvCZVli9dtWY/7zZs3Sx1ryHCV1y7JVu93bm5upd5fLFHV15Hh9Wjg4uKCli1bQgiBwsJCi8Zi7vmRnp5ulSREWe70mfDQQw9Bq9WaHAcAS5YsgSRJ8muqKr/foKAgREVFYd26dSbPt3///Rfbt2+Xr21sxYoVJj8vW7YMAExe425ubnZZ8RDQv78NGzYM9913HxYuXGj2mGHDhuHq1atYvXp1qX25ubnIzs6u1G2bex4VFBTgww8/rNT1AP3v99q1ayZtF3Nyciq88JdSqSz1ebZs2bJSfw186KGH8Ndf
f+HgwYPytpSUlFJ/obPFfawKZoKLhYeH46uvvsLw4cPRokULkxXj9u3bh02bNsm9Ctu2bYvRo0dj1apVcmr/4MGDWLduHYYMGYIHHnjAqmNTq9XYunUrRo8ejU6dOuHXX3/FL7/8gtdee03+U2///v2xePFi9O3bFyNHjkRycjJWrFiBiIgIkwDGEpmZmWjYsCEeffRRtG3bFu7u7tixYwf+/vtv+X+Fzs7OmD9/PsaOHYvu3bvj8ccfR1JSEt5//32EhYXhpZdestrjAOhLGZ5++mn8/fffCAgIwJo1a5CUlGTyYfPyyy9j8+bNGDBgAMaMGYP27dsjOzsbJ06cwLfffov4+Hj4+flBo9GgZcuW2LhxI5o2bYp69eqhdevW8kS3bt264csvv4QkSXJ5hFKpRJcuXbBt2zb06NHDZOJOeHg43n77bUyfPh3x8fEYMmQIPDw8EBcXhx9++AHjx4/H1KlToVAo8Mknn6Bfv35o1aoVxo4diwYNGuDq1avYuXMnPD098dNPP1n1causESNG4JVXXsHQoUPx4osvIicnBx999BGaNm1qduKWsaioKCiVSsyfPx/p6elQqVRyH9Wy1KtXD/fddx/Gjh2LpKQkLF26FBERERg3blyZ53Tv3h3PPvss5s6di6NHj6JPnz5wdnbG+fPnsWnTJrz//vsmk0LM8ff3xwMPPIDFixcjMzOzVO/n5s2bIzw8HFOnTsXVq1fh6emJ7777rsr107Z43Rr+Y/b6669jxIgRcHZ2xsCBAzF79mz8+eef6N+/P0JDQ5GcnIwPP/wQDRs2lJ/f5tjq/a59+/bYuHEjpkyZgnvuuQfu7u4m2fc7qerrqE+fPggMDETXrl0REBCA06dPY/ny5ejfv3+FJ0kbX8vFxQUDBw7Es88+i6ysLKxevRr+/v64fv26RdeqiIp8JgwcOBAPPPAAXn/9dcTHx6Nt27bYvn07/ve//2Hy5MlyJr2qv9+FCxeiX79+6Ny5M55++mnk5uZi2bJl8PLyMumraxAXF4dBgwahb9++2L9/P7744guMHDkSbdu2lY9p3749duzYgcWLFyM4OBiNGjUyW29qCy+++CJSUlIwbdo0bNiwwWRfZGQkIiMj8dRTT+Gbb77Bc889h507d6Jr167QarU4c+YMvvnmG2zbtg0dOnSw+La7dOkCHx8fjB49Gi+++CIkScL69eurVN4wbtw4LF++HKNGjcKhQ4cQFBSE9evXy0mgOxkwYADWr18PLy8vtGzZEvv378eOHTtMaqQBfSJx/fr16Nu3LyZNmgQ3NzesWrUKoaGhJu9ltriPVWJRL4k64Ny5c2LcuHEiLCxMuLi4CA8PD9G1a1exbNkyk9YdhYWFYtasWaJRo0bC2dlZhISEiOnTp5scI4S+fYtxr1EDAKVaj8XFxQkAYuHChfI2Q8uh2NhYuS9hQECAeOutt0x6HQohxKeffiqaNGkiVCqVaN68uVi7dq3Z9i/mbtt4n6FlWH5+vnj55ZdF27ZthYeHh3BzcxNt27Y129N348aNol27dkKlUol69eqJJ554Qly5csXkmLLaJ92pRY2B4bHctm2biIyMlO/npk2bSh2bmZkppk+fLiIiIoSLi4vw8/MTXbp0EYsWLTLpG7tv3z7Rvn174eLiUqpd2smTJ+VWTcbefvttATN9mg2+++47cd999wk3Nzfh5uYmmjdvLl544QVx9uxZk+OOHDkiHn74YeHr6ytUKpUIDQ0Vw4YNEzExMaUem5Jtrcy1CjKne/fuZttAWfK83L59u2jdurVwcXERzZo1E1988UWFWqQJIcTq1atF48aNhVKpNGnHU1aLtK+//lpMnz5d+Pv7C41GI/r372/S4kiIstu2rVq1SrRv315oNBrh4eEh2rRpI6ZNmyauXbtWziNkOlYAwsPDw6Tdn8GpU6dEr169hLu7u/Dz8xPjxo2TW5SVbCtXVpswc2Ov6uvW3OM+Z84c0aBBA6FQKOTnSUxM
jBg8eLAIDg4WLi4uIjg4WDz++OOlWgmaU9H3O0tapGVlZYmRI0cKb29vAUB+XAzPhZKva8P7Y8kWSBV5HZnz8ccfi/vvv18+Lzw8XLz88ssiPT1dPsaS19/mzZtFZGSkUKvVIiwsTMyfP1+sWbPGbEuv6vpMyMzMFC+99JIIDg4Wzs7OokmTJmLhwoUmLeWEqPjvtyw7duwQXbt2FRqNRnh6eoqBAweKU6dOmRxjeCxPnTolHn30UeHh4SF8fHzExIkTS73ezpw5I+6//36h0WgEAPn5XVaLtKo8niVfa4a2XOa+jD8fCgoKxPz580WrVq2ESqUSPj4+on379mLWrFkmz6HyPm/N2bt3r7j33nuFRqMRwcHBYtq0aXL7L+N2ZmW9t5t7j7l06ZIYNGiQcHV1FX5+fmLSpElyC8k7tUi7deuWGDt2rPDz8xPu7u4iOjpanDlzxuz7zvHjx0X37t2FWq0WDRo0EHPmzBGffvppqd9ZVe+jJb/zO5GKTyQHNWbMGHz77bdm/9xHVFvs2rULDzzwADZt2nTHrC1RXVZTPxNmzpyJWbNmISUlBX5+fvYeDhEA1gQTERERUR3EIJiIiIiI6hwGwURERERU57AmmIiIiIjqHGaCiYiIiKjOYRBMRERERHUOF8swQ6fT4dq1a/Dw8LD78o1EREREVJoQApmZmQgODoZCYXlel0GwGdeuXUNISIi9h0FEREREd3D58mU0bNjQ4vMYBJthWDLz8uXL8PT0tPNoiIiIiKikjIwMhISEWLzUuQGDYDMMJRCenp4MgomIiIgcWGVLVzkxjoiIiIjqHAbBRERERFTnMAgmIiIiojqHNcFERER2ptVqUVhYaO9hEDkUpVIJJycnm7WrZRBMRERkR1lZWbhy5QqEEPYeCpHDcXV1RVBQEFxcXKx+bQbBREREdqLVanHlyhW4urqifv36XKCJqJgQAgUFBUhJSUFcXByaNGlSqQUxysMgmIiIyE4KCwshhED9+vWh0WjsPRwih6LRaODs7IxLly6hoKAAarXaqtfnxDgiIiI7YwaYyDxrZ39Nrm2zKxMREREROSgGwURERERU5zAIJiIiIqsZM2YMhgwZYu9hEN0Rg2AiIiIiqnMYBBMRERFRncMgmIiIyEEIIZBTUGSXL0sX6/j222/Rpk0baDQa+Pr6olevXsjOzpb3L1q0CEFBQfD19cULL7xgsiLe+vXr0aFDB3h4eCAwMBAjR45EcnKyvH/Xrl2QJAnbtm1Du3btoNFo8OCDDyI5ORm//vorWrRoAU9PT4wcORI5OTnyeT169MDEiRMxceJEeHl5wc/PD2+++abJfbt16xZGjRoFHx8fuLq6ol+/fjh//rzJffvuu+/QqlUrqFQqhIWF4b333rPosaGagX2CiYiIHERuoRYtZ2yzy22fmh0NV5eKhQXXr1/H448/jgULFmDo0KHIzMzE7t275WBz586dCAoKws6dO3HhwgUMHz4cUVFRGDduHAB9f+Q5c+agWbNmSE5OxpQpUzBmzBhs2bLF5HZmzpyJ5cuXw9XVFcOGDcOwYcOgUqnw1VdfISsrC0OHDsWyZcvwyiuvyOesW7cOTz/9NA4ePIh//vkH48ePx1133SXf9pgxY3D+/Hls3rwZnp6eeOWVV/DQQw/h1KlTcHZ2xqFDhzBs2DDMnDkTw4cPx759+zBhwgT4+vpizJgxVnikyVFIgus0lpKRkQEvLy+kp6fD09PT3sMhIqJaKi8vD3FxcWjUqBHUajVyCopqRBB8+PBhtG/fHvHx8QgNDTXZN2bMGOzatQuxsbFQKpUAgGHDhkGhUGDDhg1mr/fPP//gnnvuQWZmJtzd3bFr1y488MAD2LFjB3r27AkAmDdvHqZPn47Y2Fg0btwYAPDcc88hPj4eW7duBaDPBCcnJ+PkyZNy7+VXX30VmzdvxqlTp3D+/Hk0bdoUe/fuRZcuXQAAqampCAkJwbp16/DYY4/hiSeeQEpKCrZv
3y6Pb9q0afjll19w8uTJij6cZCUlXyPGqhqvMRNMRETkIDTOSpyaHW23266otm3bomfPnmjTpg2io6PRp08fPProo/Dx8QEAtGrVSg6AASAoKAgnTpyQfz506BBmzpyJY8eO4datW9DpdACAhIQEtGzZUj4uMjJS/j4gIACurq5yAGzYdvDgQZOx3XvvvSaLj3Tu3BnvvfcetFotTp8+DScnJ3Tq1Ene7+vri2bNmuH06dMAgNOnT2Pw4MEm1+zatSuWLl0KrVZrcr+oZmMQTERE5CAkSapwNtaelEolfvvtN+zbtw/bt2/HsmXL8Prrr+PAgQMAAGdnZ5PjJUmSA93s7GxER0cjOjoaX375JerXr4+EhARER0ejoKDA5Dzj60iSVO51iSzFiXFERERkMUmS0LVrV8yaNQtHjhyBi4sLfvjhhzued+bMGaSmpmLevHno1q0bmjdvbjIprqoMgbjBX3/9hSZNmkCpVKJFixYoKioyOSY1NRVnz56VM9AtWrTA3r17Ta6xd+9eNG3alFngWoZBMBEREVnkwIEDePfdd/HPP/8gISEB33//PVJSUtCiRYs7nnvXXXfBxcUFy5Ytw8WLF7F582bMmTPHamNLSEjAlClTcPbsWXz99ddYtmwZJk2aBABo0qQJBg8ejHHjxmHPnj04duwYnnzySTRo0EAugfjvf/+LmJgYzJkzB+fOncO6deuwfPlyTJ061WpjJMfAIJiIiIgs4unpiT///BMPPfQQmjZtijfeeAPvvfce+vXrd8dz69evj88++wybNm1Cy5YtMW/ePCxatMhqYxs1ahRyc3PRsWNHvPDCC5g0aRLGjx8v71+7di3at2+PAQMGoHPnzhBCYMuWLXKpxd13341vvvkGGzZsQOvWrTFjxgzMnj2bnSFqIXaHMIPdIYiIqDqUN/OdLNejRw9ERUVh6dKl9h4KWYktu0MwE0xEREREdY7jT0GtAw5cP4Bb+bfQKbATfNQ+9h4OERERUa3HINgBvP3X24jPiMfa6LXoENjB3sMhIiKqkXbt2mXvIVANwnIIB6Bx0gAAcoty7TwSIiIiorqBQbADMATBOUU5dh4JERERUd3AINgBuDq7AmAmmIiIiKi6MAh2AHImuJCZYCIiIqLqwCDYAbg6MRNMREREVJ0YBDsA1gQTERERVS8GwQ5A48zuEEREVDuMGTMGQ4YMsfcwiO6IQbADYDkEEREROaL4+HhIkoSjR4/aeyhW5xBB8IoVKxAWFga1Wo1OnTrh4MGDFTpvw4YNkCSp1P84x4wZA0mSTL769u1rg5FbByfGEREREVUvuwfBGzduxJQpU/DWW2/h8OHDaNu2LaKjo5GcnFzuefHx8Zg6dSq6detmdn/fvn1x/fp1+evrr7+2xfCtgi3SiIgIACAEUJBtny8hLBrqt99+izZt2kCj0cDX1xe9evVCdna2vH/RokUICgqCr68vXnjhBRQWFsr71q9fjw4dOsDDwwOBgYEYOXKkyef+rl27IEkStm3bhnbt2kGj0eDBBx9EcnIyfv31V7Ro0QKenp4YOXIkcnJuJ5B69OiBiRMnYuLEifDy8oKfnx/efPNNCKP7duvWLYwaNQo+Pj5wdXVFv379cP78eZP79t1336FVq1ZQqVQICwvDe++9V+HHJSwsDG+//TZGjRoFd3d3hIaGYvPmzUhJScHgwYPh7u6OyMhI/PPPPxbdZmWvu2fPHnTr1g0ajQYhISF48cUXTX5PYWFhePfdd/Gf//wHHh4euOuuu7Bq1Sp5f6NGjQAA7dq1gyRJ6NGjh/xYT5482eS2hgwZgjFjxlR5zNVG2FnHjh3FCy+8IP+s1WpFcHCwmDt3bpnnFBUViS5duohPPvlEjB49WgwePNhkv7ltlkhPTxcARHp6eqWvYYmfYn8SrT9rLZ7e9nS13B4RETmG3NxccerUKZGbm6vfkJ8lxFue9vnKz6rwuK9duyacnJzE4sWLRVxcnDh+/LhYsWKF
yMzMFKNHjxaenp7iueeeE6dPnxY//fSTcHV1FatWrZLP//TTT8WWLVtEbGys2L9/v+jcubPo16+fvH/nzp0CgLj33nvFnj17xOHDh0VERITo3r276NOnjzh8+LD4888/ha+vr5g3b558Xvfu3YW7u7uYNGmSOHPmjPjiiy9K3fagQYNEixYtxJ9//imOHj0qoqOjRUREhCgoKBBCCPHPP/8IhUIhZs+eLc6ePSvWrl0rNBqNWLt2bYUem9DQUFGvXj2xcuVKce7cOfH8888LT09P0bdvX/HNN9+Is2fPiiFDhogWLVoInU5X4duszHUvXLgg3NzcxJIlS8S5c+fE3r17Rbt27cSYMWNKXXfFihXi/PnzYu7cuUKhUIgzZ84IIYQ4ePCgACB27Nghrl+/LlJTU+XHetKkSSb3ffDgwWL06NFVGnNJpV4jRqoar9k1CM7PzxdKpVL88MMPJttHjRolBg0aVOZ5M2bMEEOGDBFCmA94R48eLby8vET9+vVF06ZNxXPPPSdu3LhR5vXy8vJEenq6/HX58uVqDYJjLsWI1p+1FiN/GVktt0dERI6hpgbBhw4dEgBEfHx8qX2jR48WoaGhoqioSN722GOPieHDh5d5vb///lsAEJmZmUKI20Hwjh075GPmzp0rAIjY2Fh527PPPiuio6Pln7t3714qoHrllVdEixYthBBCnDt3TgAQe/fulfffuHFDaDQa8c033wghhBg5cqTo3bu3yfhefvll0bJly/IflGKhoaHiySeflH++fv26ACDefPNNedv+/fsFAHH9+vUK32Zlrvv000+L8ePHm1x39+7dQqFQyM+5ktfV6XTC399ffPTRR0IIIeLi4gQAceTIEZPrVDQItnTMJdkyCHaq7syzsRs3bkCr1SIgIMBke0BAAM6cOWP2nD179uDTTz8tt0C7b9++ePjhh9GoUSPExsbitddeQ79+/bB//34olcpSx8+dOxezZs2q0n2pCtYEExERAMDZFXjtmv1uu4Latm2Lnj17ok2bNoiOjkafPn3w6KOPwsfHBwDQqlUrk8/boKAgnDhxQv750KFDmDlzJo4dO4Zbt25Bp9MBABISEtCyZUv5uMjISPn7gIAAuLq6onHjxibbSs4juvfeeyFJkvxz586d8d5770Gr1eL06dNwcnJCp06d5P2+vr5o1qwZTp8+DQA4ffo0Bg8ebHLNrl27YunSpdBqtWbjiJJKjhsA2rRpU2pbcnIyAgMDK3ybll732LFjOH78OL788kv5GCEEdDod4uLi0KJFi1LXlSQJgYGBdyxLrShLx1yd7BoEWyozMxNPPfUUVq9eDT8/vzKPGzFihPx9mzZtEBkZifDwcOzatQs9e/Ysdfz06dMxZcoU+eeMjAyEhIRYd/DlYE0wEREBACQJcHGz9yjuSKlU4rfffsO+ffuwfft2LFu2DK+//joOHDgAAHB2djY5XpIkOdDNzs5GdHQ0oqOj8eWXX6J+/fpISEhAdHQ0CgoKTM4zvo4kSeVe15GUHHdZ2ywdu6XXzcrKwrPPPosXX3yx1LXuuusus9c1XOdOY1MoFCa11gBM6r4rO+bqZNcg2M/PD0qlEklJSSbbk5KSzP5vIDY2FvHx8Rg4cKC8zfCgOTk54ezZswgPDy91XuPGjeHn54cLFy6YDYJVKhVUKlVV706lGTLBDIKJiKimkCQJXbt2RdeuXTFjxgyEhobihx9+uON5Z86cQWpqKubNmycnnKw5McoQiBv89ddfaNKkCZRKJVq0aIGioiIcOHAAXbp0AQCkpqbi7Nmzcga6RYsW2Lt3r8k19u7di6ZNm1YoC1wZtrrNu+++G6dOnUJERESlr+Hi4gIA0Gq1Jtvr16+P69evyz9rtVr8+++/eOCBByp9W9XNrt0hXFxc0L59e8TExMjbdDodYmJi0Llz51LHN2/eHCdOnMDRo0flr0GDBuGBBx7A0aNHy8zeXrlyBampqQgKCrLZfakK9gkmIqKa
5MCBA3j33Xfxzz//ICEhAd9//z1SUlLkP6+X56677oKLiwuWLVuGixcvYvPmzZgzZ47VxpaQkIApU6bg7Nmz+Prrr7Fs2TJMmjQJANCkSRMMHjwY48aNw549e3Ds2DE8+eSTaNCggVyO8N///hcxMTGYM2cOzp07h3Xr1mH58uWYOnWq1cZYkq1u85VXXsG+ffswceJEHD16FOfPn8f//vc/TJw4scLX8Pf3h0ajwdatW5GUlIT09HQAwIMPPohffvkFv/zyC86cOYPnn38eaWlpVRpvdbN7i7QpU6Zg9erVWLduHU6fPo3nn38e2dnZGDt2LABg1KhRmD59OgBArVajdevWJl/e3t7w8PBA69at4eLigqysLLz88sv466+/EB8fj5iYGAwePBgRERGIjo62510tk3EmWCcc7886RERExjw9PfHnn3/ioYceQtOmTfHGG2/gvffeQ79+/e54bv369fHZZ59h06ZNaNmyJebNm4dFixZZbWyjRo1Cbm4uOnbsiBdeeAGTJk3C+PHj5f1r165F+/btMWDAAHTu3BlCCGzZskX+E/3dd9+Nb775Bhs2bEDr1q0xY8YMzJ4926T1l7XZ6jYjIyPxxx9/4Ny5c+jWrRvatWuHGTNmIDg4uMLXcHJywgcffICPP/4YwcHB8n8W/vOf/2D06NEYNWoUunfvjsaNG9eoLDAASKJkQYcdLF++HAsXLkRiYiKioqLwwQcfyEXrPXr0QFhYGD777DOz544ZMwZpaWn48ccfAQC5ubkYMmQIjhw5grS0NAQHB6NPnz6YM2dOqQl4ZcnIyICXlxfS09Ph6elpjbtYrtyiXHT8siMA4MDIA3KNMBER1W55eXmIi4tDo0aNoFar7T2cGq9Hjx6IiorC0qVL7T0UspLyXiNVjdccYmKcobG1Obt27Sr33JLBsUajwbZt26w0suqhVqohQYKAQE5RDoNgIiIiIhuzezkE6ScXyCURhawLJiIickS7d++Gu7t7mV9UszhEJpj0dcE5RTnIKWKvYCIiosq401+Pq6pDhw7lrlNANQuDYAfh6uyK1LxUdoggIiJyUBqNpkrtxsixsBzCQXDVOCIiIqLqwyDYQbBXMBEREVH1YRDsIORMMGuCiYiIiGyOQbCDMLRFYyaYiIiIyPYYBDsI41XjiIiIiMi2GAQ7CE6MIyKi2iw+Ph6SJLHFGDkMBsEOghPjiIiIiKoPg2AHoXHmxDgiIiKi6sIg2EEwE0xEREII5BTm2OVLCGHRWLdu3Yr77rsP3t7e8PX1xYABAxAbGyvvP3jwINq1awe1Wo0OHTrgyJEjJudrtVo8/fTTaNSoETQaDZo1a4b333/f5JgxY8ZgyJAhePfddxEQEABvb2/Mnj0bRUVFePnll1GvXj00bNgQa9eurfyDTnUWV4xzEKwJJiKi3KJcdPqqk11u+8DIA3KnoorIzs7GlClTEBkZiaysLMyYMQNDhw7F0aNHkZOTgwEDBqB379744osvEBcXh0mTJpmcr9Pp0LBhQ2zatAm+vr7Yt28fxo8fj6CgIAwbNkw+7vfff0fDhg3x559/Yu/evXj66aexb98+3H///Thw4AA2btyIZ599Fr1790bDhg2t9nhQ7ccg2EGwRRoREdUkjzzyiMnPa9asQf369XHq1Cns27cPOp0On376KdRqNVq1aoUrV67g+eefl493dnbGrFmz5J8bNWqE/fv345tvvjEJguvVq4cPPvgACoUCzZo1w4IFC5CTk4PXXnsNADB9+nTMmzcPe/bswYgRI2x8r6k2YRDsILhYBhERaZw0ODDygN1u2xLnz5/HjBkzcODAAdy4cQM6nQ4AkJCQgNOnTyMyMhJqtVo+vnPnzqWusWLFCqxZswYJCQnIzc1FQUEBoqKiTI5p1aoVFIrb1ZsBAQFo3bq1/LNSqYSvry+Sk5MtGj8Rg2AHwZpgIiKSJMmikgR7GjhwIEJDQ7F69WoEBwdDp9OhdevWKCgoqND5
GzZswNSpU/Hee++hc+fO8PDwwMKFC3HggOl/ApydnU1+liTJ7DZDEE5UUQyCHQRrgomIqKZITU3F2bNnsXr1anTr1g0AsGfPHnl/ixYtsH79euTl5cnZ4L/++svkGnv37kWXLl0wYcIEeZvxxDoiW2N3CAfBmmAiIqopfHx84Ovri1WrVuHChQv4/fffMWXKFHn/yJEjIUkSxo0bh1OnTmHLli1YtGiRyTWaNGmCf/75B9u2bcO5c+fw5ptv4u+//67uu0J1GINgB8GaYCIiqikUCgU2bNiAQ4cOoXXr1njppZewcOFCeb+7uzt++uknnDhxAu3atcPrr7+O+fPnm1zj2WefxcMPP4zhw4ejU6dOSE1NNckKE9maJCxtDFgHZGRkwMvLC+np6fD09KyW20zJScGDmx6EQlLg6FNHIUlStdwuERHZT15eHuLi4tCoUSOTSWREpFfea6Sq8RozwQ7CkAnWCR3ytfl2Hg0RERFR7cYg2EEYt6ZhXTARERGRbTEIdhBKhRIqpQoA64KJiIiIbI1BsAORewUXMhNMREREZEsMgh0IO0QQEdVNnKNOZJ4tXxsMgh0IewUTEdUtSqUSACq8yhpRXZOTo08Mllwl0Bq4YpwDMWSCGQQTEdUNTk5OcHV1RUpKCpydnaFQMDdFBOgzwDk5OUhOToa3t7f8H0ZrYhDsQAw1wVw6mYiobpAkCUFBQYiLi8OlS5fsPRwih+Pt7Y3AwECbXJtBsANhJpiIqO5xcXFBkyZNWBJBVIKzs7NNMsAGDIIdCCfGERHVTQqFgivGEVUzFh85EE6MIyIiIqoeDIIdiJwJZk0wERERkU0xCHYgrAkmIiIiqh4Mgh2IoRyCNcFEREREtsUg2IEwE0xERERUPRgEOxD2CSYiIiKqHgyCHYjGmZlgIiIiourAINiByJlg1gQTERER2RQXy3AA3x++ghtZ+QhtoF8VhZlgIiIiIttiJtgBrNh5Ae9uOYNbWfqfWRNMREREZFsMgh2A2rl4XWyhAsBMMBEREZGtMQh2AHIQrHMGwJpgIiIiIltjEOwANMVBsNDqM8FFuiIUagvtOSQiIiKiWo1BsANQO+t/DbriTDDAbDARERGRLTEIdgCq4kxwQZECTgp9ww7WBRMRERHZDoNgB2Aoh8gr1MlLJzMTTERERGQ7DIIdgKEcIq9QKy+YwUwwERERke0wCHYAtzPBWjkTnFvIIJiIiIjIVhgEOwC1URDs6sylk4mIiIhsjUGwAzAEwbnGmWCWQxARERHZDINgB6A2mhhnqAnm0slEREREtsMg2AEYT4xjJpiIiIjI9hgEOwC1U+lyCNYEExEREdkOg2AHoHHRB8H5hTp5YhwzwURERES2wyDYAcjlEEVGmWDWBBMRERHZDINgByCXQxRwsQwiIiKi6sAg2AGoi8shTDLBrAkmIiIishkGwQ7AkAnOY00wERERUbVgEOwA5JrgAi6bTERERFQdHCIIXrFiBcLCwqBWq9GpUyccPHiwQudt2LABkiRhyJAhJtuFEJgxYwaCgoKg0WjQq1cvnD9/3gYjtw6NUTkEa4KJiIiIbM/uQfDGjRsxZcoUvPXWWzh8+DDatm2L6OhoJCcnl3tefHw8pk6dim7dupXat2DBAnzwwQdYuXIlDhw4ADc3N0RHRyMvL89Wd6NKDOUQhVoBF4UaAGuCiYiIiGzJ7kHw4sWLMW7cOIwdOxYtW7bEypUr4erqijVr1pR5jlarxRNPPIFZs2ahcePGJvuEEFi6dCneeOMNDB48GJGRkfj8889x7do1/Pjjj2avl5+fj4yMDJOv6mTIBAOAU3EQzEwwERERke3YNQguKCjAoUOH0KtXL3mbQqFAr169sH///jLPmz17Nvz9/fH000+X2hcXF4fExESTa3p5eaFTp05lXnPu3Lnw8vKSv0JCQqpwryyncrr9a1AIFwDsE0xERERkS3YNgm/cuAGtVouAgACT7QEBAUhMTDR7zp49e/Dp
p59i9erVZvcbzrPkmtOnT0d6err8dfnyZUvvSpVIkiQHwpJQAWAmmIiIiMiWnOw9AEtkZmbiqaeewurVq+Hn52e166pUKqhUKqtdrzI0LkrkF+kgFWeC87R50Oq0UCqUdziTiIiIiCxl1yDYz88PSqUSSUlJJtuTkpIQGBhY6vjY2FjEx8dj4MCB8jadTgcAcHJywtmzZ+XzkpKSEBQUZHLNqKgoG9wL69BPjiuEJNTytjxtHtwUbvYbFBEREVEtZddyCBcXF7Rv3x4xMTHyNp1Oh5iYGHTu3LnU8c2bN8eJEydw9OhR+WvQoEF44IEHcPToUYSEhKBRo0YIDAw0uWZGRgYOHDhg9pqOwtAruEirhAQJAEsiiIiIiGzF7uUQU6ZMwejRo9GhQwd07NgRS5cuRXZ2NsaOHQsAGDVqFBo0aIC5c+dCrVajdevWJud7e3sDgMn2yZMn4+2330aTJk3QqFEjvPnmmwgODi7VT9iRqJ31ZQ/5RfpV47ILs/WT4zR2HhgRERFRLWT3IHj48OFISUnBjBkzkJiYiKioKGzdulWe2JaQkACFwrKE9bRp05CdnY3x48cjLS0N9913H7Zu3Qq1Wn3nk+3EEATnFeqgcdIguzCbmWAiIiIiG5GEEMLeg3A0GRkZ8PLyQnp6Ojw9PavlNkes2o+/Lt7EB4+3w8ex45CQmYDP+32Odv7tquX2iYiIiGqSqsZrdl8sg/Q0ciZYC42TvgYit5CZYCIiIiJbYBDsINRGQbCrsysALp1MREREZCsMgh2E2Uwwa4KJiIiIbIJBsINQlZgYB3DpZCIiIiJbYRDsIAx9gnMLtXB10pdDMBNMREREZBsMgh2EuXII1gQTERER2QaDYAdh3CfYMDGOmWAiIiIi22AQ7CAM5RAmmWDWBBMRERHZBINgB2FcDsGaYCIiIiLbYhDsIFSsCSYiIiKqNgyCHYShJjjXaLEMZoKJiIiIbINBsIPQsE8wERERUbVhEOwgjCfGsSaYiIiIyLYYBDsIkz7BzqwJJiIiIrIlBsEOwqRPMDPBRERERDbFINhBGC+bbKgJZhBMREREZBsMgh2E2rhPcHF3iJzCHAgh7DksIiIiolqJQbCDMATB+UU6qBRqAICAQL42357DIiIiIqqVGAQ7CEMQDACScJG/5+Q4IiIiIutjEOwg1E63fxWFWkCt1GeDWRdMREREZH0Mgh2Ek1IBZ6UEAMgrMq0LJiIiIiLrYhDsQNROxUsnF7BDBBEREZEtMQh2IGoXM0snsyaYiIiIyOoYBDsQ417B8oIZhcwEExEREVkbg2AHYlg6OZ9LJxMRERHZFINgByIvmFHEmmAiIiIiW2IQ7EBuT4wzqglmdwgiIiIiq2MQ7EBuT4wzqglmJpiIiIjI6hgEOxDDghnG5RCsCSYiIiKyPgbBDsRQE5xbcHuxDGaCiYiIiKyPQbADkbtDFLEmmIiIiMiWGAQ7EEOfYNYEExEREdkWg2AHYlwOwZpgIiIiItthEOxAjPsEsyaYiIiIyHYYBDsQOQgu1HGxDCIiIiIbYhDsQDTFNcG5RjXBnBhHREREZH0Mgh2IIROcX6iFxpmZYCIiIiJbYRDsQOSJccaZYE6MIyIiIrI6BsEOhDXBRERERNWDQbADMekTXNwdokhXhEJtoT2HRURERFTrMAh2IMblEIZMMMCSCCIiIiJrYxDsQORlkwt1cFY4w1nhDIAlEURERETWxiDYgdyuCdYCAFeNIyIiIrIRBsEORG3UJxjA7VXjCpkJJiIiIrImBsEORGOUCRZCMBNMREREZCMMgh2IqjgI1gmgUCvkXsGsCSYiIiKyLgbBDsSQCQZMO0QwE0xERERkXQyCHYizUoJC0n+fbxQEsyaYiIiIyLoYBDsQSZJMl0525tLJRERERLbAINjBaLh0MhEREZHNMQh2MMa9gg0T43IKmQkmIiIisiYGwQ5GZdQrmJlgIiIiIttgEOxgjHsFy4tlMAgmIiIisioG
wQ5GbaYmmOUQRERERNbFINjBGJZONq4JZiaYiIiIyLoYBDsY43II1gQTERER2QaDYAejMlMTzD7BRERERNbFINjBaOTFMtgnmIiIiMhWHCIIXrFiBcLCwqBWq9GpUyccPHiwzGO///57dOjQAd7e3nBzc0NUVBTWr19vcsyYMWMgSZLJV9++fW19N6zCXE0wJ8YRERERWZeTvQewceNGTJkyBStXrkSnTp2wdOlSREdH4+zZs/D39y91fL169fD666+jefPmcHFxwc8//4yxY8fC398f0dHR8nF9+/bF2rVr5Z9VKlW13J+qUjuxJpiIiIjI1uyeCV68eDHGjRuHsWPHomXLlli5ciVcXV2xZs0as8f36NEDQ4cORYsWLRAeHo5JkyYhMjISe/bsMTlOpVIhMDBQ/vLx8amOu1NlGhfWBBMRERHZml2D4IKCAhw6dAi9evWStykUCvTq1Qv79++/4/lCCMTExODs2bO4//77Tfbt2rUL/v7+aNasGZ5//nmkpqaWeZ38/HxkZGSYfNmLuT7B+dp8aHVau42JiIiIqLaxaxB848YNaLVaBAQEmGwPCAhAYmJimeelp6fD3d0dLi4u6N+/P5YtW4bevXvL+/v27YvPP/8cMTExmD9/Pv744w/069cPWq35QHLu3Lnw8vKSv0JCQqxzBytB5XR72WRDJhhgSQQRERGRNdm9JrgyPDw8cPToUWRlZSEmJgZTpkxB48aN0aNHDwDAiBEj5GPbtGmDyMhIhIeHY9euXejZs2ep602fPh1TpkyRf87IyLBbIGxcDuGicIFCUkAndMgtyoW7i7tdxkRERERU29g1CPbz84NSqURSUpLJ9qSkJAQGBpZ5nkKhQEREBAAgKioKp0+fxty5c+UguKTGjRvDz88PFy5cMBsEq1Qqh5k4J0+MK9JBkiS4OrkiqzCLdcFEREREVmTXcggXFxe0b98eMTEx8jadToeYmBh07ty5wtfR6XTIz88vc/+VK1eQmpqKoKCgKo23Osg1wQX60g12iCAiIiKyPruXQ0yZMgWjR49Ghw4d0LFjRyxduhTZ2dkYO3YsAGDUqFFo0KAB5s6dC0Bfv9uhQweEh4cjPz8fW7Zswfr16/HRRx8BALKysjBr1iw88sgjCAwMRGxsLKZNm4aIiAiTFmqOSuNS3Ce4SB8Euzq7ArnsFUxERERkTXYPgocPH46UlBTMmDEDiYmJiIqKwtatW+XJcgkJCVAobiess7OzMWHCBFy5cgUajQbNmzfHF198geHDhwMAlEoljh8/jnXr1iEtLQ3BwcHo06cP5syZ4zAlD+Ux7hMMMBNMREREZAuSEELYexCOJiMjA15eXkhPT4enp2e13vbhhFt4+MN9CKmnwe5pD2L0r6NxOPkwFvdYjN6hve98ASIiIqI6oKrxmt0XyyBTtzPBOgDMBBMRERHZAoNgB6N2Lq4JLjExjjXBRERERNbDINjByH2CjSfGgZlgIiIiImtiEOxgDOUQhVqBIu3tpZPZJ5iIiIjIehgEOxhDn2BAv2CGq1NxJriQmWAiIiIia2EQ7GBUTrd/JXmFWk6MIyIiIrIBBsEORqGQ5EA4r1Ar1wSzHIKIiIjIehgEOyB56WRmgomIiIhsgkGwA9I43+4VzBZpRERERNbHINgByb2CC7W3J8YxE0xERERkNQyCHZChHCK3UAuNM1ukEREREVkbg2AHpDYqh2AmmIiIiMj6GAQ7IONyCNYEExEREVkfg2AHpDEqh2AmmIiIiMj6GAQ7IEM5RL5RTXBuUS6EEPYcFhEREVGtwSDYAanNZIIFBPK0efYcFhEREVGtwSDYARlPjFM7qeXtrAsmIiIisg4GwQ7IeGKcQlJw1TgiIiIiK2MQ7ICMyyEA3O4QwV7BRERERFbBINgBGS+bDICZYCIiIiIrYxDsgAzlEPnFmWBXZ/3kONYEExEREVkHg2AHpCmjHIKZYCIiIiLrYBDsgFRyOQRrgomIiIhsgUGw
A1KXqAnmqnFERERE1sUg2BF8OQx4vy1w/RiAcsohChkEExEREVkDg2BHkH4FuBUPZN8AYNonGDCaGMdyCCIiIiKrYBDsCDTe+n/z0gEYl0NwYhwRERGRLTAIdgRqL/2/eWkASvcJNtQEs0UaERERkXUwCHYEam/9v3ImuLgcooiZYCIiIiJbYBDsCAyZ4Nw0AIDKqXhiXAFrgomIiIhsgUGwIyhRE6xx0QfB+UU6CCGYCSYiIiKyMgbBjqBETbBhYhygD4RZE0xERERkXQyCHUHJmmCn27+W3AItM8FEREREVsYg2BGUqAl2UirgrJQA6CfHGWqCGQQTERERWQeDYEdQoiYYANROt9ukGTLBnBhHREREZB0Mgh1BiZpgAFC73O4QYagJ5rLJRERERNbBINgRGNcEC6HfZNQr2LgmWBTvJyIiIqLKYxDsCAyZYF0RUJCt32Qohyi4XRNcJIpQqCu0yxCJiIiIahMGwY7AxQ1QOOm/L9Er2DgTDHByHBEREZE1MAh2BJJUulew0cQ4J4UTXBQuANgrmIiIiMgaGAQ7ihK9glXFNcGGpZM1zuwVTERERGQtDIIdRYlewRrn2+UQAG6vGsc2aURERERVxiDYUZToFWxYOjmvUKffzVXjiIiIiKyGQbCjKFETLGeCC00zwQyCiYiIiKqOQbCjKFETLPcJLjStCebEOCIiIqKqYxDsKErUBKtLZIJZDkFERERkPQyCHUUZNcG5hZwYR0RERGRtDIIdRck+wZwYR0RERGQzDIIdRRk1wXImuHjpZNYEExEREVUdg2BHUUaf4HzWBBMRERFZHYNgR3GHPsGsCSYiIiKyHgbBjkIuh0jT/1iiHELOBBcyE0xERERUVQyCHYUhCC7IArRFpVqkyTXBzAQTERERVRmDYEeh9rz9fV46+wQTERER2RCDYEehdAZc3PXf56UZLZtsWhOcXZhtl+ERERER1SYMgh2JUa/gkplgDxcPAEBmQaZdhkZERERUmzAIdiRGvYINE+MMQbCXSh8gp+Wn2WFgRERERLULg2BHYtQrWGO0bLIQQg6CMwsyodVp7TVCIiIiolqBQbAjMeoVrCoOgnUCKNTeDoIFBEsiiIiIiKrIIYLgFStWICwsDGq1Gp06dcLBgwfLPPb7779Hhw4d4O3tDTc3N0RFRWH9+vUmxwghMGPGDAQFBUGj0aBXr144f/68re9G1ZnUBN/+1eQVaeGscIa7s37iHEsiiIiIiKrG7kHwxo0bMWXKFLz11ls4fPgw2rZti+joaCQnJ5s9vl69enj99dexf/9+HD9+HGPHjsXYsWOxbds2+ZgFCxbggw8+wMqVK3HgwAG4ubkhOjoaeXl51XW3KseoJthFqYBCKv6xgHXBRERERNZk9yB48eLFGDduHMaOHYuWLVti5cqVcHV1xZo1a8we36NHDwwdOhQtWrRAeHg4Jk2ahMjISOzZsweAPgu8dOlSvPHGGxg8eDAiIyPx+eef49q1a/jxxx+r8Z5VglFNsCRJpZZO9lZ5AwDS89PtMToiIiKiWsOuQXBBQQEOHTqEXr16ydsUCgV69eqF/fv33/F8IQRiYmJw9uxZ3H///QCAuLg4JCYmmlzTy8sLnTp1KvOa+fn5yMjIMPmyC6OaYAByEGxYOtkQBDMTTERERFQ1dg2Cb9y4Aa1Wi4CAAJPtAQEBSExMLPO89PR0uLu7w8XFBf3798eyZcvQu3dvAJDPs+Sac+fOhZeXl/wVEhJSlbtVeUY1wQCMFszQB8GeKv2qcgyCiYiIiKrG7uUQleHh4YGjR4/i77//xjvvvIMpU6Zg165dlb7e9OnTkZ6eLn9dvnzZeoO1hFFNMACoSvQKZjkEERERkXU42fPG/fz8oFQqkZSUZLI9KSkJgYGBZZ6nUCgQEREBAIiKisLp06cxd+5c9OjRQz4vKSkJQUFBJteMiooyez2VSgWVSlXFe2MFRjXBAEx6BQMMgomIiIisxa6ZYBcXF7Rv
3x4xMTHyNp1Oh5iYGHTu3LnC19HpdMjPzwcANGrUCIGBgSbXzMjIwIEDByy6pl2UURNsmBjH7hBERERE1mHXTDAATJkyBaNHj0aHDh3QsWNHLF26FNnZ2Rg7diwAYNSoUWjQoAHmzp0LQF+/26FDB4SHhyM/Px9btmzB+vXr8dFHHwEAJEnC5MmT8fbbb6NJkyZo1KgR3nzzTQQHB2PIkCH2upsVY1wTLITcKzi/iJlgIiIiImuyexA8fPhwpKSkYMaMGUhMTERUVBS2bt0qT2xLSEiAQnE7YZ2dnY0JEybgypUr0Gg0aN68Ob744gsMHz5cPmbatGnIzs7G+PHjkZaWhvvuuw9bt26FWq2u9vtnEUNNsK4IKMy5XQ5RwO4QRERERNYkCSGEvQfhaDIyMuDl5YX09HR4enpW3w0LAcz2BYQWeOkUXvglCb8cv46ZA1tiTNdGOHnjJEb8MgIBrgHY8diO6hsXERERkYOparxWI7tD1FqSZFIXrHYqrgku0tcEG1qksRyCiIiIqGoYBDsao7pgjYv+11OyHCJPm4e8IgdfApqIiIjIgTEIdjRGvYJvZ4L1QbC7szucJH0ZN7PBRERERJVnlSA4LS3NGpchwKRXsNwirTgTLEkSV40jIiIisgKLg+D58+dj48aN8s/Dhg2Dr68vGjRogGPHjll1cHWSUU2wxsW0TzDANmlERERE1mBxELxy5UqEhIQAAH777Tf89ttv+PXXX9GvXz+8/PLLVh9gnWNUE6xyKl42ubgcAmCbNCIiIiJrsLhPcGJiohwE//zzzxg2bBj69OmDsLAwdOrUyeoDrHOMaoI1PqZ9ggGwHIKIiIjICizOBPv4+ODy5csAgK1bt6JXr14AACEEtFpteadSRRjXBJdokQawHIKIiIjIGizOBD/88MMYOXIkmjRpgtTUVPTr1w8AcOTIEURERFh9gHWOcZ9gw8S4wtLlEAyCiYiIiCrP4iB4yZIlCAsLw+XLl7FgwQK4u7sDAK5fv44JEyZYfYB1jpk+wcZBsJdKv5/lEERERESVZ3EQ7OzsjKlTp5ba/tJLL1llQHWeuT7BzAQTERERWZXFNcHr1q3DL7/8Iv88bdo0eHt7o0uXLrh06ZJVB1cnGYLg3DSonMtukcZMMBEREVHlWRwEv/vuu9BoNACA/fv3Y8WKFViwYAH8/PyYDbYG4z7BxUFwLsshiIiIiKzK4nKIy5cvyxPgfvzxRzzyyCMYP348unbtih49elh7fHWPoSa4IBNqpT4DbK4mmOUQRERERJVncSbY3d0dqampAIDt27ejd+/eAAC1Wo3c3Fzrjq4uMgTBADS6bABl1AQXpEMndCAiIiIiy1mcCe7duzeeeeYZtGvXDufOncNDDz0EADh58iTCwsKsPb66R+kMOLsBhdlw1WYCAAq1AlqdgFIhyUGwTuiQVZgFTxdPOw6WiIiIqGayOBO8YsUKdO7cGSkpKfjuu+/g6+sLADh06BAef/xxqw+wTiquC1Zps+RNhmywi9IFGid9TXZ6HksiiIiIiCrD4kywt7c3li9fXmr7rFmzrDIggr4kIuMqXAoy5E25hVq4qfS/Lm+VN3KLcpGWn4YQhNhrlEREREQ1lsVBMACkpaXh008/xenTpwEArVq1wn/+8x94eXnd4UyqkOI2aYqCdKic1Mgv0pWqC76efZ0dIoiIiIgqyeJyiH/++Qfh4eFYsmQJbt68iZs3b2Lx4sUIDw/H4cOHbTHGuscwOS43zWjp5NuT4NgmjYiIiKhqLM4Ev/TSSxg0aBBWr14NJyf96UVFRXjmmWcwefJk/Pnnn1YfZJ1j0iv4LqTnFrJNGhEREZEVWRwE//PPPyYBMAA4OTlh2rRp6NChg1UHV2cZMsF5aVA765P15tqkMRNMREREVDkWl0N4enoiISGh1PbLly/Dw8PDKoOq8wxLJ+ell1sOwUwwERERUeVYHAQPHz4cTz/9NDZu3IjLly/j8uXL2LBhA555
5hm2SLMWMzXBueYWzGAQTERERFQpFpdDLFq0CJIkYdSoUSgqKgIAODs74/nnn8e8efOsPsA6yagmmOUQRERERNZncRDs4uKC999/H3PnzkVsbCwAIDw8HK6urlYfXJ1lUhNcOhPM7hBEREREVVOpPsEA4OrqijZt2lhzLGRgVBOs8dEHwfkshyAiIiKymgoFwQ8//HCFL/j9999XejBUzLgm2J99gomIiIisrUJBMFeCq2bGNcFO+ppgcxPjcopyUKgthLPSuZoHSERERFSzVSgIXrt2ra3HQcYMmWBdITyUBQBMJ8Z5uHhAISmgEzqkF6TDT+Nnj1ESERER1VgWt0ijauDiDkj6MggvRQ4A03IIhaSAp4snACAtL63ah0dERERU0zEIdkSSJGeDvZANwLQcAmCbNCIiIqKqYBDsqIrrgj2Kg+D8EkEwV40jIiIiqjwGwY6qOBPsIbIAAHlF5oNgZoKJiIiILMcg2FEV9wp2E8XlEAUshyAiIiKylkotlhETE4OYmBgkJydDp9OZ7FuzZo1VBlbnFWeCXXXFmeBC08eZ5RBERERElWdxEDxr1izMnj0bHTp0QFBQECRJssW4qLgmWKPNBFD2xLj0AgbBRERERJayOAheuXIlPvvsMzz11FO2GA8ZFGeCDUFwXlndIdgijYiIiMhiFtcEFxQUoEuXLrYYCxkrrgl2KdKXQ+QXmS+HYE0wERERkeUsDoKfeeYZfPXVV7YYCxkrzgSrCjMAlD0xjjXBRERERJazuBwiLy8Pq1atwo4dOxAZGQlnZ2eT/YsXL7ba4Oq04ppg5+IgmC3SiIiIiKzH4iD4+PHjiIqKAgD8+++/Jvs4Sc6KijPByoLiILisiXH56RBC8LEnIiIisoDFQfDOnTttMQ4qSe0DAFDmG4JgnUmwa8gEF4kiZBdmw93F3T7jJCIiIqqBqrRYxpUrV3DlyhVrjYWMFWeCFUblDsaT4zROGqiUKgBsk0ZERERkKYuDYJ1Oh9mzZ8PLywuhoaEIDQ2Ft7c35syZU2rhDKqC4ppgqSALSuhLIUqWRLAumIiIiKhyLC6HeP311/Hpp59i3rx56Nq1KwBgz549mDlzJvLy8vDOO+9YfZB1UnEmGADqKXORonVHbqEW3kaHeKu8kZyTjPQ8ZoKJiIiILGFxELxu3Tp88sknGDRokLwtMjISDRo0wIQJExgEW4vSGXB2Awqz4eeUhxSte6mlk+UFM5gJJiIiIrKIxeUQN2/eRPPmzUttb968OW7evGmVQVGx4mywn1MuAJZDEBEREVmLxUFw27ZtsXz58lLbly9fjrZt21plUFSsuC7YtzgIzi0jCOaCGURERESWsbgcYsGCBejfvz927NiBzp07AwD279+Py5cvY8uWLVYfYJ1WnAn2VeQAKLtXMDPBRERERJaxOBPcvXt3nDt3DkOHDkVaWhrS0tLw8MMP4+zZs+jWrZstxlh3qb0BAD5K8+UQ8oIZbJFGREREZBGLM8EAEBwczAlw1aE4E+wNQybYdGIca4KJiIiIKqdCQfDx48fRunVrKBQKHD9+vNxjIyMjrTIwglwT7CllAygnE8wWaUREREQWqVAQHBUVhcTERPj7+yMqKgqSJEEIUeo4SZKg1WrNXIEqpTgT7Al9EFxyYhxrgomIiIgqp0JBcFxcHOrXry9/T9WkuCbYA4ZMsGk5hKfKEwC7QxARERFZqkJBcGhoqPz9pUuX0KVLFzg5mZ5aVFSEffv2mRxLVVScCXYXWQDKLofILMxEka4ITopKlXgTERER1TkWd4d44IEHzC6KkZ6ejgceeMAqg6JixTXBrjrzQbCni6f8PbPBRERERBVncRAshIAkSaW2p6amws3NzSqDomLFmeCygmAnhRM8XDwAsE0aERERkSUq/Pfzhx9+GIB+8tuYMWOgUqnkfVqtFsePH0eXLl2sP8K6rLgmWK01BMG6Uod4q7yRWZDJTDARERGRBSqcCfby8oKXlxeEEPDw
8JB/9vLyQmBgIMaPH48vvviiUoNYsWIFwsLCoFar0alTJxw8eLDMY1evXo1u3brBx8cHPj4+6NWrV6njx4wZA0mSTL769u1bqbHZVXE5hLooA4Ao1R0CMOoQkZdWbcMiIiIiqukqnAleu3YtACAsLAxTp061WunDxo0bMWXKFKxcuRKdOnXC0qVLER0djbNnz8Lf37/U8bt27cLjjz+OLl26QK1WY/78+ejTpw9OnjyJBg0ayMf17dtXHjMAk8x1jVFcDqEURVCjoFQ5BMAFM4iIiIgqw+Ka4Lfeesuqtb+LFy/GuHHjMHbsWLRs2RIrV66Eq6sr1qxZY/b4L7/8EhMmTEBUVBSaN2+OTz75BDqdDjExMSbHqVQqBAYGyl8+Pj5WG3O1cXEHJCUAwAvZ5QbBLIcgIiIiqrhK9dT69ttv8c033yAhIQEFBQUm+w4fPlzh6xQUFODQoUOYPn26vE2hUKBXr17Yv39/ha6Rk5ODwsJC1KtXz2T7rl274O/vDx8fHzz44IN4++234evra/Ya+fn5yM/Pl3/OyMio8H2wKUnSZ4Nzb8JTyimzJhhgJpiIiIjIEhZngj/44AOMHTsWAQEBOHLkCDp27AhfX19cvHgR/fr1s+haN27cgFarRUBAgMn2gIAAJCYmVugar7zyCoKDg9GrVy95W9++ffH5558jJiYG8+fPxx9//IF+/fqVuZrd3LlzTWqcQ0JCLLofNlVcF+yFLJZDEBEREVmJxZngDz/8EKtWrcLjjz+Ozz77DNOmTUPjxo0xY8YMs/2DbWnevHnYsGEDdu3aBbVaLW8fMWKE/H2bNm0QGRmJ8PBw7Nq1Cz179ix1nenTp2PKlCnyzxkZGY4TCBuWTpZykFTOxLiMAgfJXhMRERHVABZnghMSEuRWaBqNBpmZmQCAp556Cl9//bVF1/Lz84NSqURSUpLJ9qSkJAQGBpZ77qJFizBv3jxs374dkZGR5R7buHFj+Pn54cKFC2b3q1QqeHp6mnw5jOI2aWXVBLMcgoiIiMhyFgfBgYGBcsb3rrvuwl9//QUAiIuLgxDComu5uLigffv2JpPaDJPcOnfuXOZ5CxYswJw5c7B161Z06NDhjrdz5coVpKamIigoyKLxOQSjTLC5mmCWQxARERFZzuIg+MEHH8TmzZsBAGPHjsVLL72E3r17Y/jw4Rg6dKjFA5gyZQpWr16NdevW4fTp03j++eeRnZ2NsWPHAgBGjRplMnFu/vz5ePPNN7FmzRqEhYUhMTERiYmJyMrSLyiRlZWFl19+GX/99Rfi4+MRExODwYMHIyIiAtHR0RaPz+7kmuA7dIfIY3cIIiIiooqyuCZ41apV0On0GckXXngBvr6+2LdvHwYNGoRnn33W4gEMHz4cKSkpmDFjBhITExEVFYWtW7fKk+USEhKgUNyO1T/66CMUFBTg0UcfNbnOW2+9hZkzZ0KpVOL48eNYt24d0tLSEBwcjD59+mDOnDk1ulewp3TncoiylrQmIiIiIlOSsLSGoQ7IyMiAl5cX0tPT7V8fvHsxEDMLm4rux6u653HhnX4mgW52YTbu/epeAMCBkQfg6uxqr5ESERERVZuqxmsVygQfP368whe80yQ1spBRTbBWJ1CoFXBxuh0Euzq5wknhhCJdEdLz0xkEExEREVVAhYLgqKgoSJJUoT+3l9WLlyrJUBMsZQMA8oq0cHG6XR4iSRK8Vd64kXsD6QXpCEINnPxHREREVM0qNDEuLi4OFy9eRFxcHL777js0atQIH374IY4cOYIjR47gww8/RHh4OL777jtbj7fuMWSCkQMAyCtgmzQiIiKiqqpQJjg0NFT+/rHHHsMHH3yAhx56SN4WGRmJkJAQvPnmmxgyZIjVB1mnqX0AAN6GTDDbpBERERFVmcUt0k6cOIFGjRqV2t6oUSOcOnXKKoMiI8WZYA+pOBNcZKZNmgvbpBERERFZwuIguEWLFpg7dy4KCgrkbQUFBZg7dy5atGhh
1cER5JpgD+RAAR1yzZVDFK8qx0wwERERUcVY3Cd45cqVGDhwIBo2bCh3gjh+/DgkScJPP/1k9QHWecWZYEAfCJe3YAaDYCIiIqKKsTgI7tixIy5evIgvv/wSZ86cAaBf8GLkyJFwc3Oz+gDrPKUz4OwGFGbDS8pGXlHpmmDDxLj0fJZDEBEREVWExUEwALi5uWH8+PHWHguVRe0FFGbDE9nmyyEMQXABg2AiIiKiiqhQELx582b069cPzs7O2Lx5c7nHDho0yCoDIyMabyDzGrykbOSbmxjHcggiIiIii1QoCB4yZAgSExPh7+9fbgs0SZK4WIYtGPUKNlcTzHIIIiIiIstUKAjW6XRmv6dqUtz9wUsyXw5haJHGTDARERFRxVjcIo3sQM4ElzExrjhIzsjPgFbHTDwRERHRnVQoE/zBBx9U+IIvvvhipQdDZSjuFXynTLCAQGZBphwUExEREZF5FQqClyxZUqGLSZLEINgWjGqCr5iZGOesdIabsxuyC7ORlp/GIJiIiIjoDioUBMfFxdl6HFQeo5rg2ELzNdneKm9kF2azTRoRERFRBbAmuCYwygSbK4cAbrdJY4cIIiIiojur1GIZV65cwebNm5GQkICCggKTfYsXL7bKwMiIUU1wnplyCOB2mzR2iCAiIiK6M4uD4JiYGAwaNAiNGzfGmTNn0Lp1a8THx0MIgbvvvtsWYyTj7hBm+gQDRm3S8tKqa1RERERENZbF5RDTp0/H1KlTceLECajVanz33Xe4fPkyunfvjscee8wWY6TimmBPKRu5ZdQEc9U4IiIiooqzOAg+ffo0Ro0aBQBwcnJCbm4u3N3dMXv2bMyfP9/qAySYrhhXUGT2EENHCNYEExEREd2ZxUGwm5ubXAccFBSE2NhYed+NGzesNzK6rbgmWCUVQVeYY/YQ1gQTERERVZzFNcH33nsv9uzZgxYtWuChhx7Cf//7X5w4cQLff/897r33XluMkVzcISQlJKFFYVaa2UPk7hBskUZERER0RxYHwYsXL0ZWVhYAYNasWcjKysLGjRvRpEkTdoawFUmCTuUFZd5NFGSlQggBSZJMDjFkglkOQURERHRnFgfBjRs3lr93c3PDypUrrTogMk+h8QLybsJVl41bOYWo5+Zisp/lEEREREQVZ3FN8DPPPINdu3bZYChUHsmoV3BSRl6p/YYWacwEExEREd2ZxUFwSkoK+vbti5CQELz88ss4duyYLcZFJRl1iEjOzC+126t4f25RLvK1pfcTERER0W0WB8H/+9//cP36dbz55pv4+++/cffdd6NVq1Z49913ER8fb4MhEgC5V3BZmWAPZw8oJSUALphBREREdCcWB8EA4OPjg/Hjx2PXrl24dOkSxowZg/Xr1yMiIsLa4yMDo1Xjks0EwZIkccEMIiIiogqqVBBsUFhYiH/++QcHDhxAfHw8AgICrDUuKsmkJth8uYMhCM4oyKiuURERERHVSJUKgnfu3Ilx48YhICAAY8aMgaenJ37++WdcuXLF2uMjA6OaYHPlEAA7RBARERFVlMUt0ho0aICbN2+ib9++WLVqFQYOHAiVSmWLsZEx45pgMxPjgNsdIhgEExEREZXP4iB45syZeOyxx+Dt7W2D4VCZjLtDlJEJlleNY5s0IiKiWkkndDiechxNfZrC1dnV3sOp0Swuhxg3bhwDYHtw8wMABEg3kZyZD51OlDpELodgdwgiIqJaaWfCTjz161NYcmiJvYdS41VpYhxVo4A2AIDGikS46bKQml1Q6hDv4pIJlkMQERHVTpcyLwEALmddtvNIaj4GwTWFmy/gHQoAaK2IM79qHMshiIiIarWMfH0HqNzCXDuPpOZjEFyTNLgbANBWuojkzNJBsKEcIr2AQTAREVFtlFmQCUC/QixVDYPgmqRBewBAW0Usks30CmaLNCIiotrNEATnFOXYeSQ1H4PgmiRYnwmOVMSaXTDD08UTAMshiIiIaquMQn05RE4hg+Cq
YhBckwS1hQ4KBEs3kXOz9MIkcjlEfjqEKN09goiIiGo2ZoKth0FwTaJyR4Z7YwCA+43jpXYbukNohRaZhZnVOTIiIiKqBnIQXJjDhFcVMQiuYXLqtwUA+GeeLLVPpVRB46QBAKTnsSSCiIiotjEEwQICeVrzi2dRxTAIrmFEcV1waN4Zs/sNbdI4OY6IiKj2MbRIA1gXXFUMgmsYTaN7AADNdRdQVKQttZ9t0oiIiGqnfG0+CnS3F8tiXXDVMAiuYbxCo5AvnOEtZSPt6rlS+w1B8I3cG9U8MiIiIrIlQymEATPBVcMguIZROqtwXhEGAMiJP1hqf6inflW5i+kXq3NYREREZGMZBRkmP3PBjKphEFwDxauaAQDElcOl9jXxbgIAuHDrQrWOiYiIiGyLmWDrYhBcAyV5tAYAaFKOldrXxKc4CE5jEExERFSblAqCWRNcJQyCa6BM3zYAAJ+MU4C2yGRfuHc4AOB69nVkFWRV+9iIiIjINhgEWxeD4BpI6dcEGUIDZ10+kHLaZJ+Xygv+rv4AmA0mIiKqTYzbowEsh6gqBsE1kL+XBid0+pXjcLXsuuDzaeerc1hERERkQyVXg2UmuGoYBNdA/p5qHBP6sgdcPVRqf4R3BABOjiMiIqpNSnaHYCa4ahgE10ABHmocM2SCr5XOBEf4FAfBLIcgIiKqNUrWBLNFWtUwCK6BAjxVOKbTZ4JF0imgwPR/gnKbNAbBREREtYYhCK6nrgeA5RBVxSC4BvJxdUGq0hfJwhuS0AKJJ0z2N/ZuDAkSbubdRGpuqp1GSURERNZkCIIDXAMAsByiqhgE10AKhQR/D83tkogSdcEaJw1CPEIAMBtMRERUW5QKgpkJrhIGwTWUv1FJhNm6YG/WBRMREdUmholxAW76IDi3kDXBVcEguIYK8FDjuDCfCQZuT447f4tt0oiIiGoDQyY40C0QADPBVcUguIYynhyHmxeB3Fsm+9krmIiIqPYQQsiZYMOiWKwJrhqHCIJXrFiBsLAwqNVqdOrUCQcPHizz2NWrV6Nbt27w8fGBj48PevXqVep4IQRmzJiBoKAgaDQa9OrVC+fP165g0N9TjXS444ZLA/2Ga0dM9hv3ChZCVPfwiIiIyIrytHko0hUBYE2wtdg9CN64cSOmTJmCt956C4cPH0bbtm0RHR2N5ORks8fv2rULjz/+OHbu3In9+/cjJCQEffr0wdWrV+VjFixYgA8++AArV67EgQMH4ObmhujoaOTl5VXX3bK5AE81AOC8U1P9hhIlEaFeoXBSOCGnKAfXs69X9/CIiIjIigylEEpJCV+1LwAGwVVl9yB48eLFGDduHMaOHYuWLVti5cqVcHV1xZo1a8we/+WXX2LChAmIiopC8+bN8cknn0Cn0yEmJgaAPgu8dOlSvPHGGxg8eDAiIyPx+eef49q1a/jxxx+r8Z7ZVoCnCgBwXO4QYZoJdlY4o5FXIwCcHEdERFTTGYJgDxcPuDm7AWA5RFXZNQguKCjAoUOH0KtXL3mbQqFAr169sH///gpdIycnB4WFhahXT984Oi4uDomJiSbX9PLyQqdOncq8Zn5+PjIyMky+HJ2/hz4TvD8/TL+hnOWTOTmOiIioZjPUA3u4eMDV2RUAUKgrRKG20J7DqtHsGgTfuHEDWq0WAQEBJtsDAgKQmJhYoWu88sorCA4OloNew3mWXHPu3Lnw8vKSv0JCQiy9K9XOkAn+K7chhKQEshKBjGsmx3ByHBERUe1gnAl2dXKVt7MkovLsXg5RFfPmzcOGDRvwww8/QK1WV/o606dPR3p6uvx1+fJlK47SNrw0znBxUiAPKhT6NtNvLJENNp4cR0RERDWXIRPs6eIJZ6UznBROAIDcIvYKriy7BsF+fn5QKpVISkoy2Z6UlITAwMByz120aBHmzZuH7du3IzIyUt5uOM+Sa6pUKnh6epp8OTpJkuRscHq9NvqNV00XzTD0Cr6YflGeUUpEREQ1j3EmGICc
DWZdcOXZNQh2cXFB+/bt5UltAORJbp07dy7zvAULFmDOnDnYunUrOnToYLKvUaNGCAwMNLlmRkYGDhw4UO41a6KA4rrgRLeW+g0lVo5r4N4AGicNCnWFSMhMqO7hERERkZUYgmBPF32izlAXzHKIyrN7OcSUKVOwevVqrFu3DqdPn8bzzz+P7OxsjB07FgAwatQoTJ8+XT5+/vz5ePPNN7FmzRqEhYUhMTERiYmJyMrKAqDPkE6ePBlvv/02Nm/ejBMnTmDUqFEIDg7GkCFD7HEXbcbQJu2iylAOcQTQ6eT9CklR40oi1vy7Bgv/XsjexkREREaYCbY+J3sPYPjw4UhJScGMGTOQmJiIqKgobN26VZ7YlpCQAIXidqz+0UcfoaCgAI8++qjJdd566y3MnDkTADBt2jRkZ2dj/PjxSEtLw3333YetW7dWqW7YEfkXl0Oc1YYATmogP12/epxfhHxMhHcETtw4gQtpF9AHfew11Ao5eeMklhxaAgAY0HgAWvi2sPOIiIiIHEOZQTAzwZVm9yAYACZOnIiJEyea3bdr1y6Tn+Pj4+94PUmSMHv2bMyePdsKo3NchkxwYlYRENQWuHxAPzmuRBAM1IxewSuPrZS/P3PzDINgIiKiYsYt0oDb5RCcGFd5di+HoMozTIxLzswHgu/Wb7xmfnKco/cKPpV6Cruu7DL5mYiIiPRKBcEsh6gyBsE1mGFiXFJGHtCgvX5jiTZphl7BCZkJyCty3GWjDVngemr9oienb56253CIiIgcSsmJcRpnDQCWQ1QFg+AazN/TOAguzgQnngCMVo/x0/jBS+UFndAhLj3OHsO8ozM3z2Dn5Z2QIGFG5xkAgLM3z7KtGxERUbFS3SGYCa4yBsE1mGFiXEZeEXLdQwG1F1CUByTfLiWQJEnOBjtqXbAhC9y3UV/0aNgDGicN8rR5iE+Pt+/AiIiIHESpiXFskVZlDIJrMA+VEzTOSgBAcpZRXXAZK8c54vLJZ2+eRUxCDCRIeC7yOSgVSrSop58Qx5IIIiIiQAjBFmk2wCC4BjNeNS4pI/92SUSJleOa+BRngh2wV/DHxz8GAESHRaOxd2MAkLtCcHIcERGRvgOEVmgBMBNsTQyCazjTumDD5LgSHSIctE3auVvn8Nul3yBBwrORz8rbmQkmIiK6zdAZwknhBLVS/7lvyASzRVrlMQiu4QKMg2BDOUTKaaAgWz4m3DscAHA9+7r85xRH8PExfRa4d2hvuZUbcDsTfObmGeiEzuy5REREdYUhCPZ08YQkSQAAjVNxdwiWQ1Qag+AaLsDDqFewZxDgEQwIHXD9mHyMl8oL/q7+AIDYtFi7jLOkC7cu4LdLvwEAnm37rMm+xl6NoVKqkF2YjcuZl+0xPCIiIodRsjMEwHIIa2AQXMOZZIIBIOQe/b+xO02OM3SIcJTJcR8f/xgCAr1De6OpT1OTfU4KJ3nb6VSWRBARUd1WclIcwIlx1sAguIbzlyfGFQfBzR7S/3vmF5PjHGlyXGxaLLbFbwMAk1pgY4a64FM3OTmOiIjqNrNBMDPBVcYguIYzZIKTM/P1G5r0ASQlkHwSuHlRPs6RJscZssA97+qJZvWamT3GUBfMTDAREdV1JZdMBpgJtgYGwTWcHARnFAfBrvWAsK76789skY8zTDyzdxB8Mf0itsZtBQA81/a5Mo+Tg+CbpyGEqJaxEREROaJyyyGYCa40BsE1nH/xxLis/CJk5RcvM9x8gP5fo5KIxl6NIUHCzbybuJF7o7qHKVt1fBUEBB4IeQDN6zUv87gm3k3gpHBCen46rmdfr8YREhERORZzmWCNs747RG5RLjspVRKD4BrOTeUED5UTACC5ZF1wwn4gKwWAvpVKiEcIAPtlg+PT4/Fr3K8Ays8CA4CL0kWezMeSCCIiqsvMdocozgQDQF5RXrWPqTZgEFwL1DdeNQ4AvEOAoCgAAjj3q3ycXBdsp8lxq46vgk7o0L1hd7T0bXnH4w0l
ESdTT9p6aERERA7LXBCsdlJDgr5nMEsiKodBcC0Q4GGYHGf0P0EzJRH2rAu+lHEJv8Tpx/J82+crdA5XjiMiIjJfE6yQFFwwo4oYBNcCASXbpAFA8/76f2N3AvlZAG63SbNHr+Dvz38PndDhvgb3oZVfqwqdY8gEn0o9xclxRERUZ5kLgoHbbdK4dHLlMAiuBW4vmJF/e6N/C8CnEaDNB2JjANxeMOPCrQvVHlQeST4CAIgOi67wOU19mkIhKXAz7yZSclNsNTQiIiKHZm5iHMAOEVXFILgW8C+5ahwASBLQorgk4vTPAIC7PO+Ck8IJOUU51dpxoUBbgJM39HW97fzbVfg8jZMGjb0aA+DkOCIiqrvulAlmOUTlMAiuBQzlEMnGmWDgdl3wuW2AthDOCmc08moEADh/q/pKIk6lnkKBrgD11PVwl8ddFp3LleOIiKgu0wmd2YlxADPBVcUguBaQyyEyS7RIaXgP4FYfyE8H4vcAuN0hojrrgo8mHwUAtK3fFpIkWXQuV44jIqK6LLswGwL6EsaSmWBDr2BmgiuHQXAtIHeHyMg3rfVVKIFm/fTfF3eJkOuCq7FDhKEe2JJSCANDKzV2iCAiorrIkAVWKVVQKVUm+5gJrhoGwbWAf3E5RG6hFpmGVeMMjFulCSF3iKiuXsFCCBxNOQqgckGwYVW5xOxE3My7ac2hERERObyy6oEBoyCYmeBKYRBcC6idlfDSOAMwWjXOoFF3wMUdyLwGXDssl0NcTL+IIl1RyUtZ3eXMy7iZdxPOCme5tMESbs5uCPMMAwCcST1j5dERERE5trI6QwBGE+OYCa4UBsG1REDJVeMMnNVARC/992d+QbB7MDROGhTqCpGQmWDzcRlKIVr5tir1Z5yK4uQ4IiKqq8rLBHOxjKphEFxL+HuYaZNmYFQSoZAU1bp8clVKIQw4OY6IiOqqcjPBTlwsoyoYBNcS/mVlggGgSW9A4QSknAFuXKjWDhFyZwj/tpW+hvHKcURERHWJ3B7N2bPUPvYJrhoGwbVEgLkFMww03kBYN/33Z3+ptkxwen663IUiqn5Upa9jKIe4knUF6fnp1hgaERFRjSAHwSozQTC7Q1QJg+BaIsCjeMGMkr2CDYxWj4vw0QfB526ds+mYjqccBwCEeobCV+Nb6et4qbzQwL0BAODszbNWGRsREVFNUG53CE6MqxIGwbXE7UywmXIIAGj2kP7fK3+jlcoPCkmBhMwEXMu6ZrMxGSbFta1f+VIIA0M2mP2CiYioLqlITTDLISqHQXAt4V9eOQQAeAYDDdoDEPCK2yuXJ/x55U+bjclkUlxuGnDkSyD3VqWuxbpgIiKqi5gJth0GwbWEoUVaqVXjjDXvr//3zC+4v+H9AIA/rvxhk/EU6grx741/AQDtPBoBn/UH/jcBWP8wUFhGoF4OrhxHRER1ERfLsB0GwbVE/eKa4AKtDum5heYPaj5Q/2/cH+ju3x4AcPD6QZu8eM7dPIfcolx4OLuj0Q8TgSR9QIxrh4GfJgFlBeplMKwcF58ezxc7ERHVGYZyCHPdITTOxX2CmQmuFAbBtYTKSYl6bi4AyqkLrt8U8G0CaAsQnnwBDdwboEBXgIOJB60+HkM9cFRuLhTJpwD3AGDAEkBSAsc3AH99aNH1/DR+8Hf1h4DA2VucHEdERHVDRTLB7BNcOQyCaxF/D0Ov4HLKDYpLIqSzW2xaEnH0+gEAQFRGqj4AHv0z0OE/QPS7+gO2vwHE/m7RNVvW05dEsC6YiIjqinJbpBXXBBfpilCoLeOvwFQmBsG1SLm9gg0Mq8ed2477gzoDAP68/GfZdcSVIDKTcSRBH1i3U7gBY37RZ6EBoNOzQNQTgNABm8YCNy9W+LpcOY6IiOoSrU6LrMIsAOUvmwywJKIyGATXIv5yr+AyyiEAfYcI90CgIBP35OVB46RBcm4yztw8Y51BZKXg+voBSFYIKIVA6xHfAX5Nbu+XJKD/YqBBByAvDfh6
JJCfWaFLs00aERHVJYYAGAA8nEsHwc4KZ7go9KWQnC9jOQbBtUiFMsEKBdBc3zNYdW477g26F4CVSiKyUoB1A3E0+zIAoLl3E2gCWpU+zlkNDP9CH4ynnAZ+eA7Q6e54eUMmODYtFvnacgJ9IiKiWsBQCqFx0sBZ6Wz2GLZJqzwGwbWIoU1auUEwcLtV2skf5C4Ru6/srtqNZyUD6wYAKadxxKMeAKBd8L1lH+8ZBIz4ElC6AGd+Bv6Yf8ebCHANQD11PWiFFudvna/aeImIiBycPCnOTBbYgG3SKo9BcC3if6dV4wwa9QD8WwG5t9DtzC4AwIkbJ3Aj90blbjgrGVg3EEg5A3gE42hAOAAgyj+q/PMadgAGvq///o95wOmfyj1ckiS5JIKT44iIqLYrb7U4A2aCK49BcC1iKIdIvlMmWOkEDFoGQIL/v9+jpVtDCAjsubrH8hvNzwTWDdIHwJ4NkP3kJpzLvAQA8qp05YoaCXR6Xv/9988CSSfLPdywaAaDYCIiqu3Ka49mwExw5TEIrkXkVeMy86HT3aHbQ8P2QKfnAAD337gCoBJLKAsBbP4/fV2vRxAw+icc12ZCJ3QIdgtGgFtAxa7T522g0f1AYTawYSSQc7PMQ+UOEZwcVysk5yRj39V99h4GEZFDKq89mgEXzKg8BsG1iJ+7CpIEFOkEbuYU3PmEB98AvELQ/WYiAGDv1b2W9Rk8uBo4+QOgcAKGfQ74huNo8lEAFSiFMKZ0Ah5bB3iHArfigW/HljlRzlAOcf7WefZErAXe3Psmnt3xLPZdYyBMRFRSRcohDG3SGARbjkFwLeKsVMDXrTgbfKe6YABQuQMDlqBlQQF8tVrkFOXgn6R/KnZjVw4B217Tf997DhDSEQBwNOUoAAuDYABwrQc8/jXg7AZc3AWc3mz2sAbuDeDh4oFCXSFi02Mtuw1yKEW6IhxOOgxAv3w3ERGZ4sQ422IQXMvIHSIy71AXbNCkNxRtHsP9OfolF/+8vPPO5+TcBDaNAXSFQItBwL36ml6tTotjKccAAO3821k8dgS0Arr8n/77P+abzQZLkiSvHMdFM2q22LRY5Gn1z9MTN07YeTRERI6nQjXBnBhXaQyCa5mGPvo/i5xNrNgCFACA6LnoXqQEAPxx4efyV4/T6fR9fdMTgHqNgcHL9QtgALiQdgHZhdlwdXJFE+8mZV+jPPc+D6i8gORTwOn/mT3EUBf87flvcfbm2crdDtmdceD7741/odVp7TgaIiLHI9cEu5RdE2zIBOcW5lbLmGoTBsG1TKdGvgCAfbGpFT/JvT7u7fYGnIXA5aJMxCeU0zN471Lg/DZAqdLX8aq95F2GeuDI+pFQKpSVGD0AjTfQeYL++13ms8G9QntBKSlxPOU4Hv3pUUyMmSjfNtUc/974V/4+pygHF9MrvoQ2EVFdwBZptsUguJbpEqEPgv+Ou4mCojuvwmbgdvdodJD0L6Q/d76h7/xQUvwe4Pc5+u8fWggERZrsPpJyBEAlSyGMdXpOnw1OOQ2c+rHU7rb122LjgI3oG9YXEiT8ceUPPPXrU3h629PYf21/+ZlschgnU/Xt8JwUTgBYEkFEVBJbpNkWg+BaplmAB3zdXJBbqMWxK2kVP1GS0L3VkwCAP/KuA0e+MN2fmQR8+x9A6IDIEcDdo0pdolKdIczReAOdX9B/X0ZtcLN6zbCw+0JsHrIZQyOGwklywsHEgxj/23g8seUJ/J7wO3Si4v8JoOqVV5Qnr/rXO7Q3AAbBREQlGTLB5bVIk4NgZoItxiC4lpEkCZ3D9dngvRcsWwHu/qZDAQCH1Spk/Pa6PvAFAJ0W+O5pICsJqN8CGLBYrgM2SM5JxtWsq1BICkT6RZa8tOXufU5fapFyBjj1Q5mHhXmFYXbX2djy8BaMbD4SKqUKJ26cwKSdk/DI5kcQcymm6mMhqztz8wy0QgtftS/6hPYBAJxIYRBMRGTMoolxzARbjEFwLdQl3A+AhXXB
AEI8Q9DYqxG0koR9ikJg6yv6HTvfBeJ369uXDVsHuLiVOteQBW7i3QTuLu5VGj8AfQDceaL++z8W6APxcgS5B2F6p+nY9sg2PN36abg5u+FC2gVM3jWZwZUDMtQDt/ZrjTZ+bQAA59PO802ciMiIPDHOmZlgW2AQXAt1Kc4EH0m4hdwCy2bcd2/YAwDwp6urfiGMrdOB3Yv0Owd9ANRvZva8I8n6euAql0IY6/Ts7WzwybKzwcZ8Nb6Y3H4ytj+6HV2DuwIAtsZvtd6YyCr+TdUHwa38WiHALQD+Gn/ohI7LYRMRFSvSFcmBbbmLZRSvGJdbxO4QlmIQXAuF+rqigbcGhVqBfy6VvQSxOfc3vB8AsNvTG1oA+OtD/Y4OTwNtHi3zPEN/YKsGwWovoLOhb/Cds8HGPF088VjTxwAAMQkxnCznYE7e0E+KM2SB29TX/8u6YCIivayCLPn78v7CyolxlccguBYyrQu2rCQiyj8KHi4eSNMV4IRfqH5jUBTQd26Z5+QW5coLV1S5M0RJnZ4F1N7AjbMVzgYbdGnQBWqlGlezruL0TS6s4SgyCjIQnxEPAGjl2wrA7WCYQTARkZ5hUpyrk6vcRccctkirPAbBtZShJGJ/rGWT45wUTrgv+D4AwB+Rg4B7ngFGfAU4qco8598b/6JIFMFf449gt+DKD9octSfQxVAbPN+ibLDGSYP7Gujvy45LO6w7Lqo0Qxa4gXsD+Kh9AOh7SwMMgomIDCoyKQ4wWiyD5RAWYxBcSxkywSeupiM9t9Cic+8P0ZdE/JF2Cuj/HuDVoNzjDaUQbf3bQirRNcIqOhqyweeAf7+36NSeoT0B6EsiyDEY+gO39mstb2vl2woKSYHE7ESk5KTYa2hERA6jIu3RgNuZ4NyiXK68aSEGwbVUkJcGjf3coBPAwTjL6oLvC74PCkmB87fO41rWtTseb5gUZ/VSCIMqZIO7N+wOJ4UTLqZfxMU0rkjmCAydIQwlEID+TTzcOxwAcPzGcbuMi4jIkciZYOeKZYIBIE+bZ9Mx1TYMgmsxw+pxlvYL9lZ7I6p+FADgzyt/lnlcYnYiNpzZgENJhwBAPscmOj4LaHyA1PPAv99V+DQPFw/cG3QvAGaDHYUhCDbUAxsY+kuzpR0RkVF7NJfyM8EqpQoKSR/OcXKcZeweBK9YsQJhYWFQq9Xo1KkTDh48WOaxJ0+exCOPPIKwsDBIkoSlS5eWOmbmzJmQJMnkq3nz5ja8B47L0C94v4X9goHbXSKMg2AhBC7cuoBVx1dhxM8j0Pvb3njnwDvILsxGPXU9NPe14eOs9jTqG2xZNrjXXb0AAL9d+s0WIyMLpOSkICknCQpJgZa+LU32GcojWBdMRFTxmmBJktgruJLsGgRv3LgRU6ZMwVtvvYXDhw+jbdu2iI6ORnJystnjc3Jy0LhxY8ybNw+BgYFlXrdVq1a4fv26/LVnzx5b3QWHdm9jfSb4bFImUjLzLTrXEAQfuH4Af13/C4v+XoQBPwzA0M1DsezIMpxMPQkJEtr5t8N/2/8XGwdshLPC2er3wUQnQzb4AnDi2wqf1iOkBxSSAqdvnsbVrKs2HCDdiSEL3NirsVzHZmAoj/j3xr+sayOiOs9QE3ynIBhgm7TKsmsQvHjxYowbNw5jx45Fy5YtsXLlSri6umLNmjVmj7/nnnuwcOFCjBgxAipV2d0KnJycEBgYKH/5+fnZ6i44tHpuLmgRpP8zyl8XLcsGR3hHINgtGAW6AozbPg7rTq1DQmYCXBQuuL/h/ZjZeSZ+H/Y7Pu/3Oca0HoNAt7L/U2I1Kg+gS3Hf4D8XANqiCp3mq/HF3f53AwCXUbYzwyIZxpPiDCK8I6Bx0iCnKAcX01m/TUR1m0VBMNukVYrdguCCggIcOnQIvXr1uj0YhQK9evXC/v37q3Tt8+fPIzg4GI0bN8YTTzyBhISEco/Pz89HRkaGyVdt0bW4S8Q+C1ulSZKEAeEDAOhfgAMaD8Di
Houxe8RurOi5Ao80fQR+Gjv856LjeEBTT58N/rfi2eBeofrnGeuC7cvQHq21b+kgWKlQynXChowxEVFdVdFyCEDfEhRgJthSdguCb9y4Aa1Wi4CAAJPtAQEBSExMrPR1O3XqhM8++wxbt27FRx99hLi4OHTr1g2ZmZllnjN37lx4eXnJXyEhIZW+fUdjmBy3rxJ1wc+3fR7/G/I//DH8D8ztNhe9Q3uX+hN2tTPOBu9+D9DpKnRaz7v0rdKOJB/BjVzL/kNA1iGEKDcTDNxeOY4dIoiorqvoxDjAKAhmJtgidp8YZ239+vXDY489hsjISERHR2PLli1IS0vDN998U+Y506dPR3p6uvx1+fLlahyxbd0TVg9KhYRLqTm4csuyF4eTwgmNvRrbvtbXUvc8o19S+cY54MzPFTol0C0QbfzaQEDg94TfbTxAMudK1hWk56fDWeGMpj5NzR7DDhFERHqWBMFyOQQzwRaxWxDs5+cHpVKJpKQkk+1JSUnlTnqzlLe3N5o2bYoLFy6UeYxKpYKnp6fJV23hoXZGZEMvAJXrEuGQ1J7APeP03+9ZDAhRodMM2WCuHmcfhhKH5vWaw1lp/j9Whslx59PO882ciOo0S8oh2B2icuwWBLu4uKB9+/aIibldo6nT6RATE4POnTtb7XaysrIQGxuLoKAgq12zpula3CqtMiURDuve5wEnDXDtCBD3R4VOMdQF/534N9Lz0205OjKjrP7AxgLcAuCv8YdO6HAq9VR1DY2IyOFYFAQ7c+nkyrBrOcSUKVOwevVqrFu3DqdPn8bzzz+P7OxsjB07FgAwatQoTJ8+XT6+oKAAR48exdGjR1FQUICrV6/i6NGjJlneqVOn4o8//kB8fDz27duHoUOHQqlU4vHHH6/2++couhhNjhMVzJo6PDc/4O5R+u93L67QKaGeoWji0wRFogh/XKlY4EzWYwiCy6oHNjDUBbNfMBHVZWyRZnt2DYKHDx+ORYsWYcaMGYiKisLRo0exdetWebJcQkICrl+/Lh9/7do1tGvXDu3atcP169exaNEitGvXDs8884x8zJUrV/D444+jWbNmGDZsGHx9ffHXX3+hfv361X7/HMXdoT5wcVIgKSMfF29k23s41tPl/wCFkz4TfPVQhU4xLJzBkojKySvKw9Q/pmLo/4biVt6tCp9XpCvC6ZunAVQgCPZjEExEdVuBtkBeApkt0mzHyd4DmDhxIiZOnGh2365du0x+DgsLu2Mmc8OGDdYaWq2hdlai/V0+2H8xFftiUxFe393eQ7IO7xCgzTDg2Ff6bPCIL+94Ss+7euKjYx9h37V9yCnMsX+3i3L8GvcrFJICfUL7QJIkew8HOYU5eHHnizhw/QAA4IvTX+D/2v1fhc69mH4RuUW5cHVyRZhnWLnHRtYvnhzHIJiI6ihDKYQEyaJMMMshLFPrukOQeV0NrdIu1LL2YF0n6f898zOQcvaOhzf1aYoQjxDka/Ox++puGw+u8i5nXsa0P6dh6h9T8cbeN+z+xpZdmI0JMRNw4PoBKCUlAODrM18ju7Bif1kw9Adu5dcKSoWy3GNb+baCQlIgMTsRKTkpVRs4EVENZAiC3Z3doZDuHKqxO0TlMAiuIzoXT47bfzEVOl0tqQsGAP/mQHP9oh7Ys/SOh0uSdHvhDAdePW7v1b3y95tjN+PJLU8iIaP8RV9sJaMgA+N/G49DSYfg7uyOtX3XIswzDJkFmfj2XMUWLJHrgc0sklGSq7Mrwr3DAbBfMBHVTZZMigPYHaKyGATXEZENveDmokRaTiFOJ9aeFfEAAPdN0f974hsg7c49ng11wX9c+QP52nxbjqzSDEFw79DeqKeuh3O3zmHEzyOwM2FntY4jPT8d47aPw/GU4/B08cQnfT5BO/92GNNqDADg81Ofo1BbeMfrGBbJaOVXdmcIY+wXTER1maVBsMaZK8ZVBoPgOsJZqUDHRvUA1KJ+wQYN2wON7gd0RcC+ZXc8vLVf
a/i7+iOnKEeub3UkBdoCHEjUj2tcm3H4ZsA3aFu/LTILM/HizhfxweEPoNVpbT6Om3k38Z9t/8Gp1FPwUflgTfQaOYgdGD4Q9TX1kZyTjJ8vlr9gSb42H+dunQNw50lxBobjWBdMRLXB+VvnMWbrmAr/xz6jsOKdIQBmgiuLQXAd0qW4JGJvbasLBm5ngw9/DmSXf/8UksKhF844knwEuUW58FX7olm9ZghwC8Da6LV4osUTAIDVJ1bj2R3P4mbeTZuN4UbuDfxn639w7tY5+Kp9sSZ6DZrVaybvd1G64KmWTwEA1p5cC50oe/nqszfPokhXhHrqegh2C67Q7Rs6RPx7499qCfiJiGzpu/Pf4VDSIaw9ubZCx2fkVzIIZibYIgyC65AuxZPjDsbdRKG27KClRmrcAwhuBxTlAgdW3vFwQ0nEzss7UaQrsvHgLGMohejaoKs8IcJZ6YxXO76K+d3mQ+OkwYHrBzDsp2E4nmL9mtnE7ESM3ToWsemx8Hf1x2d9P0OET0Sp4x5r+hg8nD0Qlx6HXZd3lXk940UyKtrlIsI7AhonDXKKchCXHleZu0FE5DCuZV0DABxOOlyhfv0W1wSzRVqlMAiuQ1oEesLb1RnZBVocv1LLVkyTpNvZ4IOrgLzy657vDrgbPiofpOWn4VBSxXoMV5c91/YAALoGdy2176HGD+Grh75CmGcYknKSMHrraGw4s6HcTOz/t3ff4VGV2QPHv3d6eq8kJKH3jnQBQdB1/dnFjmV17XXdtaxtXRdX197bWlbF3hUQ0SC9d0IgQCCF9F6n3d8fNzNJIKQnk3I+z3OfuZm5c+fNDSRn3jnvOS2RUZbB1UuvJrUklWifaN474z3iA+IbPNbX5MuCIQsAeGf3Oyf9xb4nX6sM0dxUCAC9Tu/uLCcpEUKI7u5YudbzIL8qn/TS9CaPdwXB/ib/Zp3fXSLNJiXSWkKC4F5Ep1OY0k+bDV53sAemRAz5I4QMhKpi2NL4R04GnYHZfWcDXSslIqcihwOFB1BQmBLdcPvwAUEDWHzWYub2nYvdaeeJDU8w+ePJXPnTlfxz/T/5cv+X7Mnb0+SiP6fq5FjZMdYfW89nyZ/x9KanWbhkIRllGcT4xvDuGe8S6xfb6DkuH3o5Jp2Jnbk7T/pmormd4o7n6hwnFSKEEN2dayYYYGvO1iaPb3EQXGcmuMd0hu0EHm+WITrX1P4hLNmdxZqUfG49baCnh9O+dDqYfid8ewusewVO+TMYLSc9fE7fOXx14Ct+Pfor90+6v1m1GDuaKxViROgIgixBJz3O1+TLs7Oe5f097/PajteosFewPXc723O3u4/RK3oSAhIYGjyUwcGD8TH6cLTkKEdKjnC09ChppWkNBsrx/vG8Pe9tInwimhxvqFco5w44l8/2f8Y7u99hQuSEeo+XWcvc6Qyumd3mkgoRQoieoMxa5m6BDNq6j3MGnNPoc1pbIs2hOrA6rZj15laOtneRILiXmTpAWxy35WghVTYHFmPjjQu6nZEXw2//gpIMrZPchGtPeujkqMn4Gn3JqcxhV94uRoeN7sSBNmxNZm0+cFMUReHqEVdzxTCthnBSQRLJBckkFSSxr2AfRdVFpBSlkFKUwveHvm/wHAadgRjfGPr696WvX18SAhKYHz+fAHNAs8d89fCr+eLAF6zOWE1yQXK9BXR78/eiohLtE02IV0izzwm1M8cHig50+e5+QghxMpnlmfW+bs5McEurQ3gZvNz7FbYKCYKbSYLgXqZfqA8R/mayS6rZeqTQHRT3GAYTTL0Nlt4Ha16AsVeBvuF/5ia9iVNjTuWnwz+xPHW5x4Ngu9POusx1QMP5wCdj0BnoF9iPfoH9OKvfWQCoqkp2RbY7KE4uSKbaUe0OduP84+jr35conygMurb9Goj1j+X0uNNZlrqMd/e8y5MznnQ/1tL6wHVF+kQS7hVOTmUOe/P3njDLLIQQ3cGxMi0fONonmszyTA4XH6awqrDR
T/taOhOs1+kx681UO6qpsFcQxMnPLWp5/vNf0akURXGXSlvb0+oFu4y7CryCoTAV9n7T6KHz4uYB8NG+j+p1afOE3Xm7KbGW4G/yb3H+7PEURSHSJ5KZsTO5cfSNPDf7OV6d+yr3nXIflw29jGl9phHrF9vmANjl2hHajPvSw0vJKMtw39/afGAXV16w6zxCCNHduGaChwQPISEgAYDtOdsbfU5LS6SBlElrDQmCe6Ep/bWPpVf1xHrBACYfmHyTtr/6OWhkkcDsvrM5I/4M7E47d/52J1uzm/6YqqO4UiEmR01ut+C0swwLGcaUqCk4VAfv73nfff+ePK0yhKvub0u5nieL44QQ3ZV7Jtg3mnHh4wAtL7gxLV0YB1ImrTUkCO6FTh0YhkGnsCOtiLU9NRA+5Xow+UL2bth38o5mOkXHv6b/i+l9plPlqOKWFbeQlJ/UiQOt5ZqJnt5nukdev62uHanNBn994GsKqgrIr8wnszwTBYVhIcNadc5RYTWL46RMmhCim3J9OhbtG83Y8LFAxwTBrrxgmQluPgmCe6HIAAuXT+oLwBM/JeF09sByKl5BMOlGbX/FP8Bx8oYYRr2RZ2c9y7jwcZTZyvjz8j9zqPhQJw1UU1hV6P7If2r01E597fYyKXISw0OGU+Wo4uOkj931gRMCEvAx+rTqnMNDhqNTdGSVZ5FbkduewxVCiE7hqhEc7VMbBO/O302VvarB46sd1VidVqCF6RAyE9xiEgT3UrfPGYif2cCezBK+2Z7R9BO6o2m3a7nBefu1ShGN8DJ48fKclxkaPJTC6kJu+PmGenUdO9r6Y+tRURkYNLBZpcm6IkVR3LnBi/ctZlPWJqD1+cCg/VLvH9gfkJQIIUT35PpbEuUbRaxfLCGWEOxOu3ui4HiuWWCdomtRVRzJCW45CYJ7qRBfMzfP1lrhPr0smSqbw8Mj6gCWADj1L9r+b4vA2vgvBj+TH6+f/joJAQlkV2Rz/c/Xk1fZOekiqzO0LnHTo7tnKoTLnL5ziPOPo8RawkdJHwFtC4KhNi9Y6gULIbqbakc1+VXaIvRon2gURWFcRON5wa6awr5G3xbVr3d3jbNL17jmkiC4F7tmWjx9Ar04VlzFO6sPe3o4HWPinyCgL5RmwsY3mjw82BLMm6e/SbRPNEdLj/Ln5X+muLpjW0w7Vac7H7g59YG7Mr1Oz9XDrwbA5rQBMCKknYJgyQsWQnQzrkVxXgYvd/31pvKCW1oezcU1ayxBcPNJENyLWYx67p2vNTZ4LfEgeWWNt9ntlgxmmP2Atr/6OagoaPIpkT6RvDXvLUIsIewv3M8tK27p0I+X9hfuJ78qHy+Dl/uXY3d2dv+zCfXSyvAZdIZ6zTNawzVrsjV7KzkVOW0enxBCdBZXebQ+vn1QFAWoHwQ7VecJz3GVR2vJojiQdIjWkCC4l/u/0dGM7BNAWbWdF3454OnhdIxRF0P4cKgq1gLhZujr35c3572Jv8mfHbk7uOO3O7A6rB0yPFcqxKTISZj0pg55jc5k1pu5ctiVAAwNHtrm76lfQD/GhY/Drtr5LPmz9hiiEEJ0Cnc+sE+U+77BwYPxMnhRai3lYNHBE57T1plgWRjXfBIE93I6ncIDfxgKwMcbj5KSU+bhEXUAnR7mPqrtb3gDitOb9bRBQYN4de6reBm8WH9sPfesvIddubvaPRjuKakQdV059EruGHcHD05+sF3Od9nQywD4fP/nHfZmRAgh2psrCI72jXbfZ9QZGRWqlX9sKCWiNeXRQGaCW0OCYMGU/iHMHRqBw6ny5JJ9nh5Oxxh4OsRNA0c1JC5q9tNGh43mxdNexKgzkpiWyGU/Xcbkjydz2Y+XsWjDIn449ANHS46iNtKQozFl1jJ356CeFAQb9Ub+NPJPDA9pebvkhpzW9zTCvcMpqCpgWeqydjmnEEJ0NFd5tLozwQBjI06eF1xqk5ngziJBsADgvjOHoNcp/JKUzbqe2E5ZUWDuY9r+
9o8hp/kNMSZHTeb1ua8zo88MAs2B2Jw2duXt4uN9H3P/qvs56+uzmPHpDG785UZe3f4qR0qONPvcG7I2YFftxPnHEesX29Lvqtcw6owsGLwAgI+TGi93J4QQXUVDM8EAY8NOHgS7qkO0NAiWZhktJ0GwAGBAuC+XnaI10PhXT22gETsRhp4NqlNroNECp0SdwqtzX+X3Bb/z03k/8eSMJ7li6BWMChuFSWeiuLqYNRlreG3Ha1z6w6UcKGxefvXajLUATIvuObPAHeWCgRdg1BnZnb+bnblSM1gI0fWdbCZ4VNgodIqOjLIMssuz6z0mOcGdR4Jg4XbH3IH4mg3syijmux2d1yiiU532MCg6SP4Jjq5v8dMVRSHWP5az+p3F3075Gx/94SPWX7aexWct5oFJDzAsZBiltlJuXnHzCb/YjqeqKmsye14+cEcJ8QrhzIQzAfh4n8wGCyG6NpvTRnaF9negj2+feo/5mnwZHKRVztmWW3822FUdosVBsEGC4JaSIFi4hfqauWmW1p2rxzbQCBsEY7XKBSx/BFqZy1uXUW9kROgILh1yKW+e/iYJAQlklWdx84qbKbOefKFhakkqGWUZGHVGJkRMaPM4eoPLhmgL5JalLuu0RiZCCNEaORU5OFUnRp2REK+QEx4fEz4GgG3Z9YPgVi+MM8rCuJaSIFjUc930BKICLGQUVfLe2lRPD6djzLoPDF6Qth72L23XUweYA3ht7mvuGsN3J97tbhpxPFdViPER41vUGrM3Gx46nFFho7A77Xy+/3NPD0cIIU6qbnm0hjq/jQtvuHNcq9MhpGNci0kQLOqp20DjlV9TKCjvgeWo/KNh8o3a/i+PgrN9Z7z7+Pbhlbmv4GXwYt2xdTy69tEGq0eszqxpldyne7dK7myu2eDPkz/H5mj4DcbJqKrKtynfsip9VUcMTQgh3Nz5wL5RDT7umglOLkym3Fbuvt9VHUJKpHU8CYLFCc4d04fh0f6UVtt5cUUPbaAx7U6wBELuPtixuN1PPzxkOP+Z+R/0ip7vDn7Hqzterfd4lb2KzVmbtaHIorgWmRc3j1CvUHIrc/nl6C8teu6nyZ/y9zV/567Eu6iyV3XQCIUQok5lCJ/oBh+P9Imkj28fnKqTHbk73Pe3dibYy1hTHUJygptNgmBxAp1O4cGaBhofrj/SMxtoeAXCqX/R9n/7F9ja/+OjU2NO5e+T/w7A6zte58v9X7of25K9hWpHNRHeEfQP7N/ur92TGfVGLhp0EdCycmnbc7bz703/BqDaUc3uvN0dMj4hhICmZ4KhTl5wTUqEqqqtLpHmmgmudlRjd9pbOtxeSYJg0aCpA0KZMyQcu1Pl9sXbqLT2wEVyE68H/xgoydA6yXWACwddyPUjrwfg8fWPuz+Gd7VKnt5nurufvGi+iwZdhEFnYHvudvbm723y+LzKPO5OvBu7045e0QMN1+cU3UeZtYxXt7/KsbJjnh5Kr1RYVUhaaZqnh9GlZZRlACdWhqjLnRdcsziuylHlDmBbuzAOJC+4uSQIFif1z/NGEOJjYu+xEh78Zleru6J1WUYLzH5A2//tiVaVTGuO28bextn9zsahOrhn5T3szd/L2sya+sBSGq1VwrzDmBc3D2h6NtjmtHFP4j3kVubSP6A/t4y5BYCtOVs7fJyi47y7511e2/HaCalGonNcvfRqzvrqLJ7f8nyLc/N7C9cbtONrBNc1NlxrmrEzbyc2p81dHk2v6N3NL5rLpDO53+RLXnDzSBAsTioqwIuXLhuLToGvtmbw0Yajnh5S+xt9KQz9P3BY4ZPLobD53d6aS1EUHpv6GJOiJlFpr+TG5TdyqPgQekXPpKhJ7f56vcVlQ7UFcksOL6GgquCkxz27+Vm25mzFx+jDc7Ofc7/x2JGzA0c7L4oUnWfjsY0ApBSmeHgkvU+FrYJDxYdQUXln9ztc/tPlHCw66OlhdSlO1elOhzi+W1xd/QP742fyo9Jeyf6C/fXy
gVv6KaGiKFIruIUkCBaNmto/lL+dMQSAx77fw9ajhR4eUTvT6eC81yFyFFTkweJLobq03V/GqDfy3KznGBg0kMJq7RqODhvd4o+7RK1RoaMYHjIcq9PKVwe+avCYnw79xIdJHwLwxPQnSAhIYFDQIHyMPpTaSkkpkgCqO6q0V7I7X8vpTi1J7XmfUnVx6WXpAFj0FgLNgSQVJLHghwV8lPQRTtXp4dF1DfmV+dicNnSKjnDv8JMep1N0jAkbA2ifTrkqQ7Q0H9hFFse1jATBokk3nNqPM0dEYnOo3PzhVvLKqj09pPZl8oFLPwHfCMjZA19e3+5l00D7pfbqnFfdvxBnxMxo99foTRRFcc8Gf5r86QkLQfYX7ufRdY8C8KeRf2JO3zkAGHQGRoeNBiQvuLvambvT/fMus5WRX5Xv4RH1LhmlWq5rv8B+fPV/XzGtzzSqHdU8ufFJbvrlpia7ZfYGmeVaZYhw73CMOmOjx46LqK0X3NpGGS7uWsEdsNi7J5IgWDRJURSevmg0/cN8yCqp4raPt2F39LB3+wF94JKPQW+G/UtgxWMd8jKRPpG8d8Z73DX+Lne9W9F6Z8SfQbAlmKzyLH5L+819f4m1hLt+u4tKeyVToqZw65hb6z3PlYcnecHd0+bszfW+Plx82EMj6Z3qLvgK8w7jtTmv8cCkBzDrzazNXMv5353PstRlHh6lZ7nygU9WHq0u1++jbTnbKK4uBlo/E+zuGiczwc0iQbBoFl+zgTeuHI+PSc+6Q/k8vSzZ00NqfzET4JxXtP01L8D25pffaolYv1iuHXGtdIlrBya9iQsGXgDULpBzqk4eXPUgR0uPEuUTxb9P/Td6nb7e81wrsrdmb5WP0rshV41tBS1nUoLgzuVKh4jxjQG0iZJLh1zKZ2d/xrCQYZRYS/jLyr/wwKoH3DObvY3rjUJj+cAuw0OGY9AZyKvMI6kgCWhDECw5wS0iQbBotgHhfjx9kfYx8hu/H2LJrh5YmmjURTCjpn7w93d0WMUI0X4uHnwxekXP5uzNJBck8+bON0lMT8SkM/Hc7OcIsgSd8JwRoSMwKAayK7Ldi1dE92B1WNmZuxOo7baYWpLqwRH1Pq50iONLf/UL6MeHf/iQG0bdgE7R8f2h77nwuwvZX7jfE8P0KHeN4EYqQ7hYDBaGhwwH4Pf034E2pEMYJR2iJSQIFi3yh5FR3HBqPwD+8vmOntlIY/aDMPTsDq0YIdpPpE+kO9/30bWP8up2rWTW3yf/3f2H5XjeRm+GhmgNYSQlonvZlbcLq9NKiCWEWbGzAEgtTvXomHob90ywX8wJjxl1Rm4bexvvn/E+Mb4xZJZn8tLWlzp7iB7n7hbXjJlgqP106kiJ9vdGZoI7hwTBosX+On8wk/sFU2518Of/baasuod1ptHp4Lw3OrxihGg/rgVyu/N3o6Jy0aCLOG/geY0+x52Hly2L47oTVyrE+IjxxPvHAzIT3JlUVW1WE4gx4WN4fNrjABwoOtApY+tK3OXRmpETDLWd41zanBMsdYKbRYJg0WIGvY6XLxtHpL+Fg7nl/PWLHT0vr9LkA5cuBp/wDq0YIdrHuPBxDA4aDGil0+475b5mPQdkJri7cS2KmxA5gfiAeEDLv7Q6rB4cVe9RUFVApb0SBaXJWU7XzyezLJMqe1UnjK5rUFXVPRPcWMvkutotCJaZ4BaRIFi0SqivmVevGIdRr/DTrize+P2Qp4fU/gJitEC4gytGiLZTFIXHpj3GJYMv4fnZz2PSm5p8ztgIbSY4pSjFvSJbdG02p40duTsAmBAxgTCvMLwN3jhVJ0dLemAzny7INQsc5h3W5P+zEEsIfiY/VFSOlvaen09xdbE7CG1OTjBAsCWYhIAE99etzQl2dZmTmeDmkSBYtNq4vkE8fLaWc/nkkn0s+imp55VOO75ixNYPPDsecVLDQ4bz4OQHCfMOa9bxwZZg98fprsBKdG178/dSaa8k0BxI/8D+KIri
DhwkJaJzuIJgV2WIxiiKQoJ/zc+nF+Vtu2oEh1hCsBgszX6e69MpkBJpnUWCYNEmV0zqyy2z+wNaxYir391EYXkP+1hy1EVw6r3a/vd3QsovHh2OaD+uIvVbsyUlojtw5QOPCx+HTtH+fLk+cpcguHO4g+AGFsU1xPXz6U1l7Nw1gpu5KM6lbkpEW5tlyExw80gQLNpEURTunT+Ely8bi5dRz+qUPM5+eTV7MnvYx8uzH4SRF4PqgM8WQtYuT49ItANpmtG91M0HdnHN5vemIMuT0ku1yhCNLYqrqzfO1LtmgpubCuEiM8GdT4Jg0S7+OCqar2+ZSlyIN+mFlVzw2lq+3Z7h6WG1H0WBc16G+BlgLYOPLobiHvT99VKuPzq783ZT7ehh7cB7GIfT4W5zPSGiThAsM8GdylUerblBsLuCR29Kh2hheTSXWL9YBgYNJMAc0OIA2kVmgltGgmDRboZE+vPdLdM5dVAYVTYnd3yynX/+sLfn5AkbzLDgfxA6GEoz4aOLoKqHzXj3MrF+sYRYQrA5bezJ2+Pp4YhG7CvcR7mtHD+jH4OCBrnvr5tz2uOq1HRBrkYZzU6HqFPGrrf8fFrSKKMuRVFYfNZilpy/pNUdRV0L4yrt0iyjOSQIFu0qwNvIu1dP5OZZWp7w26sPc9V/N1LQU/KEvYLgii/AN0IrnfbZQnDYPD0q0UqKotTmBUtKRJfmygceGzG2Xhvsvv59UVAosZZQUFXgqeH1CnannazyLKD5M8F9/fuiU3SU2crIq8zryOF1Ga6Z4OZeo7rMenOrUyFA0iFaSoJg0e70OoW/njGEVy8fh7dJz9qD+Zz90mp2Z/SQWdPAvnDZp2D0hkO/aYvleskMR0/kSolwfdQuuiZ3PnCdVAjQZr5cM26SEtGxsiuysat2jDoj4d7hzXqOSW9yB4O95efjzgluZo3g9iTpEC0jQbDoMH8YGcU3t0wjPsSbjCItT/id1Yd7RnpE9Fi46D1QdLD9Q1j5lKdHJFrJVS94W842nGrX/7fZHcbY3pyq013B4/ggGOrkBfeivFNPcKVCRPtGu6tzNEdvWrxYYatw1x1vbre49iQzwS0jQbDoUIMi/Pj21unMHhxGtd3J4z/s5awXV7P+UL6nh9Z2g+bDH/6j7Sf+C7Z/7NnxiFYZHDQYL4MXpdZSDhYd9PRwGrUpaxNTF0/l2S3PenoonepA4QFKrCV4GbwYEjLkhMelfXLnaEmN4Lp6U5k0VyqEn8kPX5Nvp79+3Zng3pKD3RYSBIsOF+Bl5J2FE1l0/kiCvI0kZ5dyyZvruX3xNrKKu3krzYnXwbQ7tf3vboNDiZ4cjWgFg87A6LDRQNeuF1xYVcjffv8b5bZyPk76uFd1uXOlQowNH4tRZzzhcZkJ7hwtrQzh0pvKpLlSITwxCwy1M8EqqlS8aQYJgkWn0OkULj2lL7/9ZRZXTO6LosB3OzKZ80wib6w8iNXejT/infMIjLgAnHb49ErI3uvpEYkWcuUFd9XFcaqq8vCah8mtzAWg2lHND4d+8PCoOs+W7C1Aw6kQ0LuCLE9y1wj2a1kQ3JvKpLlmgj2RDwxg0dd2qJOUiKZJECw6VaC3iX+eO5Lvb53O2L6BlFsdLFqyjzNf+J3VB7rpymGdDs55FfpOheoS+N95kLPP06MSLVA3L7grWrxvMYnpiRh1Ri4adBEAX+z/old83Kmqam0QHNlwEOwKstJK07B1k2otPxz6gUt/uNRdbaE7cKVDtHYmOLM8s8fPTrpmgltTGaI96HV6d5k0WRzXNAmChUeM6BPAlzdO5ekLRxHqa+JgbjlXvLOBmz7cQkZRN6xvaLTAJR9B+DAoy4L3/gDHdnh6VKKZRoWOQq/oOVZ+zN3ytKtILkjmmc3PAHDPhHu4c/ydWPQWUopS2JHb8/+NHS4+TEFVAWa9meEhwxs8JsI7Ai+DFw7VQVpZWiePsHU+2vsRu/N38+OhHz09lGZractklxBLCL5G
X5yqk6MlRztiaF2G6/dHa5tdtAd3ECwzwU2SIFh4jE6ncNGEWFbcM4urp8ajU2DJ7ixm/yeRh77Z3f2CYe9guPpHiBoDFfnw3tmQttHToxLN4G30ZkiwtuCqK80GV9gquPf3e7E6rcyMmcllQy7D3+TP/Pj5gDYb3NO58oFHh43GpDc1eIyiKN3qI3en6uRgsbYIM7kg2cOjaZ5Ke6W7zm9LF8YpitJrUlbcOcEt7BbXnqRMWvNJECw8LsDLyKP/N5wfb5/BpIRgrHYn/1t/hFlP/8Z9X+7kaH43+o/sHQwLv4PYyVBdDB+cC4dXeXpUohm6YtOMpzY9xeHiw4R5hfH4tMdRFAWACwddCMCy1GWUWEs67PVzKnK449c7OPWTU9mb75lc95PVBz5ed6oQkVmW6e7olVzYPYJgV66rr9EXf5N/i5/fnd6ktIVrJthTC+NAyqS1hATBossYGuXPJzdM5uPrJzGlXwg2h8onm9KY/Uwid3+2nZScMk8PsXksAXDlV9BvFtjK4aML4cByT49KNKGrNc34OfVnvjzwJQoKi2YsIsgS5H5sdNhoBgQOoMpRxQ8H23+BnKqqfJvyLed+ey6/pv1KYXUhnyV/1u6v05xxbMnS8oHHR4xv9NjuVCGibim+1JLUbtHitm4+sOvNWEu4fz7d4E1Ka1kdVvfiVU8tjIPameBKW9f/d+VpEgSLLkVRFKb2D2XxDZP54sYpzBwUhsOp8tXWDE5/biW3fryVfVkdN/PVbkw+cOmnMOhMsFfB4kth73eeHpVoxJjwMUBtTVpPyizL5NF1jwJw3cjrmBQ1qd7jiqLULpA70L4L5LLLs7llxS38fc3fKbWWunMbf0//vdMbdaSVppFTmYNRZ2RU2KhGj+1OH7enFKW4952qk5TClEaO7hpclSFamg/s4vr59ORawcfKtVlgi95CkDmoiaM7jswEN5/Hg+BXXnmF+Ph4LBYLkyZNYuPGk+dQ7tmzhwsuuID4+HgUReH5559v8zlF1zUhPpj3rz2Fb2+ZxunDIlBV+GHnMc54fhU3fLCZZXuyKK7owivBjRZY8D8Yfj44bfD51bDjU0+PSpxEqFcocf5xqKhsz9nusXHYnXbuW3UfpdZSRoWO4uYxNzd43B/7/xGz3syBwgPszNvZ5tdVVZWvD3zNed+ex6qMVRh1Ru4cdyffnPMN3gZvcitzOz0lwpUKMTJ0JBaDpdFju1NXsrpBMMC+wq5fTaa1NYJd6qZD9NSqJq6UkWjf6FbNlrcXyQluPo8GwZ9++il33303jzzyCFu3bmX06NHMnz+fnJycBo+vqKigX79+PPnkk0RGRrbLOUXXNzo2kLeumsBPt8/grJFRKAr8vDebP/9vC2Me/5mzXlzFP3/Yy4qkbEqqulhQrDfCBW/DmMtBdcDXf4bN73p6VOIkxoZ7vlTaGzvfYFvONnyMPjx56pMNNocA2nWBXFZ5Fjf9chMPr32YUpsWfH9+9udcN/I6vI3eTOszDYDEtMQ2vU5LuUqjNZUKARDnHwdAUXURRVVFHTmsNnOlQ7gCw+6wOM7VMrm1QXBf/74oKJTaSsmv6gEdQxvgmgn2ZCoEyExwS3g0CH722We5/vrrueaaaxg2bBivv/463t7e/Pe//23w+IkTJ/L0009zySWXYDab2+WcovsYFu3PK5ePY/ldp3LVlDj6h/mgqrAns4S3Vx/muvc3M+axnznn5dUs+imJ35JzKKu2e3rYoNPD/70ME68HVPjhTlj3iqdHJRrgbprhoc5xm7I28ebONwF4ePLDxPrFNnq8KyVi6eGlrUrhUFWVL/Z/wbnfnsuazDWYdCbuGX8PH5z5Af0D+7uPmxU7C4CV6Stb/BptsTmreYviQPvDH+EdAXTtlAiH08Gh4kMA/KHfH4BuEgS3sjyai1lvdgfQ3WG2vjXcM8EeXBQHUiKtJQyeemGr1cqWLVu4//773ffpdDrmzp3LunXrOvWc1dXVVFfXFvAu
KekGOae92IBwP/5xzggAckqqWHcon/WH8ll3MJ/U/Ap2pBezI72YN34/hF6nMCzKnwnxQUyMD2ZCXBDh/o1/rNohdDr4w9Ng8oY1L8CyB6CqBGbdBx782EzU55oJ3p23G6vDetKSXB2huLqY+1fdj1N1ck7/c9wBUmNcC+RSilL48dCPXDrk0ma/XrmtnHsS72FN5hr3uf4x7R/0C+h3wrEz+sxAp+jYV7CPY2XHOmWmK7Msk8zyTPSK3p2v3ZT4gHiyK7I5XHy42c/pbOll6VQ7qjHrzcztO5dXt79KcmEyTtWJTvF4hmKDVFWtDYJbWB6trviAeNLL0kktSWVi5MT2Gl6X4ZoJ9mR5NKgzEyzpEE3yWBCcl5eHw+EgIiKi3v0RERHs29e6/KjWnnPRokU89thjrXpN4Vnh/hbOGdOHc8ZoMwzHiivdAfG6Q/mkFVSyK6OYXRnFvLsmFYC+wd7uoHhifBD9Qn3R6TohEFUUmPsYmPzgt3/CyiehPFcLjnX6jn990aQ4/ziCLcEUVBWwN39vhwVSDqeD9LJ0UopSOFR0iJSiFHbm7iS7Ips4/zgemPRAs86jKAoXDrqQJzc+yef7P+eSwZc0KxfRqTq5f9X9rMlcg1lv5raxt3HF0CvQn+TfYZAliNFho9mWs42V6Su5ZMglLfp+W8OVDzw8ZLj7j3pTEvwT2HBsQ5eeCXblA/cL6EdCQAJmvZlKeyVppWnulI6upri6mDKbVp2nLQFevH88qzNWd4sKHq3heqPgyUYZUCcnWGaCm+SxILgruf/++7n77rvdX5eUlBAb2/jHkKJrigrw4ryxMZw3VputyCyqZPORQjanFrA5tZCkrBKOFlRwtKCCr7Zqv7ACvY3Eh/gQ6msi1NdMiK+JEB8zoX5mQn1MhPqZCfExEeRtanuwrCgw817wDoIf/wKb39Eaa5z/JhgaTvERnUdRFMaFj+OXo7+wNWdruwTBTtXJ2sy1JOUnaUFv8SEOFx9usH2st8Gbf5/672YHfQB/7PdHntvyHAcKD7Arb1eTVRQAXt72Mr+l/YZJZ+Kd+e8wOmx0k8+ZFTuLbTnbSExL7JQg2J0PHNl0PrBLdyiT5soH7h/YH4POwMDAgezO301yQXKXDYJdwV2oV2iTCxQb050qeLSGq0awp1omu8jCuObzWBAcGhqKXq8nOzu73v3Z2dknXfTWUec0m80nzTEW3Vt0oBf/F+jF/43WZi9KqmxsO1rE5tQCNqUWsD2tiKIKG9sripo8l16nEOJjItzfTJivmTA/M+F+lppb7eswPzN+FiNmgw6zQYdBf5KPNyf+CbxD4KsbYO83OCoKyDvrXYqdZkqrbJRU2SmtsmPQKQR5mwj2MRHkYyTI24TxZOcU7WJs+Fh+OfoL27K3wYi2n++rA1/x2LoTP2ky6830C+hH/8D+2hbQn1FhowjxCmnR+QPMAcyPn893B7/ji/1fNBkE/3ToJ97a9RYAj059tFkBMMCsmFk8t+U5NmZtpNxWjo/Rp0XjbKmW5AO7JPjXlOEq6bo5p65yaAMCBwAwOHgwu/N3s69gH/Pi53lyaCfV1soQLj25TJrdaSe7Qos9PD0T7GWUnODm8lgQbDKZGD9+PCtWrODcc88FwOl0smLFCm699dYuc07Rs/hbjMwcFMbMQWEAWO1OkrNKySyuJL/MSl5ZNfll1eSVW8krrSa/XLuvqMKGw6mSU1pNTumJM3gno9cp7oDYbNBjNuow6XXodQpl1QEMddzHc+pT+Kb+Ts5Lc7ja+jfyCWjiezDUBMUmgmsC5MgAC1EBXkQFWIgK1Pb9LQaPlunprlyd47blbmuXPM2fU38GYGLkRKZGT6V/QH8GBA4g2jf6pOkHLXXhoAv57uB3LE1dyr0T78XP5NfgcbvzdvPw2ocBuGbENZzd/+xmv0ZCQAJ9/fpytPQoazPXcnrc6e0y9obkVORwtPQoOkXnztNuDtdMcFppGnanHYOu633YmVJ8
YhAMXbtzXFsXxbm4qmFklGV0es59R8utyMWhOjDoDIR5h3l0LNIso/k8+hvi7rvvZuHChUyYMIFTTjmF559/nvLycq655hoArrrqKvr06cOiRYsAbeHb3r173fsZGRls374dX19fBgwY0KxzClGXyaBjZEwAI2MaDzxtDqc7SM4prSK3tJrcmoD4+P1Km8P9PIdTpcLqoMLqAE4s35bOEC5R/s57pqcYqUvla8tj/MXyKGVeMfhZDDicKgUVVgrLrRRV2lBVKKmyU1JlJ7WJdtI+Jj2RARaiA7XgODbIm/7hvgwI9yU+xAeTQWaUGzI4eDBeBi+Kq4s5VHSIAUEDWn2ucls5m7I3AVq1B1eQ1t7GhI2hf0B/DhYf5MdDPzaYruBqgVztqObUmFO5Y+wdLXoNRVGYGTuT/+39H4lpiR0aBLtmgQcHDT5pQN+QSJ9ILHoLVY4qMsoyulx6gc1pc6dquP5dDQkeAsC+gq5bK9jVKKOtM8GhXqH4GH0ot5WTVppWrwJJd5dZrlWGiPSO9PgCR8kJbj6PBsELFiwgNzeXhx9+mKysLMaMGcPSpUvdC9uOHj2KTlf7jykzM5OxY2tnBf7zn//wn//8h5kzZ5KYmNiscwrRGka9jsgAC5EBFmhiptbpVLE6nFTbnFTbHVTbtdsqm9O9b3eo+FoM+FsM+Fnm4FN+OuonF9K3OI3PDI/Cgi8hsv5n8Q6nSnGljYJyK4UVVu22ZqY6q6SKY0VVZBZXcay4kqIKG+VWBwdzyzmYW37CGPU6hb7B3vQP04LiAeG+9A/zYUC4L36WhuvS9hZGnZFRoaPYkLWBrTlb2xQEr81ci91pJ84/rsMCYKjpIDf4IvcCuQWDF9T7FKDKXsWdv91JTmUO/QP68+8Z/27VLPTs2Nn8b+//WJW+CofT0W4z2cdrSX3gunSKjjj/OJILk0ktTu1yQXBaSRo2pw0vg5f7I/NBQYMA7U1KYVVhvfbYXUV7VIYA7d9pgn8Cu/N3c7j4cM8Kgus0yvA015qC7tCO29M8/lnRrbfeetJUBVdg6xIfH9+sTjONnVOIjqbTKVh0eixGPdDMgNJ/CFy3HD48H3L2wrt/gMs+gbip7kP0OoVgHy39oSkVVjtZxVUcK64is6iSY8VVpOZrAfHBnDLKqu0czivncF45vyTVz6H3NRvciwRDfc2E+mn7YX5m931+FgM2hxO7Q8XudGJzqNgdKjZnzX0OJ3anisWox9dswM+ibb5mA74WA2ZD166GMSFyAhuyNvB7+u9cPPjiVp9nZZpWV3dmzMz2GtpJuRbI7S/cz+683YwMGwlo5a0eXfcou/J2EWAO4KXTXsLX5Nuq1xgTPgY/kx+F1YXszNvZolSFlnBVhpgQ2fx8YJf4gHgtCC5JZSYdf91bwlUZon9Af/dsoY/Rx51mklyYzOSoyZ4cYoNcQXB7LPiKD4hnd/7uHrc4zhUEezofGGRhXEt4PAgWQtTwj4JrfoLFl8LRdfC/8+DCd2FI0/Vij+dtMtAvzJd+YScGO6qqkl1STUpOGSk5pRzMLdf2c8vILa2mrNpOWXXT6RZtYdLrtKDYYsDLqMeo16HTKRh0Cnqdgl5RMOhr9/U6BbNRj7dRj5dJj7dJj5d736B9bdLjYzIQ6G0kyMdEkLcRL6O+VXnRc/vO5ZXtr7Amcw0l1hL8Tf4tPofD6WBVxiqgc4LgAHMA8+Lm8f2h7/niwBfuIPi/u//Lj4d+RK/oeWbmM8T6t77yjVFnZHqf6Sw5vITEtMQOCYLXZq7lUPEh9Iqe8eEtmwmGrt0+2RUEH//pwuDgwVoQXND1gmCH0+EO8NqaEwxd++fTFq4awZ6uDAHSMa4lJAgWoivxCoIrv4bPr4H9S+CTy7SGGqf+VWu40Q4URXGndkwfGFrvsdIqG7ml1eTV5D9r+9U1+1b3foXVgaEmaDXodRj0CkaddmvQ6zDWBLNVdidlVTZKq7TAWsuNBqvDSX65lfxya7t8
TydjNugI8jZpgXHNIsJAd3AMOkWBmluFmltFu0aBhhiK7Onct2QxA7xn1570uE+jTAYdgXXOHeStldNLq0iioKoAP6MfYyM6Zsb0eBcOupDvD33PksNLuHfCvWzO3swLW18A4L5T7mNS1KQ2v8bs2NnuIPiu8Xe1+Xx1VTuqeWL9EwBcMuQSAi2BLT6Hu0xaF5xpdAfBgccFwUGDWX5keZfMC86tzMXmtGFQDO6OfG3RlX8+beGeCfZwy2SQmeCWkCBYiK7G6AULPoQl98Lm/0LiIkjfBOe/Bd7BHfrSfhYjfhYj/TpocbPd4aTc6qC0yqbNOFfZKbc6cDpV7E4Vh2tTVRw1qRVOVcXmUKm2O6myOaiwasF0Zc2Cw0qba99OebWDwgorRRU2LS/b7iSrpIqskqoWj9UUOgRzWDq/pf3CT+ktnwEzhS3DHAqVJQM49+X1WIx6THodJkPtZq77tV6Hj9lAhL+FCH9zza2FEJ/m16ceGz7WvUDupW0v8U3KN6ioXDzo4nar7TutzzQMioFDxYc4WnKUvv592+W8AO/seoejpUcJ8wrj1jGtS2lzl0nrgjONdWsE19WVF8e5FsVF+kS2Sw543TJpqqr2mAo27m5xHm6ZDLUzwVanFZvThlHXu9d5NEaCYCG6Ir0B/vgcxEyEH+6ClF/gjZlw8fvQZ5ynR9dqBr2OAC8dAV4d+0tZVbWqHAXlWkBcWKEtJiwst1JYYaPK7gAVnKqKqoJKnX1VRQWK7PP4rewXjH4H+L9JIZh1takldf9sV9ocFFbYKKrQzu2q5GHwTQKgtGAQe9rQit2gU7Ra1P4WImuC4xAfM8E+xtoyeb7abaC3iQsHXci/N/2bj/d9DGil2e6bdF+rX/94/iZ/xkeMZ0PWBhLTErlq+FXtct7U4lTe3vU2AH875W+tzlt2LYYrqCpodSpLR7A6rBwpOQI0MBNcUyYttTjV3VK5q3DnA/u1z8f8ff36oqBQai2loKqgxXWxuyJVVd1BcFeaCQZtcZzRJEHwyUgQLERXNuYyiBwJn14JhYfhv/PhzKdg/NVa9znRIEVR8DEb8DEbiG315PkIzvv2TVKKUpgyMotzB5zb7Geml2Rw5tdZ6NDx3wVX4LB7U23TKoXYHCpWuxOr3YHV4azZd1LtcFJSaSe3tIrskmqyS6rILavG7lQ5VrPIcUczXtvPywfiDKDYMROKf8m1PPvzQUJ8tAWOwT4mQmoWPgZ5m1pVKm9m7Ew2ZG1gZfrKdgmCVVXln+v/ic1pY1qfacyLa33TCF+TL+Fe4eRU5pBanNqsDnqdIbUkFYfqwNfoe0JaQYR3BAHmAIqrizlYdJBhIcM8NMoTtVdlCBeLwUK0bzQZZRmklqT2iCA4vyqfakc1CgqR3q1r9tWejHojBp0Bu9NOha2iy7wR7IokCBaiq4scCTckwjc3QfJP8MOdWnrEWc9oqROiw8yLn0fK9hR+Tv25RUHw6kxtQdyY8DGc2j++1a9vdzjJK7OSXVJVZ9OauBSWW901pF0l85wqlFaaMOXNwuC3i4KMy/jGWgwUn/Q1QnxM9Anyok+gF9GB2q3r6z6BXgR6G0/4yHpWzCye2vQUW7K3UFxdTIC58bKBTfnx8I9syNqAWW/mwUkPtvkj8viAeC0ILuk6QbArFWJA4IATvj9FURgSNIQNWRtILkjuUkGwKx2iPRbFucQHxJNRlsHh4sMtLoPXFbnygcO8wzDqu8asq7fBmxJriSyOa4IEwUJ0B16BsOAjWPsCrPgHbP8Iju2EBR9AcD9Pj67Hmh83n1e3v8q6zHUtCvYS0xMBbca0LQz16lM3zulUKaly1ZGeQl6ZFhgXlLs6IVrJL6+uafqiBc4Op+peoLgzveFA2dukJ9LfouU0u7ofGvV46aOpVDO58cuPibdMd3dGdOU31819dn3teq5rVjrE10SVo4ynNz0NwJ9H/ZlYv9ZXr3CJ949nY9ZGd2OKruBA4QHgxHxgl8HB
g9mQtaHL5QW3Z3k0lwT/BNZkrOlSP5+2cDXK6AqVIVy8jVoQLF3jGidBsBDdhU4H0++C6HHwxbWQvQvemAXnvwGDz/T06HqkfoH9GBA4gJSiFH49+ivnDTyvyedU2CrYeGwjoM2YdhadTiGwJi+4OZxOlaJKG8eKK8korCSjqJLMIu02o6iKjMJKdyWQQ3knNlwxhQ3EHJrJtrw1rMts/eI4vz7fgn8BJmcUW3eO5pHDu2sC5NrUjWAfEyE+JvwtxmYtEuyKFQjqzgQ3xLU4rqu1T04va59ucXW5yqR1pZ9PWxwrq8kH7gI1gl2ka1zzSBAsRHfTbybcuAo+WwjpG2HxJXDKn2HOw2Bu3WIicXLz4+drKRFHfm5WELzu2DpsThsxvjHulfBdka5O85Xh0Q3PcFfZHBwr1tIwqu1Od16z1e7kYIme/x1diU/gAf48tB82h6LlNx+X5+zadz1WWbNgMa+sGqfpCKrfehSgKO1sllTkNjpmvU4hyFsLiIN8jAT7mPAyGjAbtdlms1GH2aAn26a9Edh2LJlPNx3F0MzygoHeRsL8tMYwIT7mdm0tfrC4Jgg+SQdCV+e45ILkLlM1odpRTW7Nz6S90yGg5wTBXalbnIuUSWseCYKF6I78o+HqH+Hnv8PGN7Rt/xI4+wXof5qnR9ejzIufxyvbX2F95vpmpUT8nv47ALNiZ3WJQKYtLEY9CaE+JIT6nPCYwxnFD58FUVhdyLQRpZwSdUqLzm2127j4hwUcLFaZEj6fP465gLw6danzy7Sc54JyKwVlVkqr7TicqvvxxihGK74DILcqk799uQNoXTAbVCcoDqvpmhjgdWKOdEOdTA16HRaDDotRj15v52jJUQAKi4LZaivEbNDhbTIQ7G3Cz2KgX0A/jDojZbYyMsoy2jXobK3MskxUVLwMXgSZ26+ds+vNYXppOjaHrcvk0baWuzJEV5oJloYZzSJBsBDdlcEEf3gKBs2H7++EoqNal7mxV8C8J7Q8YtFm/QL6MTBoIAcKDzSZEuFUne5WyafGnNpZQ/QIvU7PqTGn8u3Bb/kt7bcWB8GLkz/mYPEBAswBPDn7AYItjZfxqLY7KCy3ufOc88urKaqwUVUzO11td2izz3YnVdY+LKsw4tTZmDZEh94ZRlNvR5yqSnGl1iwmt1SrylFYYaOwwsb+7LIWfW/H05kz8Omnotq9uen9/cCBeo9rM9xG1KgIMKRz1zc/MdB3ilb+zsdU04K9vuPfXymAUa/DaNBh0ivafs1mMtR+bTbotE6LRgMWkzaDfrI3a3XzgdvzDV2YVxjeBm8q7BWklabRL7B7r2twXSeZCe5+JAgWorsbMAduXgcrHoONb8K2D+HAL/DHZ2HIWZ4eXY8wP24+BwoPsOzIskaD4L35e8mvysfH6MOEiAmdOELPmBU7i28PfsvK9JX8deJfmx0oZZVn8cr2VwC4e/zdTQbAAGaDnsgAfbMWCQIc+DaOlKIUbpzrz4yYlgXoTmdNQFzTNdG9lVVTXGGrd2zdb9m1r6pgc6hU2R1U2xwccxzgKGChD1HhfnUavzgoc89wW7GURWAMTGdH9l427emcRVY6BXcLcouxth25Sa+jzLwa9JBf5Mtti7fVW+BoMujQKQpOVcVZ0+DGfatq19Dh1Pb1OtydJA16HQadgheRVHCIV1avZaCfikGnoKtpka5zt0vHfZ++7uPuW9zH6hQFnQ70NT+EKruWelNlq22oU2mr+bpmH6i3cNN43K1Jr2DQ6erUEa/53mpuVVXF4XBytFgLgnel6igpOFbzJkOPj9ngbvPubdT22zPFpjFeNZWDZCa4cRIEC9ETmH3hD0/D8PPhu1shP0VruTz8fO1+n9CmzyFOal78PF7e/jIbMjc0mhKRmJYIwNToqd3+I97mmBo9FaPOSFppGoeKD5208sHxFm1YRKW9krHhY1tUeq4lEgISSClKIbUklRnMaPTYSnsl36R8w9ToqcT5x6HTKQT5
mAjyMTEowq/NY3luy2r+uxvOHT6Ov0+uXzHEane6S9x9cSCNzw5vYXh8GTPHDKSwpnKH1e6s95wTky+0gMzmULE5nNgcTqwOFZvdte/EVpOT7QrAbQ7tLE4Vyq0OymtamtdlDk/DFALZBT58n53Z5utQlyXaH2MAfJ+0A2t++6VadDbFUILvQK0Cw79/yAa1sNHjDTrFHRhrbzgMdfa1+4362kC5bqaNetxP3qCrO+Nf07K+5s3GkSI7AGsOZuAsSqWmQzyKqzU8Sp37wKBr+lME1ycJ2qZ35+I3t6NlVyRBsBA9SdwUuHE1JD4Ja1+CPV/BoUStwcbIC6XBRislBCQwKGgQ+wv3N5oSUTcfuDfwNnpzStQprMlYQ2JaYrOC4N+O/savab9iUAw8NPkhdErHzIy5KxA0UYbL5rBxd+LdrM5YzYSICbx7xrvtPpaTtUsGbSbS1SL7DGU8nx2GCtK4c+6gdh9HXTaHU5sZdbUerzNDWmnVUkveP/gDe4rhzCHDGDVxWL1Fj1aHE4dTRa/Tgil9zeysotTO4io1M7Ra63OtDbrdqWJ3ONlVPpC9VdvpF13BsLg+7pbpTtXVPh33fu19ap37qL9fZzYatHx2r5rg0rXvXWfG22LUua+DtaaJTbXrTUPdW6eqzTgr2syzUnOrq/nestUUkh3gSzzjBvah0uqg3Gp3t3V3tXm314zL7lQprbJTWmXv0J+vObwCUwj8uj+dpbl7TnqczpSDV8wH2EpGYc1rXZMao17RguKaTwgiAyx8ffO01g69U0kQLERPY/SC0x+D4efCt7dC9m746k+w9X0Ydo6WPiG1hVtsfvx89hfuZ1lqwykRWeVZJBUkoaAwvc90D4zQM2bFzHIHwdeNvK7RYytsFSzauAiAq4ZfxcCggR02Ltfiq8Mlh096jFN18uCaB1mdsRqArTlbKaoqItAS2K5jSSlKAU5eHs3F1T45szyzXZqQNMY1s+dvOfknFu+m5gNwzvCRzO7bvpVOlqaewr0rPyc0qJhn/zCmXc/dme5J/IjkI3D5qPncOvbkaTfWmvSMClttgOx6w6Ht26m0Oqmw2rE51PppNnXOUzflRntDUTv7X/eTgKSqUA7ZISHMQP/IyJqW8Npscm2reAd7eJ1y8jCHrmRUwDwURxDWmq6W7vPanTX3OdyfJtSdodZe145rvaquG022SBAsRE8VPRau/w3WPA8rn4LUVdoGENwfBszVtvjpYPJu9FQC5sXN46VtL7H+2PoGAyXXLPDosNHNynHtKWbFzuKJDU+wI3cHBVUFJ3zvuRW5rM5YzZrMNazNXEuptZRon2j+POrPHTqupmaCVVVl0YZFLDm8BINiIMAcQH5VPqsyVnF2/7PbbRwVtgr3wqmmgmB/kz/RPtFklmeyv3A/EyMntts4WsO9MM6v/fOTE/xr3qQUH+4yJeFayua0sTZzLdD0QlhX45gAOidN6p1dm3h+K0zo58MT0xvuyvf2rrfZuDVV+0JxMG7UDh6Y9ECT51ZVbUbfVTbR6nBSbXO6yyd2J52ToS2E8AyDCWb+FW7dCHMfhfgZoDNAwUGtrNrHF8G/47WqEutegdz99ZPQhFt8QDyDgwbjUB38mvbrCY+vTNeqQrS1S1x3E+kTyZDgIaiorEpfhc1pY1PWJp7f8jwXfnchp31+Gg+vfZhlqcsotZYSbAnmn9P/6S7h1FFctWhzK3Mps55Y3eHVHa/ySfInKCj8a8a/OH/g+UBtXnd7OVR8CIAQSwhBlqZzX12zwckF7d80Y1/Bvmaft8RaQom1BIAY3/Yv1xbnH4eCQom1hMLqxvNou6rtOdsps5URbAlmROgITw+nHtf/r0p7wx3jDhYd5NXtrwJw3gDtk62vDnxFfmV+k+dWFC1P2NdsIMTXTFSAF/GhPgyO9GNkTAAjYzruE4z2JjPBQvQGwf20bnPT74KqEjj8O6T8om3FaXDwV21b9gB4h0LMRIiZoN32GQfmti8O
6gnmx88nuTCZZanL3EETaH9oNhzbAMDMmN4VBIM2G7yvYB8vbH2BRRsXUW6r7TCnoDA8ZDjTY6Yzvc90RoSMQK87seRXe/Mz+RFiCSG/Kp8jJUcYHjrc/dhHSR/x+o7XAXhw0oOcmXAm0b7RvLXrLdZkrmnX2rWudslNzQK7DAkewm9pv7V7++SCqgKu/OlKnKqTb8/9tsk6xBml2ixwsCW4Q96wWAwWonyiyCzPJLU4tVt+erIqXftkbVr0tA7LbW+txjrG2Z12HlrzEDanjRl9ZvDY1MdIKUphV94uPkz6kDvG3dHZw/WYrvVTE0J0PIs/DP0jnP083LkLbtkI8/8F/WaD3gwVeVrjjV8fhw/+DxbFwqtT4bvbYev/IGcfOLvXR17tZV68tnBkw7ENFFbVzl5tOLaBakc10T7RzQ52ehLXQsDcylzKbeUEmYM4q99ZLJqxiMQFiSz+42JuGXMLo8NGd0oA7OKaDa6bF/z9we95cuOTANw65lYWDFkAwMjQkQRbgim3lbM5e3O7jaGxRXENcc8Et3P75KWHl1LlqMLqtPLy9pebPL5ujeCO0t07x7lSoLpiTXD3TLDtxJngD/Z+wK68XfgZ/XhkyiMoisKfRv4JgE/2fUKptbRTx+pJMhMsRG+mKBA2WNum3AL2aji2E9I31Wybofgo5OzRtq3va8+zBEL/2VpOcf854N91OiV1pDj/OIYED2FfwT5+PforFwy6AKifCtEdcxvbanjIcB6a/BCFVYVM6zONYSHDusTMWLx/PFuyt7jzghPTEnlozUMAXDH0Cm4YdYP7WJ2i49SYU/km5RtWpq9kSvSUdhlDSnHNoriTtEs+3uAgLQg+WHSwXWekfzz0Y739hcMWMjRk6EmP74wgOCEggbWZa5us4NEVZZRlcLD4IHpF327/VtrTyWaCDxUd4pVtWo3ueyfeS4RPBKC9kR0QOICUohQ+Tf7UHRT3dJ7/LSWE6DoMZoidCFNuhovehbt2wT3JsOAjLZUifgYYvaGqCPZ8Dd/eAs8OgdemwfKHtTQLu9XT30WHmh8/H4BlqcsAbZHI72najFBvTIVwuXjwxfx59J8ZETqiSwTAUKdCRPFhNmdt5i8r/4JDdXB2v7O5d+K9J7xhmRUzC9CC5YZaIbdGSmHzKkO49PHtg6/RF5vT5s4nbqsjJUfYmbcTnaJjWrRWuuqFrS80+py00jSADm3f7Fq8eLj45BU8uipXKsTosNEdWsWjtdxtk+t0jHM4HTy05iGsTivT+kyrV6Nbp+i4dsS1APxv7/9Omkvc03SN31RCiK7LL1JLn5j7KFz9A9yXBtcth5l/gz7jAUUrw7bmBXj/bHgqARZfCpvehpwkcJ5YhL87mxenpURszNpIYVUhSQVJ5FTm4GXw8vhqflGfKwjelrON2369jWpHNbNiZvHYtMcaDNSnRE/BqDNqs3w1aQxtUWotJbsiG2h+OoSiKO2eEuGaBZ4SPYUHJz+IQWdgTeYa1h9bf9LnSDpE47pyKgQ0PBP8wd4P2Jm3E1+jL49OefSEN4FnJpxJH98+FFQV8NWBrzp1vJ4iQbAQomX0Bog9BWY/ANf/CvcehAvegdGXgk8YWMsg+Sf48R54dTI8GQfv/RGWPwJ7v4OaFqPdVV//vgwNHopDdbDi6ApWpmmpEFOjp2LSmzw8OlGXa6YxtzKXMlsZ4yPG8/TMpzHqGk4xcDX/AEhMT2zz67sC6XDvcPxN/s1+3pDgIQDtsjhOVVV+OPQDAH/s90di/WJZMFjLg35uy3M41Ybz+zslHaKmTFp6aTo2p62Jo7uOSnslG7M2At0gCK6ZCT5UfIiXt2m54H+d+FcifSJPeI5BZ3DPBr+35z1sju7zM2ktCYKFEG3jE6J1ozvvdbhnP/z5d5jzcE3qhA9YS7X6xGueh8+uhOeGwTND4JPLYdUzWlWK0qxuVZrNtUBuWeqy2nzgXpwK0VVF+0Zj0GlLX4YGD+Wl017CYrA0
+hxXSoTrzU1bNLdJxvFcecHtUSZtZ95O0krT8DJ4cVrsaQDcMOoGfIw+7M3fy8+pP5/wHKfqJLNMa5PcEeXRXMK9w/E2eGNX7e70i+5gU9Ymqh3VRPpEdtmFsF5GL0AL2F3VIKxOK9OipzXaqvycAecQ6hVKVnkWPx7+8aTH9RQSBAsh2o9OB1GjYcY9WurE/Wlw01r4v5dg/NUQORIUPZQeg30/wIp/aDWKnxmszRi/fbrW5W7ty3BgORQd7ZKVKObHaXnBG7M2sid/DwoKM2JmeHhU4ngGnYFrhl/D1OipvDb3NfxMTZf6c72ZcTX/aIuWVoZwqZsO0dbc5B8OarPAc/rOceeJBluCuWb4NQC8uO3FE2b88irzqHZUo1N0RPqeOGPYXhRFIc4/Dmi6vXVX4k6F6HNql10I65oJVlF5a9db7MzdiY/Rh0ennpgGUZdZb+aqYVcB8M6ud3D0sHS240l1CCFEx9HpIWK4to3TfrFiLdcqUGRs0bZjO6DwMFQXQ/pGbavL6AOhA8E3QutsZ3RtXmDy0W6N3tq+JQCix3V4tYpY/1iGBg8lqSAJ0MprhXqFduhrita5fdztLTo+yjeKwUGDSS5MZlX6Ks4ZcE6rX9s1EzwwsGXtofsH9segGCiuLia7IrvBj66bw+a0sTR1KaClQtR15bArWbxvMWmlaXy+/3MuG3qZ+zFXKkSkd+RJU0faS3xAPEkFSd0mL1hVVfeiuK78xtdisKCgoKLyxo43ALh3wr3N+rd08eCLeWvXW6SWpLLi6Ar3J189kQTBQojOZfKBuCna5mKvhvwUyE2u2fZpt/kpYCuHY9tb9hqBcdB3srbFToawIdosdTuaHz/fHQR31bxA0TozY2eSXJjMyvSV7RIEt3Qm2Kw3Ex8QT0pRCvsK9rU6CF6bsZai6iJCLCFMippU7zFvozc3j7mZx9c/zhs73+CcAefgY/QBtBxd6Jh2ycdzLV7sLjPBh4oPkVmeiUln4pTIUzw9nJPSKTq8DF5U2CtwqA6mRE2p1+CnMT5GHy4bchlv7HyDt3e9zelxp3fZGe+2kiBYCOF5BnPtjHFdDhsUpmoBcWUh2Cq0mWRbpbZvqwBrRe1+abZWz7joiLbt/FQ7jyUAYidpW98pED4UvIK0OsmtNC9+Hs9vfR6obRYheoZZMbN4c+ebrMlYg9VhbdWCx6KqIvIq84CWB8GgLY5zBcGt/fflqgpxZsKZ7tzous4beB4f7P2AIyVHeH/P+9w85magcxbFubgWx3WXMmmuVIiJURM7vPV3W3kbvamwV+Bj9OGxqY+1KJC9fOjlfLD3A5IKklibuZZpfaZ14Eg9R4JgIUTXpTdqqRChLfg4uaoEMjbD0fVwdJ3W8KOqGA78rG0uOoPWItonDHxCa7awOrdhWnk4vyht/7hOZ7F+sfxlwl+osFcwKGhQO33DoisYHjqcUK9Q8irz2Jy1mal9prb4HK5Z4GifaPcMa0sMCR7CD4d+YH/h/hY/F6DMWsavab8C8Mf+f2zwGKPOyO1jb+eelffw3p73uHjwxYR6hbpngjtyUZxLdyuT5gqCZ/TpuqkQLuHe4eRV5vGXCX8hyrdlKWJBliAuGHgBHyZ9yFu73pIgWAghugWLP/Q/TdtAm03O2gVpG7Sg+OgGKMsCp127Lctq+pyKviYgrgmK/aPBL5KFftHg1xdy9mo5y17B7Z52ITqfq3vcVwe+IjE9sVVBcGsXxbm4Fse1tkzaiqMrqHZUkxCQwLDgYSc97vS40xkZOpJdebt4fcfr/H3y32tngjshHcK1MK6ouojCqkKCLEEd/pqtVWotZVvONqB7pED9a/q/SC1O5bS+p7Xq+QuHL+ST5E/Ykr2FbTnbGBs+tp1H6HkSBAsheja9EfqM07bJN2n32auhPA/Kc6Eir3a/PLdmPw/KsrXSbeU5oDqgJEPbGqPotVlj3zAtKPYJr933Dq2dcXbtG8wd//2LVpkZM5OvDnzFyrSV
3H/K/S3OiXSXR2tmu+TjucqkpZWmUWYtw9fk26Lnu2oDn5VwVqNjVxSFu8bfxbXLruXL/V9y5bAr3UFwZ8wEexm8iPaJJrM8k915u7v0YrO1mWtxqA4SAhKI9Yv19HCa1D+wf6vfhAFE+kRyTv9z+PLAl7y9621emfNKO46ua5AgWAjR+xjMENBH25risGuBcMkxrbRb6TEoydQC5NJMLQ+5PAcq8rVg2T27vKvpc5v8tDrLrrSMgD7QZ4LWjCS4X5tylkXbTI6ajElnIrM8kwNFB1qc8tLaGsEuQZYgwr3DyanIYX/hfsZFjGv2c3MqcthwbAMAf+j3hyaPnxg5kRl9ZrAqYxXPbn7W3eWuM3KCQVuIuHjfYj7e93GXDoK7UypEe7lmxDV8nfI1v6f/TnJBsvsTip5CgmAhhGiM3qClP/hHN36cw1Y7g1yeq92W5dTul+dpgXJ5njb77LRrjUSspdriP5dNb2u33iEQM1HbYk/RSr+ZWzYbKFrP2+jNpKhJrMpYxcq0lS0OgtuaDgFaXnBORQ7JhcktCoKXHF6CisrY8LHNnrG8Y9wdrM5Y7c4jNuvNnVb278qhV/Jp8qeszljNgcIDDAxqWUm5unbm7uTXo79yzYhrCDAHtNsYnaqT1Rmrge6RCtFe4vzjmBc3j6WpS3ln1zs8NfMpTw+pXUkQLIQQ7UFv1OoTN6dGsapCVRGU59dPx8hPgbSNWkm4inzYv1TbABSdVj0j5hQITqiziC+8dl/fsTVde5tZsbNYlbGKxPRErh91fbOfl1+ZT2F1IQoK/QL6tfr1BwcNds/AtUTdNsnNfq3gwZzd/2y+O/gdoM0Cd1ZZrFj/WOb0ncPyI8t5f8/7/HP6P1t1HqvDyj0r7yGrPIutOVt54/Q38DJ4tcsY9+bvpaCqAB+jD+PCm/+GpCe4buR1LE1dyrIjyxi3bxznDTwPs75npHJJECyEEJ1NUbQSbV5BQAMfl9urtYYi6Ru1oDh9k5aPnLVL207GK6gmIA7XZq6jx0Kf8RA1SmsqIlrENeO3K3cX+ZX5hHiFNOt5rlSIGL+YNgVhro+e9+TvQVXVZgWlBwoPsK9gHwadgXlxLWtycOuYW1l6eClWp7XTUiFcrh5+NcuPLOfHwz9y+7jbCfcOb/E5Pt//OVnl2kLXbTnbuHflvTw/+/kGy8O1lCsVYkrUFIy97M3mkOAhnB53OsuPLOeJDU/w1s63WDh8IRcOurDLl4lriixjFkKIrsZghtiJMOUWuPh9uHsv3LUXLnoPpt4OIy+CfrMgYoQW8Co1v8orCyFvPxxZDbs+g2X3w3/nwaIYeONU+OFu2P6xVne5C7aj7moifSIZGjwUFZVVGaua/by25gO7jAodhU7Rsa9gH09seAKn2vTPzFUbeEafGQRaAlv0elG+UVw+7HIAhoYMbfF422JU2CjGhY/D7rTzcdLHLX5+ha2CN3e+CcAFAy/ArDezMn0lj617rM2tpwF3l7jelApR16IZi7j/lPuJ8I4gpzKHpzc/zRlfnsHbu96m1Frq6eG1mswECyFEdxDQBwLOg+HnnfiY06kFwOU5tVUuCg5BxlatTnJ5jtae+tgO2PyO9hyzvzZTHD0GIkZC5AgIGajlQAu3mbEzSSpIYmXaSs4dcG6znuPKB25rEBzlG8XfJ/+dx9c9zqfJn1JqLeWf0/950lbGTtXJj4e1ILglqRB13TH2DqZETWF02OhWj7u1rh5+NVtztvJZ8mdcP+r6FtVX/ijpIwqqCoj1i+XByQ8yM2YmdybeyTcp3xBiCeHO8Xe2elx5lXnszt8NwPQ+01t9nu7MrDdz2dDLuGjQRXx38Dve3vU26WXpvLD1Bf67679cNvQyrhh6RYvfeHma/LYTQojuTqfTqkz4hADHzeCpKhSnQcYWLSDO2AKZ26G6BA6v1DYXvRnCh9QGxREjtFuvrlu7taPNipnF6zteZ23m2mZ3j2ttu+SGXDToIvyMfty/6n5+
OvwTZbYynpn5DBaD5YRjt2RvIas8C1+jLzNjZ7bq9fQ6PVOipzR9YAeYGTuTeP94UktS+erAV1w57MpmPa+4uph397wLwM1jbsaoMzK772wemfIIj6x9hHd2v0OIV0izz3c814K4ocFDCfMOa9U5egqj3sgFgy7gnAHnsDR1KW/tfItDxYd4Y+cbfLD3Ay4ZfAlXDb+q0xZVtpUEwUII0ZMpCgT21TbXLLLDrjX4yNii5Rhn74bsPWAtq50xrsu/D4T0h5ABENy/dj8wDgwtbyncnQwNGUqYVxi5lblsytrUZOcsVVXbLR3C5YyEM/Ax+nB34t38nv47N/5yIy+d9hJ+Jr96x7lSIebFz+uWC5d0io6Fwxfy2LrH+N/e/3HpkEublc/7/p73KbWWMiBwAGfGn+m+//yB51NQVcALW1/gqU1PEWQJatUMeW9PhWiIQWfgj/3+yB8S/sCKoyt4a+dbJBUk8e6ed/kk+RN+vuDnbjErLEGwEEL0NnqDtlgualTtfU4nFKVC1m4tKM7aDdm7oOhobaOQw7/XP4+i04JrV3AcMUyrXhE2+IQ2092Vq3vclwe+JDEtsckgOKMsg1JrKXpFT0JAQruNY0bMDF4//XVuXXErW7K3cN2y63j99NcJtgQDUO2o5udUrS14a1MhuoKz+5/NS9te4lj5MX5O/bnJOsd5lXl8mPQhALeOvRX9cf/urhtxHXmVeXyU9BEPrX6IIHNQi1oA25w21mauBejSNYw9RafoOD3udOb2ncuqjFW8sfMNon2iu0UADBIECyGEAC2lIriftg37v9r7q4ohZx8UHIT8g1oZt4KDkH8IbOVajePCVOCX2ueY/CBmfE2d41MgZgJ4B3fyN9R+ZsXO4ssDX7IyfSUPqA80WKWhuLqYD5M+5KO9HwEQ7x/frNSJlhgfMZ7/zv8vN/5yI0kFSSxcspC35r1FpE8kq9JXUWorJcI7gvER49v1dTuTWW/msiGX8fL2l3lvz3ucmXBmo1Ux3tn1DpX2SkaEjOC02BPbAyuKwl8n/pWCqgKWHF7CXYl38c68dxgZNrJZ49mes50yWxlB5iBGhIxo9ffV0ymKwqkxpzKjzwyqHFWeHk6zSRAshBDi5CwB0HeSttWlqlrXPHdwfEDLNc7YqjUAOZSobS4hA2oC4vHaflA8+Md0i4V4k6ImYdabOVZ+jP2F++t1zSquLuaDvR/wcdLHlNnKAC0N4oFJD3TIWIaGDOW9M97jhuU3kFqSylVLruLN09+sbZPc7yx0Svcu/LRg8ALe3vU2SQVJbMzayKSoSQ0ed6zsGJ8mfwrAbeNuO2mwrFN0PDHtCYqri1mbuZabV9zM+2e+36wazq5UiOl9pp8wy9yrqSrYq7TNVuXeV+xVeIG26LYb6Pq/fYQQQnQ9ilLbHCS+zop5hx1yk2rqG2/Wah3np9RuO+qUv1L0EBirBcRB8VqOsWs/OKHLLMjzMngxOWoyK9NXsjJ9JYODB1NUVaQFv/s+ptxWDsDAoIHcOOpG5sbN7dBANCEggQ/O+MAdCC9cutBdpqo7p0K4BFoCOXfAuXyS/Anv7XnvpEHwGzvfwOa0MTFyIlOiGl/MZ9QbeW7Wc1y37Dp25+/mxuU38sGZHxDpE9no89ytkrtrKoSqgq0Sqku1nP/qEqguO/FraznYKrT7rBU1++V17q+5dQW9juqTv2ZAX7irGW3juwAJgoUQQrQfvQEiR2rbxOu0+yoKagPizO1a+kTREXBY66RTNMA7pGYh3oCaxXiuxXn9wNT88lntYWbsTFamr2T5keVU2iv5OOljKuwVgNbZ7cbRN3Ja39M6bRY2yjeK9854j5t+uYmkgiT3ONrScrgruWrYVXy2/7OTtlI+UnKEb1K+AeD2sbc3q5GIt9GbV+a+wsIlC0ktSeX0L04nwjuC+IB44v1rtoB44vzjiPaJJqsii4PFB9EreqZGT+2Ib7NxtiotWK0u0baqmtvq0pr9Oo9Vlx53
fylUF2sBruro2HEqOjB4gdECBgv4RXTs67UjCYKFEEJ0LO9gGDRP21ycTijLqg2C3dsRKDwMZdla6+iKfC14Pp5ftBYUB/fTZo2DEmpvLf7t/i3MjNFKju0r2Me+gn2A1knrxtE3Mjt2tkdSEEK8Qnhn/jvcuuJWtuZs5fyB53f6GDpKU62UX9n+Cg7VwakxpzImfEyzzxtsCXYvMEwpSiG7Ipvsimw2HNtQ7zijzkigORCA0WGjCTAHNO8FnE4tV766tGaGtbTOflmdwLWhoLWk/q3D2uzvq2kKmP20zeRbs19za/LT3lSavLXHjN7avtF1n4+2b/TSNkNNsOsKertxBz1FbY9WKj1MSUkJAQEBFBcX4+/f/r9MhRBCNKG6TGv4kZ+i5RwXHKxNqagsbPy53qF1AuN+EBSnlXkLiNFujSfW2G2O65Zdx8asjQwNHuoOfpszA9nRbE4b+wv2MzRkaLfPB65rZ+5OLv/pcgw6A8suWOZupZxckMyF318IwOdnf86Q4CGtOn9RVRGpJanaVnSYI0UppJakcqQsE5tqdx93b/gMrvIdUCeFoLThzVoT6LY3U03wavGvCV7963ztf9zXfrX3WfxrA16Tj5bC1MO0NV6TILgBEgQLIUQXVlFQW6mi8DAUHNYC5sLD2sxxU7xDagLiGK0TnytA9osE30jt1ux7wtOKq4vJLMtkSPCQLhH89gYLlyxka85Wrhtxnbvr222/3kZiWiLz4+fzn5n/0Q502LRKJlXFUFVUZ7/uVlL/6+qS2vvrtP51AJkGPUeMRsp0OuaUV9DiuU6doU4AetzMa91Att7X/nWOqxPEyoK8k5IguANIECyEEN1UVbEWFNcNjl21joszwF7ZvPOYfME3QguI3cFxBPhF1dxXc2v2a/pconmcDu3nV1kIlUVQWchvWeu5/dCn+ClGlofOIaU8kysqdqJT4ZtKHxIqawLZ9pqBNVhqA9C6t/UCVldaQd2v66QWmH2188gbpQ7X1nhNcoKFEEL0HJYAiB6jbcdTVS3AKk6vCYrTa4Pjkgyt5FtplpbTaS2DgjItDaMxJt/6QbFr3xKoBUOmms217/po2tD9OroBWqWBykJts1aA6gRU7dqial+791VtUVZ1qTuoparo5PtVxSe83Ewgvk8UqSb4at9iVnp7gZeFc8rKSMg7euL4XDOqXoHavwXXZvav/7XFv/79roC3u/5cRKvITHADZCZYCCF6sepSKM3WFu65AuOyLO2+0mPaor3SLO3j9NbSGWsWHHlrOcpG75rFRnUWH7ke05u0j9d1eq2snM5QZ9Npt4r+uJnHOvt171ed2oyr6qi5Pf7rmtu6AWrdzd4JjRBMflp5PK8A8AriC6ODx2xp+CoGylQ7BkXHjyPuJDogTnuz4RVY86bDv1vUnRbtR2aChRBCiPbk+og7dEDjx1WX1QTEx2qC5Tq3VcW19VetdaoDuIJIp61m9rOoo7+b9qczaEGq0Vsrj6UogFL/VtHV7ptrglpLYE1wG1h///jHjqs2cLajmpe+mEdBVQEAFw++hOhx13TiNyx6KgmChRBCiNYw16Q5hPRv/nMc9toqAtZyLb3AVqnlKtuq6jQkqKzdnDZw2ms2Z519e+3MrdN+4mud8EGvWhO06mtmlXV1Zpfr3Or0WoqAV1DDm8m3U/NdzXozlw65lFe2v4JFb+H6Udd32muLnk2CYCGEEKKz6A01s5+Bnh5Jt3LlsCtJK01javRUQr1CPT0c0UNIECyEEEKILs3H6MMT05/w9DBED9NzqmoLIYQQQgjRTBIECyGEEEKIXkeCYCGEEEII0etIECyEEEIIIXodCYKFEEIIIUSv0yWC4FdeeYX4+HgsFguTJk1i48aNjR7/+eefM2TIECwWCyNHjuSnn36q9/jVV1+Noij1tjPOOKMjvwUhhBBCCNGNeDwI/vTTT7n77rt55JFH2Lp1K6NHj2b+/Pnk5OQ0ePzatWu59NJLue6669i2
bRvnnnsu5557Lrt376533BlnnMGxY8fc2+LFizvj2xFCCCGEEN2AoqontJTpVJMmTWLixIm8/PLLADidTmJjY7ntttu47777Tjh+wYIFlJeX88MPP7jvmzx5MmPGjOH1118HtJngoqIivvnmm1aNqa29qIUQQgghRMdqa7zm0Zlgq9XKli1bmDt3rvs+nU7H3LlzWbduXYPPWbduXb3jAebPn3/C8YmJiYSHhzN48GBuuukm8vPzTzqO6upqSkpK6m1CCCGEEKLn8mgQnJeXh8PhICIiot79ERERZGVlNficrKysJo8/44wz+OCDD1ixYgX//ve/WblyJWeeeSYOh6PBcy5atIiAgAD3Fhsb28bvTAghhBBCdGU9sm3yJZdc4t4fOXIko0aNon///iQmJjJnzpwTjr///vu5++673V+XlJRIICyEEEII0YN5dCY4NDQUvV5PdnZ2vfuzs7OJjIxs8DmRkZEtOh6gX79+hIaGkpKS0uDjZrMZf3//epsQQgghhOi5PBoEm0wmxo8fz4oVK9z3OZ1OVqxYwZQpUxp8zpQpU+odD7B8+fKTHg+Qnp5Ofn4+UVFR7TNwIYQQQgjRrXm8RNrdd9/NW2+9xfvvv09SUhI33XQT5eXlXHPNNQBcddVV3H///e7j77jjDpYuXcozzzzDvn37ePTRR9m8eTO33norAGVlZdx7772sX7+e1NRUVqxYwTnnnMOAAQOYP3++R75HIYQQQgjRtXg8J3jBggXk5uby8MMPk5WVxZgxY1i6dKl78dvRo0fR6Wpj9alTp/Lxxx/z97//nQceeICBAwfyzTffMGLECAD0ej07d+7k/fffp6ioiOjoaObNm8fjjz+O2Wz2yPcohBBCCCG6Fo/XCe6KpE6wEEIIIUTX1q3rBAshhBBCCOEJEgQLIYQQQoheR4JgIYQQQgjR60gQLIQQQggheh0JgoUQQgghRK8jQbAQQgghhOh1JAgWQgghhBC9jgTBQgghhBCi15EgWAghhBBC9Doeb5vcFbma6JWUlHh4JEIIIYQQoiGuOK21zY8lCG5AaWkpALGxsR4eiRBCCCGEaExpaSkBAQEtfp6itjZ87sGcTieZmZn4+fmhKEqbzlVSUkJsbCxpaWmt6mstWk+uvWfJ9fccufaeJdffc+Tae1ZnX39VVSktLSU6OhqdruUZvjIT3ACdTkdMTEy7ntPf31/+Q3qIXHvPkuvvOXLtPUuuv+fItfeszrz+rZkBdpGFcUIIIYQQoteRIFgIIYQQQvQ6EgR3MLPZzCOPPILZbPb0UHodufaeJdffc+Tae5Zcf8+Ra+9Z3e36y8I4IYQQQgjR68hMsBBCCCGE6HUkCBZCCCGEEL2OBMFCCCGEEKLXkSBYCCGEEEL0OhIEd6BXXnmF+Ph4LBYLkyZNYuPGjZ4eUo/0+++/c/bZZxMdHY2iKHzzzTf1HldVlYcffpioqCi8vLyYO3cuBw4c8Mxge5hFixYxceJE/Pz8CA8P59xzzyU5ObneMVVVVdxyyy2EhITg6+vLBRdcQHZ2todG3LO89tprjBo1yl2YfsqUKSxZssT9uFz7zvPkk0+iKAp33nmn+z65/h3n0UcfRVGUetuQIUPcj8u171gZGRlcccUVhISE4OXlxciRI9m8ebP78e7yd1eC4A7y6aefcvfdd/PII4+wdetWRo8ezfz588nJyfH00Hqc8vJyRo8ezSuvvNLg40899RQvvvgir7/+Ohs2bMDHx4f58+dTVVXVySPteVauXMktt9zC+vXrWb58OTabjXnz5lFeXu4+5q677uL777/n888/Z+XKlWRmZnL++ed7cNQ9R0xMDE8++SRbtmxh8+bNnHbaaZxzzjns2bMHkGvfWTZt2sQbb7zBqFGj6t0v179jDR8+nGPHjrm31atXux+Ta99xCgsLmTZtGkajkSVLlrB3716eeeYZgoKC3Md0m7+7qugQp5xyinrLLbe4v3Y4HGp0dLS6aNEiD46q
5wPUr7/+2v210+lUIyMj1aefftp9X1FRkWo2m9XFixd7YIQ9W05OjgqoK1euVFVVu9ZGo1H9/PPP3cckJSWpgLpu3TpPDbNHCwoKUt9++2259p2ktLRUHThwoLp8+XJ15syZ6h133KGqqvzb72iPPPKIOnr06AYfk2vfsf72t7+p06dPP+nj3envrswEdwCr1cqWLVuYO3eu+z6dTsfcuXNZt26dB0fW+xw+fJisrKx6P4uAgAAmTZokP4sOUFxcDEBwcDAAW7ZswWaz1bv+Q4YMoW/fvnL925nD4eCTTz6hvLycKVOmyLXvJLfccgtnnXVWvesM8m+/Mxw4cIDo6Gj69evH5ZdfztGjRwG59h3tu+++Y8KECVx00UWEh4czduxY3nrrLffj3envrgTBHSAvLw+Hw0FERES9+yMiIsjKyvLQqHon1/WWn0XHczqd3HnnnUybNo0RI0YA2vU3mUwEBgbWO1auf/vZtWsXvr6+mM1mbrzxRr7++muGDRsm174TfPLJJ2zdupVFixad8Jhc/441adIk3nvvPZYuXcprr73G4cOHmTFjBqWlpXLtO9ihQ4d47bXXGDhwIMuWLeOmm27i9ttv5/333we6199dg6cHIIToGW655RZ2795dLy9PdLzBgwezfft2iouL+eKLL1i4cCErV6709LB6vLS0NO644w6WL1+OxWLx9HB6nTPPPNO9P2rUKCZNmkRcXByfffYZXl5eHhxZz+d0OpkwYQL/+te/ABg7diy7d+/m9ddfZ+HChR4eXcvITHAHCA0NRa/Xn7ASNTs7m8jISA+NqndyXW/5WXSsW2+9lR9++IHffvuNmJgY9/2RkZFYrVaKiorqHS/Xv/2YTCYGDBjA+PHjWbRoEaNHj+aFF16Qa9/BtmzZQk5ODuPGjcNgMGAwGFi5ciUvvvgiBoOBiIgIuf6dKDAwkEGDBpGSkiL/9jtYVFQUw4YNq3ff0KFD3eko3envrgTBHcBkMjF+/HhWrFjhvs/pdLJixQqmTJniwZH1PgkJCURGRtb7WZSUlLBhwwb5WbQDVVW59dZb+frrr/n1119JSEio9/j48eMxGo31rn9ycjJHjx6V699BnE4n1dXVcu072Jw5c9i1axfbt293bxMmTODyyy9378v17zxlZWUcPHiQqKgo+bffwaZNm3ZCKcz9+/cTFxcHdLO/u55emddTffLJJ6rZbFbfe+89de/eveoNN9ygBgYGqllZWZ4eWo9TWlqqbtu2Td22bZsKqM8++6y6bds29ciRI6qqquqTTz6pBgYGqt9++626c+dO9ZxzzlETEhLUyspKD4+8+7vpppvUgIAANTExUT127Jh7q6iocB9z4403qn379lV//fVXdfPmzeqUKVPUKVOmeHDUPcd9992nrly5Uj18+LC6c+dO9b777lMVRVF//vlnVVXl2ne2utUhVFWuf0e655571MTERPXw4cPqmjVr1Llz56qhoaFqTk6Oqqpy7TvSxo0bVYPBoD7xxBPqgQMH1I8++kj19vZWP/zwQ/cx3eXvrgTBHeill15S+/btq5pMJvWUU05R169f7+kh9Ui//fabCpywLVy4UFVVrVzLQw89pEZERKhms1mdM2eOmpyc7NlB9xANXXdAfffdd93HVFZWqjfffLMaFBSkent7q+edd5567Ngxzw26B7n22mvVuLg41WQyqWFhYeqcOXPcAbCqyrXvbMcHwXL9O86CBQvUqKgo1WQyqX369FEXLFigpqSkuB+Xa9+xvv/+e3XEiBGq2WxWhwwZor755pv1Hu8uf3cVVVVVz8xBCyGEEEII4RmSEyyEEEIIIXodCYKFEEIIIUSvI0GwEEIIIYTodSQIFkIIIYQQvY4EwUIIIYQQoteRIFgIIYQQQvQ6EgQLIYQQQoheR4JgIYQQQgjR60gQLIQQwi0xMRFFUSgqKvL0UIQQokNJECyEEEIIIXodCYKFEEIIIUSvI0GwEEJ0IU6nk0WLFpGQ
kICXlxejR4/miy++AGpTFX788UdGjRqFxWJh8uTJ7N69u945vvzyS4YPH47ZbCY+Pp5nnnmm3uPV1dX87W9/IzY2FrPZzIABA3jnnXfqHbNlyxYmTJiAt7c3U6dOJTk52f3Yjh07mD17Nn5+fvj7+zN+/Hg2b97cQVdECCE6hgTBQgjRhSxatIgPPviA119/nT179nDXXXdxxRVXsHLlSvcx9957L8888wybNm0iLCyMs88+G5vNBmjB68UXX8wll1zCrl27ePTRR3nooYd477333M+/6qqrWLx4MS+++CJJSUm88cYb+Pr61hvHgw8+yDPPPMPmzZsxGAxce+217scuv/xyYmJi2LRpE1u2bOG+++7DaDR27IURQoh2pqiqqnp6EEIIIbQZ2uDgYH755RemTJnivv9Pf/oTFRUV3HDDDcyePZtPPvmEBQsWAFBQUEBMTAzvvfceF198MZdffjm5ubn8/PPP7uf/9a9/5ccff2TPnj3s37+fwYMHs3z5cubOnXvCGBITE5k9eza//PILc+bMAeCnn37irLPOorKyEovFgr+/Py+99BILFy7s4CsihBAdR2aChRCii0hJSaGiooLTTz8dX19f9/bBBx9w8OBB93F1A+Tg4GAGDx5MUlISAElJSUybNq3eeadNm8aBAwdwOBxs374dvV7PzJkzGx3LqFGj3PtRUVEA5OTkAHD33Xfzpz/9iblz5/Lkk0/WG5sQQnQXEgQLIUQXUVZWBsCPP/7I9u3b3dvevXvdecFt5eXl1azj6qY3KIoCaPnKAI8++ih79uzhrLPO4tdff2XYsGF8/fXX7TI+IYToLBIECyFEFzFs2DDMZjNHjx5lwIAB9bbY2Fj3cevXr3fvFxYWsn//foYOHQrA0KFDWbNmTb3zrlmzhkGDBqHX6xk5ciROp7NejnFrDBo0iLvuuouff/6Z888/n3fffbdN5xNCiM5m8PQAhBBCaPz8/PjLX/7CXXfdhdPpZPr06RQXF7NmzRr8/f2Ji4sD4B//+AchISFERETw4IMPEhoayrnnngvAPffcw8SJE3n88cdZsGAB69at4+WXX+bVV18FID4+noULF3Lttdfy4osvMnr0aI4cOUJOTg4XX3xxk2OsrKzk3nvv5cILLyQhIYH09HQ2bdrEBRdc0GHXRQghOoIEwUII0YU8/vjjhIWFsWjRIg4dOkRgYCDjxo3jgQcecKcjPPnkk9xxxx0cOHCAMWPG8P3332MymQAYN24cn332GQ8//DCPP/44UVFR/OMf/+Dqq692v8Zrr73GAw88wM0330x+fj59+/blgQceaNb49Ho9+fn5XHXVVWRnZxMaGsr555/PY4891u7XQgghOpJUhxBCiG7CVbmhsLCQwMBATw9HCCG6NckJFkIIIYQQvY4EwUIIIYQQoteRdAghhBBCCNHryEywEEIIIYTodSQIFkIIIYQQvY4EwUIIIYQQoteRIFgIIYQQQvQ6EgQLIYQQQoheR4JgIYQQQgjR60gQLIQQQggheh0JgoUQQgghRK/z/68CkTiqiWx6AAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import pandas as pd\n", + "from matplotlib import pyplot as plt\n", + "collected_data = pd.DataFrame(columns=[\"epochs\", \"train_losses\", \"train_accuracies\", \"val_losses\", \"val_accuracies\", \"optimizer\"])\n", + "dataset = \"mnist\"\n", + "# \"shampoo\", \"shampoo_diagonal\", \"shampoo_momentum\", \"shampoo_momentum_diagonal\", \"shampoo_heuristic\", \"shampoo_heuristic_diagonal\", \"adam\"\n", + "optimizers = [\"shampoo\", \"shampoo_momentum\", \"adam\"] \n", + "\n", + "for optimizer in optimizers:\n", + " path = f\"metrics/metrics_{optimizer}_{dataset}.csv\"\n", + " df = pd.read_csv(path, names=[\"epochs\", \"train_losses\", \"train_accuracies\", \"val_losses\", \"val_accuracies\"])\n", + " df[\"optimizer\"] = optimizer\n", + " collected_data = pd.concat([collected_data, df], axis=0)\n", + "\n", + "figure = plt.figure(figsize=(8,8))\n", + "for optimizer in optimizers:\n", + " print(collected_data[collected_data[\"optimizer\"]==optimizer])\n", + " optimzer_data = collected_data[collected_data[\"optimizer\"]==optimizer]\n", + " plt.plot(optimzer_data[\"epochs\"], optimzer_data[\"val_losses\"], label = optimizer)\n", + "\n", + "plt.title(\"Comparison between multiple variants of the shampoo optimizer and adam\")\n", + "plt.xlabel(\"epochs\")\n", + "plt.ylabel(\"validation loss\")\n", + "plt.legend()\n", + "plt.show()\n", + "figure.savefig(f\"diagrams/diagram_metrics_{dataset}.png\", format=\"png\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "65b2c54b", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": 
"ipython3", + "version": "3.10.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/scripts/staging/shampoo_optimizer/diagrams/diagram_metrics_mnist.png b/scripts/staging/shampoo_optimizer/diagrams/diagram_metrics_mnist.png new file mode 100644 index 00000000000..11cfa4b58e1 Binary files /dev/null and b/scripts/staging/shampoo_optimizer/diagrams/diagram_metrics_mnist.png differ diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_adam_cifar.csv b/scripts/staging/shampoo_optimizer/metrics/metrics_adam_cifar.csv new file mode 100644 index 00000000000..efcfbc37287 --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_adam_cifar.csv @@ -0,0 +1,90 @@ +1.0,2.0366285569366878,0.26598543709489775,1.9111378509607304,0.328 +2.0,1.863508219437754,0.3412155143759349,1.8029196566374601,0.3632 +3.0,1.7781075000737478,0.37343204670101376,1.7365597405952091,0.3826 +4.0,1.7178380710578425,0.3947648329732425,1.6806893259419955,0.4002 +5.0,1.669877784216493,0.41048020192787105,1.6390592825854502,0.4138 +6.0,1.6350585694820974,0.42428172261924546,1.6146616228664268,0.4242 +7.0,1.6090974892669985,0.4327987369120824,1.5910762546497168,0.4314 +8.0,1.587529581892735,0.44248223782615925,1.5692068211402614,0.4374 +9.0,1.5657862599669354,0.4502518904769819,1.5481026555575144,0.4452 +10.0,1.5430813878099718,0.45822149742396545,1.5275131045309027,0.4538 +11.0,1.5199649145732006,0.46613397457204586,1.506689447582152,0.4618 +12.0,1.4982848263026722,0.4736133039720792,1.488505237593026,0.4718 +13.0,1.4789747560435527,0.48032605534319434,1.4727941864822314,0.4766 +14.0,1.4621960038234303,0.4855388898121987,1.4603915792006466,0.4802 +15.0,1.4475009074972,0.4910233505068971,1.4511544780956973,0.483 +16.0,1.4349582625238917,0.49627929200598303,1.4428575600357896,0.485 +17.0,1.423895901496466,0.4997927746385241,1.4336665640797934,0.4904 +18.0,1.414059771299443,0.5039585757021772,1.4282307999663446,0.4916 
+19.0,1.404377145998948,0.5074814068472661,1.4222616630433387,0.4968 +20.0,1.3953288419933125,0.5116233172677415,1.4168349642383111,0.499 +21.0,1.386648730995609,0.5150796701013794,1.4117382901756448,0.5014 +22.0,1.37796944077023,0.5181361143426957,1.4065785507454347,0.5026 +23.0,1.3696668695223018,0.5211639936845605,1.402195989771859,0.505 +24.0,1.3616434672260533,0.5245060869203922,1.3972141118943933,0.5052 +25.0,1.3542630371676898,0.5273719253780954,1.3935068999222704,0.5078 +26.0,1.3473694290558793,0.5303141100216054,1.3907051889091704,0.5102 +27.0,1.3402073426222127,0.5322232840285857,1.3892659032226624,0.508 +28.0,1.3336872003973426,0.5347987992354993,1.3865499433195474,0.506 +29.0,1.327384157679365,0.537255380588333,1.3873974009402807,0.5034 +30.0,1.3216604201787392,0.5391406639521356,1.3836836288514893,0.5054 +31.0,1.3161280598647895,0.5419685889978394,1.3832381357516508,0.507 +32.0,1.3105416127136869,0.5436824829649327,1.3800900635777025,0.5088 +33.0,1.3047664781705213,0.5450535981386072,1.3801428125459738,0.5088 +34.0,1.2996126109982393,0.5470817059996675,1.3776929353987932,0.5116 +35.0,1.293823188412723,0.5487145795246802,1.375557900607078,0.514 +36.0,1.2884030433697722,0.5512854204753199,1.3705886811509542,0.5142 +37.0,1.2831253551292954,0.552880380588333,1.3683649044462805,0.5134 +38.0,1.2783286448400695,0.553456352833638,1.3651622891751962,0.515 +39.0,1.2733306632853874,0.5543226483297324,1.3643001109138162,0.5164 +40.0,1.268891600263991,0.5557508933023101,1.3617116557796567,0.5192 +41.0,1.264266710657484,0.557293397872694,1.3587595770290883,0.5178 +42.0,1.2596929659095248,0.5590452052517866,1.3554118331969724,0.519 +43.0,1.2553917233114704,0.560554470666445,1.3546034200212493,0.5224 +44.0,1.2516899362081932,0.5619302600963936,1.353135177682238,0.5236 +45.0,1.2473519464193588,0.5630967467176333,1.3521054237005408,0.5234 +46.0,1.2433013995256341,0.5652105492770483,1.3513343518829966,0.5234 
+47.0,1.239448787494256,0.5670148121987701,1.3487089014064364,0.5268 +48.0,1.2354618536988764,0.5683812531161708,1.3461108297962203,0.5266 +49.0,1.2318261814835512,0.5694667192953299,1.3446704091638717,0.5274 +50.0,1.2282065364187986,0.5719851047033405,1.344586378891869,0.5296 +51.0,1.224133646245753,0.5726992271896294,1.3432699487117177,0.5304 +52.0,1.220546875066253,0.5752082640850922,1.3426149725440661,0.5308 +53.0,1.217120690696172,0.5763555343194283,1.3426393843545892,0.5318 +54.0,1.213568203392273,0.577783779292006,1.3404433750471476,0.5328 +55.0,1.2103219298678027,0.5786931818181817,1.3395115771735926,0.5354 +56.0,1.206984211848854,0.5799261467508725,1.3372971072601654,0.5356 +57.0,1.2037463317947639,0.5816639313611434,1.3349564292855296,0.5356 +58.0,1.2002898800179351,0.5821495346518198,1.334121740633766,0.535 +59.0,1.196807843220183,0.5832921306298819,1.3337720183945212,0.537 +60.0,1.193598377912578,0.5850345894964267,1.3323687113361222,0.5366 +61.0,1.1900622506273248,0.5858058417816187,1.331289361658365,0.5352 +62.0,1.1871378053459447,0.5867438092072461,1.3306094609968828,0.5378 +63.0,1.1841863183111265,0.5882577488781785,1.3306050856582037,0.5376 +64.0,1.1808866625784924,0.5883720084759847,1.3297544647820971,0.5372 +65.0,1.1784611619616456,0.5891718256606282,1.3293163604280938,0.539 +66.0,1.1754922767281581,0.5895146044540468,1.3281268708805458,0.539 +67.0,1.1727712033092526,0.591171368622237,1.3243216554218156,0.5398 +68.0,1.16986368405737,0.5916948853249128,1.3250558910354457,0.5402 +69.0,1.1669975974070492,0.5933277588499252,1.32206820973747,0.541 +70.0,1.1640628770424661,0.5947419810536813,1.3229685907464526,0.5398 +71.0,1.1614204583284387,0.5954083222536147,1.324079172392963,0.5418 +72.0,1.1584428785720262,0.5964034194781452,1.322592790482728,0.54 +73.0,1.155897042957851,0.5969840659797241,1.321316808597242,0.5404 +74.0,1.1533538946445323,0.5984221788266578,1.3240338764937882,0.5402 
+75.0,1.150596395153521,0.5990838457703174,1.3250515243843302,0.5414 +76.0,1.1479828276607542,0.5997361642014293,1.3213816765518247,0.5408 +77.0,1.1456013991649443,0.6007359356822337,1.322209337122594,0.5414 +78.0,1.1428948834559163,0.6021402900116337,1.3208159001419877,0.5416 +79.0,1.1405840977255453,0.6030258018946318,1.3190405319298553,0.5436 +80.0,1.138456276159628,0.6046540011633704,1.319016162754993,0.5428 +81.0,1.1360969314308267,0.604801499916902,1.317674437313949,0.5436 +82.0,1.1335010399508012,0.6058584011966096,1.3186716090087884,0.5436 +83.0,1.13123556274039,0.60646761259764,1.3147843189430144,0.5432 +84.0,1.1291076858663522,0.6074959489778959,1.3154507688752388,0.5422 +85.0,1.1270488055037702,0.6081815065647332,1.3113865161079785,0.5454 +86.0,1.1246272411531353,0.6084671555592488,1.31230086333935,0.543 +87.0,1.1226729512819174,0.6089195196941998,1.3106131954976976,0.5432 +88.0,1.1202892968081963,0.6099572045870034,1.3103511664799978,0.5434 +89.0,1.11780839667264,0.6109523018115339,1.3103589073105326,0.5418 +90.0,1.115877186853028,0.6116378593983712,1.3095933331067162,0.5456 diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_adam_cifar.csv.mtd b/scripts/staging/shampoo_optimizer/metrics/metrics_adam_cifar.csv.mtd new file mode 100644 index 00000000000..d13a02334c8 --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_adam_cifar.csv.mtd @@ -0,0 +1,12 @@ +{ + "data_type": "matrix", + "value_type": "double", + "rows": 90, + "cols": 5, + "nnz": 450, + "format": "csv", + "author": "nicol", + "header": false, + "sep": ",", + "created": "2026-01-17 19:35:45 MEZ" +} \ No newline at end of file diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_adam_mnist.csv b/scripts/staging/shampoo_optimizer/metrics/metrics_adam_mnist.csv new file mode 100644 index 00000000000..5fa736eb04c --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_adam_mnist.csv @@ -0,0 +1,60 @@ 
+1.0,0.6600831748999706,0.7919920634920635,0.45846249291763935,0.844 +2.0,0.25601354551075184,0.923,0.3060521207758516,0.909 +3.0,0.1791216355591798,0.947125,0.21327004292691254,0.933 +4.0,0.1399477948175259,0.95775,0.1736507581045767,0.942 +5.0,0.10750665179781986,0.9675,0.1505918516055093,0.952 +6.0,0.09024092394376199,0.971875,0.15272215195884234,0.949 +7.0,0.08109318636628057,0.974,0.18253549984362463,0.943 +8.0,0.07343285225967074,0.9765,0.1963133682103849,0.936 +9.0,0.06492667107394304,0.97925,0.16139850527851352,0.948 +10.0,0.05564659771922558,0.9825,0.15813024537483578,0.949 +11.0,0.04885065099874338,0.984625,0.13685280674984188,0.958 +12.0,0.04345266142523965,0.986,0.1295635143772901,0.963 +13.0,0.03789947918193832,0.988125,0.1280999416145119,0.964 +14.0,0.03440348961873195,0.98975,0.12033225446550745,0.967 +15.0,0.03113956579003195,0.9905,0.14465871496448326,0.956 +16.0,0.02782411652521333,0.992,0.13935065575895655,0.959 +17.0,0.025903924198902143,0.992875,0.13805741037482513,0.96 +18.0,0.023049034921953607,0.994,0.1390425642677865,0.958 +19.0,0.021960489895278133,0.99375,0.14567394365139114,0.953 +20.0,0.022683080115491985,0.992875,0.13101678745364734,0.962 +21.0,0.01951068056535099,0.99375,0.16387851254291194,0.953 +22.0,0.02072305049782118,0.993375,0.13199021229927832,0.963 +23.0,0.01573298327963868,0.9955,0.11595083667347869,0.966 +24.0,0.016863144556757713,0.994625,0.12464655755935368,0.964 +25.0,0.011906263883559153,0.99675,0.09555637996964318,0.975 +26.0,0.01387731248733787,0.995,0.07513476435524133,0.973 +27.0,0.014348454929750287,0.99525,0.09799803592380817,0.973 +28.0,0.014791121236119583,0.995,0.09913795348726741,0.97 +29.0,0.01745363921146775,0.995,0.10855312385534872,0.967 +30.0,0.016810133122882962,0.994625,0.09143590778251723,0.972 +31.0,0.013776919442576465,0.995125,0.09972306275201406,0.973 +32.0,0.012081841240356836,0.995875,0.1042540514767386,0.971 +33.0,0.009673850822736923,0.997,0.08166328277502062,0.976 
+34.0,0.010130896530972777,0.99775,0.08359973442180495,0.975 +35.0,0.009486136169168992,0.996375,0.09465088299135849,0.969 +36.0,0.011784477117724483,0.9964980158730159,0.12253367435178857,0.964 +37.0,0.01285823612554023,0.9955,0.0778909504372984,0.973 +38.0,0.005531593403143221,0.99875,0.08527911368874061,0.978 +39.0,0.004174380412227944,0.99925,0.06256664584964217,0.98 +40.0,0.003724836264838745,0.999375,0.06596277050627802,0.977 +41.0,0.00373747439026559,0.999,0.106305429921357,0.975 +42.0,0.00446614766949216,0.99875,0.06967587883698868,0.983 +43.0,0.0033526953383332878,0.999125,0.06330396928658376,0.984 +44.0,0.0028230799970961214,0.999375,0.08233284112176356,0.979 +45.0,0.0027498272954112166,0.99975,0.06967829332529306,0.983 +46.0,0.0021902541798457316,0.9995,0.12228901254433457,0.971 +47.0,0.014584827217146804,0.994625,0.11751439804792264,0.973 +48.0,0.003940440886971421,0.999125,0.07298610902463333,0.982 +49.0,0.002838167116175086,0.999375,0.05795638638785478,0.986 +50.0,0.0024289928108280536,0.99925,0.07105060997549256,0.98 +51.0,8.843184635752194E-4,1.0,0.06353335404859009,0.984 +52.0,6.38977018076913E-4,1.0,0.06250178394164058,0.986 +53.0,0.0010020022685720159,0.99975,0.08592793990596928,0.977 +54.0,0.003981127656437652,0.99875,0.14202391884937093,0.964 +55.0,0.009452855843030253,0.99725,0.11272917235094357,0.969 +56.0,0.02030383425854622,0.994,0.22102710131762598,0.961 +57.0,0.0201962723872667,0.992625,0.0746092794797957,0.98 +58.0,0.012778119867117035,0.99625,0.08488781163569933,0.98 +59.0,0.005098323074058392,0.998625,0.0705106763422874,0.983 +60.0,0.002205910690743962,0.9995,0.06720935609152047,0.985 diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_adam_mnist.csv.mtd b/scripts/staging/shampoo_optimizer/metrics/metrics_adam_mnist.csv.mtd new file mode 100644 index 00000000000..04598b34d75 --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_adam_mnist.csv.mtd @@ -0,0 +1,12 @@ +{ + "data_type": "matrix", + "value_type": 
"double", + "rows": 60, + "cols": 5, + "nnz": 300, + "format": "csv", + "author": "nicol", + "header": false, + "sep": ",", + "created": "2026-01-18 12:40:31 MEZ" +} \ No newline at end of file diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_cifar.csv b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_cifar.csv new file mode 100644 index 00000000000..06ae4237297 --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_cifar.csv @@ -0,0 +1,90 @@ +1.0,2.308857970725247,0.09733879009473159,2.303781414916082,0.1044 +2.0,2.304008930124782,0.09928120325743726,2.3033522297439544,0.1044 +3.0,2.3037258759929133,0.09828143177663287,2.303188402061223,0.1044 +4.0,2.3035719744954597,0.09843931776632873,2.3031009169012147,0.1044 +5.0,2.3034703247407293,0.09878209655974737,2.303046278570225,0.1044 +6.0,2.303396340692825,0.09849644756523183,2.303008793363021,0.1044 +7.0,2.303339197122346,0.09803940917400697,2.3029813853473597,0.1044 +8.0,2.303293240514144,0.09798227937510387,2.3029603952712767,0.1044 +9.0,2.3032551783927606,0.097525240983879,2.302943743402374,0.1044 +10.0,2.303222941068266,0.09741098138607279,2.3029301622777862,0.1044 +11.0,2.30319515139901,0.0971538972910088,2.3029188364644875,0.1044 +12.0,2.3031708517290452,0.09735385158716968,2.3029092180000568,0.1044 +13.0,2.303149352177799,0.09749667608442746,2.30290092519086,0.1044 +14.0,2.303130141169306,0.09738241648662123,2.3028936839986205,0.1044 +15.0,2.303112830057921,0.09746811118497589,2.3028872925240407,0.1044 +16.0,2.303097117460817,0.0978394548778461,2.3028815986504196,0.1044 +17.0,2.303082765498494,0.09755380588333055,2.3028764855041284,0.1044 +18.0,2.303069583506499,0.09738241648662123,2.302871861726882,0.1044 +19.0,2.303057416587966,0.09738241648662123,2.3028676548032334,0.1044 +20.0,2.3030461373916387,0.09772519528003988,2.3028638063815388,0.1044 +21.0,2.303035640092217,0.09789658467674921,2.302860268927418,0.1044 
+22.0,2.303025835907424,0.09781088997839454,2.3028570032873428,0.1044 +23.0,2.3030166497080473,0.09775376017949143,2.302853976886092,0.1044 +24.0,2.303008017419134,0.09758237078278212,2.302851162373504,0.1044 +25.0,2.3029998840026313,0.09763950058168522,2.3028485365948654,0.1044 +26.0,2.3029922018735722,0.09755380588333055,2.3028460797978676,0.0958 +27.0,2.3029849296437037,0.09726815688881502,2.3028437750148405,0.0958 +28.0,2.302978031115318,0.09718246219046035,2.3028416075764797,0.0958 +29.0,2.3029714744683667,0.09718246219046035,2.302839564725359,0.0958 +30.0,2.302965231598419,0.09675398869868705,2.3028376353059916,0.0958 +31.0,2.3029592775733714,0.09721102708991192,2.3028358095141903,0.0958 +32.0,2.3029535901845293,0.09703963769320259,2.3028340786928005,0.0958 +33.0,2.3029481495732043,0.09718246219046035,2.3028324351640124,0.0958 +34.0,2.302942937918265,0.09683968339704171,2.3028308720907593,0.0958 +35.0,2.3029379391731855,0.09683968339704171,2.3028293833614257,0.0958 +36.0,2.302933138843571,0.09695394299484793,2.3028279634933666,0.0958 +37.0,2.30292852379795,0.09689681319594481,2.302826607551716,0.0958 +38.0,2.3029240821061667,0.09692537809539638,2.3028253110806913,0.0958 +39.0,2.3029198029006626,0.09689681319594481,2.302824070045189,0.0958 +40.0,2.302915676256911,0.09695394299484793,2.3028228807808824,0.0958 +41.0,2.302911693089989,0.09701107279375104,2.3028217399514026,0.0958 +42.0,2.3029078450647407,0.09695394299484793,2.302820644511436,0.0958 +43.0,2.302904124517459,0.09695394299484793,2.302819591674796,0.0958 +44.0,2.302900524387369,0.09695394299484793,2.3028185788866904,0.0958 +45.0,2.30289703815653,0.09686824829649326,2.3028176037995474,0.0958 +46.0,2.302893659796928,0.09669685889978394,2.3028166642518713,0.0958 +47.0,2.3028903837237644,0.09695394299484793,2.302815758249686,0.0958 +48.0,2.3028872047540805,0.09695394299484793,2.302814883950203,0.0958 +49.0,2.302884118070067,0.09701107279375104,2.3028140396473953,0.0958 
+50.0,2.302881119186381,0.09709676749210569,2.3028132237592303,0.0958 +51.0,2.302878203920975,0.09741098138607279,2.302812434816333,0.0958 +52.0,2.302875368369051,0.09726815688881502,2.302811671451892,0.0958 +53.0,2.3028726088796594,0.09712533239155725,2.3028109323926556,0.0958 +54.0,2.302869922034726,0.09695394299484793,2.302810216450874,0.0958 +55.0,2.3028673046301313,0.09701107279375104,2.3028095225170846,0.0958 +56.0,2.302864753658669,0.09692537809539638,2.3028088495536134,0.0958 +57.0,2.302862266294632,0.0971538972910088,2.3028081965887446,0.0958 +58.0,2.3028598398798685,0.0971538972910088,2.302807562711441,0.0958 +59.0,2.30285747191109,0.09726815688881502,2.3028069470665855,0.0958 +60.0,2.3028551600284204,0.09726815688881502,2.302806348850665,0.0958 +61.0,2.3028529020048816,0.09726815688881502,2.302805767307857,0.0958 +62.0,2.3028506957368564,0.09741098138607279,2.3028052017264735,0.0958 +63.0,2.3028485392353937,0.09732528668771813,2.3028046514357197,0.0958 +64.0,2.3028464306182124,0.09743954628552434,2.3028041158027452,0.0958 +65.0,2.3028443681023987,0.09769663038058833,2.3028035942299465,0.0958 +66.0,2.302842349997736,0.09772519528003988,2.3028030861525015,0.0958 +67.0,2.3028403747005197,0.09766806548113678,2.30280259103611,0.0958 +68.0,2.3028384406879288,0.09761093568223367,2.3028021083749253,0.0958 +69.0,2.3028365465127987,0.09755380588333055,2.3028016376896474,0.0958 +70.0,2.302834690798834,0.09758237078278212,2.3028011785257787,0.0958 +71.0,2.302832872236157,0.09749667608442746,2.302800730452008,0.0958 +72.0,2.3028310895772277,0.09772519528003988,2.302800293058731,0.0958 +73.0,2.3028293416330525,0.09769663038058833,2.3027998659566746,0.0958 +74.0,2.3028276272696626,0.097782325078943,2.302799448775633,0.0958 +75.0,2.302825945404852,0.09786801977729766,2.3027990411632944,0.0958 +76.0,2.302824295005185,0.09801084427455542,2.302798642784158,0.0958 +77.0,2.3028226750831555,0.09841075286687717,2.3027982533185245,0.0958 
+78.0,2.3028210846945876,0.09838218796742562,2.3027978724615648,0.0958 +79.0,2.3028195229362116,0.09826792836961941,2.3027974999224523,0.0958 +80.0,2.302817988943382,0.09806797407345853,2.302797135423554,0.0958 +81.0,2.3028164818879793,0.09818223367126475,2.302796778699684,0.0958 +82.0,2.3028150009764223,0.09830064816353665,2.302796429497398,0.0958 +83.0,2.302813545447827,0.09830064816353665,2.3027960875743467,0.0958 +84.0,2.302812114572304,0.09818638856573043,2.3027957526986635,0.0958 +85.0,2.302810707649296,0.09815782366627888,2.3027954246483984,0.0958 +86.0,2.3028093240061196,0.09818638856573043,2.302795103210983,0.0958 +87.0,2.302807962996513,0.09804356406847267,2.3027947881827373,0.0958 +88.0,2.3028066239993126,0.09804356406847267,2.3027944793684005,0.0958 +89.0,2.3028053064172282,0.09784360977231178,2.3027941765806963,0.0958 +90.0,2.3028040096756257,0.09795786937011801,2.3027938796399248,0.0958 diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_cifar.csv.mtd b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_cifar.csv.mtd new file mode 100644 index 00000000000..85c0deb5c72 --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_cifar.csv.mtd @@ -0,0 +1,12 @@ +{ + "data_type": "matrix", + "value_type": "double", + "rows": 90, + "cols": 5, + "nnz": 450, + "format": "csv", + "author": "nicol", + "header": false, + "sep": ",", + "created": "2026-01-17 14:31:06 MEZ" +} \ No newline at end of file diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_diagonal_mnist.csv b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_diagonal_mnist.csv new file mode 100644 index 00000000000..ded8beac83a --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_diagonal_mnist.csv @@ -0,0 +1,60 @@ +1.0,1.529923114957812,0.615625,0.4163508024364405,0.873 +2.0,0.25060122984207545,0.927,0.22952393475133331,0.916 +3.0,0.1548000660061219,0.95425,0.16200895599016565,0.94 
+4.0,0.11335569472656531,0.964625,0.13560831935242834,0.955 +5.0,0.09042976548413022,0.971625,0.12328820920087886,0.963 +6.0,0.07636142457008835,0.976625,0.11456082728594813,0.968 +7.0,0.06691350664721099,0.979625,0.109108661140788,0.969 +8.0,0.05926908260776385,0.9825,0.10503831758586746,0.971 +9.0,0.053242110795777134,0.98375,0.1017104393824572,0.971 +10.0,0.048538005602347,0.985125,0.09997651189079403,0.971 +11.0,0.04440544159683867,0.98575,0.09838001759437191,0.971 +12.0,0.040599887346924705,0.98725,0.09778095571436933,0.972 +13.0,0.03734205810908477,0.98825,0.09713194222471225,0.974 +14.0,0.03431520433747054,0.98875,0.09692151531360448,0.974 +15.0,0.0316160967130946,0.98975,0.09732424066243325,0.976 +16.0,0.029327893562393736,0.990125,0.09678624020161315,0.974 +17.0,0.02731709621117975,0.990875,0.09601975312839364,0.975 +18.0,0.025400220810051326,0.99125,0.09493154669329465,0.975 +19.0,0.023849761916733277,0.992375,0.09431458155105914,0.975 +20.0,0.022359802493171675,0.992875,0.09444793298522774,0.975 +21.0,0.02104063762906861,0.993375,0.0940675280289127,0.976 +22.0,0.019815579920937687,0.99375,0.09397031760585552,0.976 +23.0,0.018703144105318287,0.994375,0.09368188701034882,0.976 +24.0,0.01755765508691582,0.995,0.09316670018928445,0.976 +25.0,0.016504243053257428,0.995375,0.09312778676414861,0.976 +26.0,0.015547213662833325,0.996,0.09313668530584004,0.976 +27.0,0.01464135909303369,0.99625,0.09262190180664727,0.975 +28.0,0.013849651145544097,0.996375,0.09243578056237582,0.975 +29.0,0.013045399463127353,0.99675,0.09173679387095408,0.974 +30.0,0.012308034427099244,0.996875,0.09119741075842759,0.974 +31.0,0.01160452532703958,0.997375,0.09061579657563777,0.973 +32.0,0.010950352937654438,0.997625,0.08979792576235757,0.973 +33.0,0.010404225456806958,0.997625,0.08958915530484804,0.973 +34.0,0.00993531874403182,0.99775,0.08940260293029532,0.973 +35.0,0.009447877428045336,0.99775,0.08878696599496204,0.973 +36.0,0.008965917585343961,0.997875,0.08823171309031336,0.974 
+37.0,0.008521583132989957,0.99825,0.08802536709390377,0.973 +38.0,0.008150154841642552,0.998375,0.08802838909372968,0.973 +39.0,0.007784229895554334,0.9985,0.08738821101087546,0.973 +40.0,0.00743699102601898,0.9985,0.08739398647224264,0.973 +41.0,0.007119215903327162,0.998625,0.0871615661765243,0.973 +42.0,0.006813014561591904,0.998625,0.08662144756724738,0.974 +43.0,0.006518437802804398,0.99875,0.08661521760369693,0.975 +44.0,0.006218632411764271,0.999,0.08645243665206297,0.973 +45.0,0.005935930756546657,0.99925,0.08618233389590946,0.973 +46.0,0.005689173929135219,0.99925,0.08599448297307472,0.973 +47.0,0.0054517521228319875,0.99925,0.08606674894466114,0.973 +48.0,0.005212159802743296,0.99925,0.08593790211989807,0.973 +49.0,0.0049843212661704744,0.99925,0.08585662017134629,0.973 +50.0,0.00477715303486137,0.999375,0.08578410203580478,0.973 +51.0,0.0045963000060570336,0.999375,0.08605111424505173,0.973 +52.0,0.004398328314693466,0.999375,0.0858784041018631,0.974 +53.0,0.004202551847212241,0.9995,0.08593016807062147,0.974 +54.0,0.004025727697865245,0.9995,0.08563104128541399,0.974 +55.0,0.003846586555499973,0.999625,0.08534700218134265,0.975 +56.0,0.0036902008756122787,0.99975,0.08535591916811447,0.975 +57.0,0.0035283367720043307,0.999875,0.08520610388234004,0.975 +58.0,0.0033746694849511975,1.0,0.0851791314692245,0.976 +59.0,0.0032474977805542946,1.0,0.08501889027122635,0.976 +60.0,0.00312400676742454,1.0,0.08481266555259098,0.977 diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_diagonal_mnist.csv.mtd b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_diagonal_mnist.csv.mtd new file mode 100644 index 00000000000..e25dd89e1b6 --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_diagonal_mnist.csv.mtd @@ -0,0 +1,12 @@ +{ + "data_type": "matrix", + "value_type": "double", + "rows": 60, + "cols": 5, + "nnz": 300, + "format": "csv", + "author": "nicol", + "header": false, + "sep": ",", + "created": "2026-01-17 
12:04:45 MEZ" +} \ No newline at end of file diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_cifar.csv b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_cifar.csv new file mode 100644 index 00000000000..d0762291631 --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_cifar.csv @@ -0,0 +1,90 @@ +1.0,2.303354277121383,0.09927237410669769,2.3026645264806,0.088 +2.0,2.3024429019304438,0.09835829732424797,2.302395347533886,0.0912 +3.0,2.3022537470495514,0.09710144174837959,2.3022451434900346,0.092 +4.0,2.3020886355946986,0.10289492271896293,2.302096100276622,0.1392 +5.0,2.3019041482290583,0.12017668688715306,2.3019092225507234,0.158 +6.0,2.301688531255859,0.13920090992188797,2.301681104703284,0.145 +7.0,2.3014280018530657,0.14300004154894466,2.301399021947866,0.1332 +8.0,2.301109485417331,0.14231448396210736,2.301052050694326,0.1334 +9.0,2.3007171148037786,0.14319999584510554,2.300619124932079,0.1382 +10.0,2.3002324038562887,0.1473133413661293,2.3000753268846386,0.1498 +11.0,2.2996250406346364,0.15494951803224197,2.299390727424242,0.1668 +12.0,2.298875319802741,0.16842747631710153,2.298581855353828,0.1826 +13.0,2.2979581473242936,0.18157667857736415,2.2975821039931352,0.1988 +14.0,2.296838431406206,0.19601130131294664,2.2963575161816574,0.2086 +15.0,2.295479784689714,0.2057093443576533,2.2948736290852683,0.2054 +16.0,2.2938451276224763,0.21202218713644672,2.2931041844251396,0.2144 +17.0,2.2919033381267577,0.21262205002492934,2.291004362607398,0.215 +18.0,2.2896238508484257,0.21265061492438092,2.288558037422181,0.2122 +19.0,2.286979065939002,0.21156514874522186,2.285732886786278,0.2112 +20.0,2.2839381179310227,0.21030829316935348,2.2824818251091212,0.2096 +21.0,2.2804795042279915,0.20924204337709823,2.2788294933816533,0.2084 +22.0,2.27661619286966,0.2084422261924547,2.27476184874041,0.2082 +23.0,2.27233921479588,0.20778990776134285,2.270293209378558,0.2086 
+24.0,2.267662648526721,0.20636166278876517,2.2654318172890657,0.2086 +25.0,2.2626192756606813,0.207332869370118,2.260219881002514,0.2076 +26.0,2.2572590182860295,0.20756138856573042,2.2547077733439878,0.2098 +27.0,2.2516373187233776,0.2081326865547615,2.2489572231627677,0.2108 +28.0,2.245814456526489,0.20861828984543793,2.2430298475130375,0.212 +29.0,2.239854898346882,0.20981801562240318,2.23699057143831,0.215 +30.0,2.2338232484515137,0.21070352750540136,2.2308996938500947,0.216 +31.0,2.227779156047962,0.21227459697523682,2.2248143943377916,0.2164 +32.0,2.221777624187474,0.21447409423300648,2.2187839689159294,0.2168 +33.0,2.215869242739315,0.21642118165198604,2.212859492208603,0.219 +34.0,2.2100970410566547,0.21776373192620907,2.207076999483368,0.221 +35.0,2.2044948909552398,0.2199632291839787,2.201472196746826,0.222 +36.0,2.199092194103585,0.2212818888150241,2.1960714225106948,0.2244 +37.0,2.1939100113217833,0.22248161459198937,2.190890716689774,0.2252 +38.0,2.18896049922426,0.22430976815688883,2.185943191743232,0.2272 +39.0,2.184248558275656,0.22576657802891809,2.181228442466861,0.2278 +40.0,2.1797760513002586,0.22645213561575536,2.1767469219366973,0.2296 +41.0,2.1755397102667793,0.22762329649326907,2.1724977070890747,0.23 +42.0,2.1715336259991895,0.22870876267242812,2.1684688117747153,0.2324 +43.0,2.167749590700271,0.22910867126474987,2.164655458022773,0.233 +44.0,2.16417730105029,0.22999418314774805,2.1610486141209355,0.2332 +45.0,2.1608063211125814,0.23087969503074623,2.157636642520351,0.2326 +46.0,2.1576246790351115,0.2315652526175835,2.154407637714963,0.2342 +47.0,2.154617104468484,0.23242219960113014,2.1513416793938696,0.235 +48.0,2.1517599169780426,0.23252711068638857,2.1484218399487305,0.2358 +49.0,2.149066816222913,0.23321266827322584,2.145675349150036,0.2364 +50.0,2.1465245866633307,0.23366970666445072,2.1430793565890967,0.2378 +51.0,2.14412072766194,0.2344409589496427,2.140617606284286,0.239 
+52.0,2.1418439754982397,0.2351836463353831,2.138280235881615,0.2398 +53.0,2.139685512113253,0.2356121198271564,2.13605717653691,0.2402 +54.0,2.137635492810324,0.23606915821838126,2.133940270945409,0.2408 +55.0,2.1356854486646553,0.23681184560412166,2.13192250053105,0.2404 +56.0,2.1338272048501303,0.23744027339205587,2.1299960314510455,0.242 +57.0,2.1320540810077424,0.23829722037560247,2.1281568242149995,0.2426 +58.0,2.1303613659253196,0.23858286937011802,2.126397779975742,0.2434 +59.0,2.1287450851810075,0.23869712896792422,2.124716807678159,0.2438 +60.0,2.1271998321934493,0.23912560245969755,2.1231064529381376,0.2442 +61.0,2.1257209692206427,0.23955407595147085,2.1215637509056395,0.2446 +62.0,2.1243035036437874,0.24003967924214728,2.1200814646668116,0.2452 +63.0,2.122943037851745,0.24066810703008146,2.118657105482082,0.2452 +64.0,2.121635884560944,0.24098232092404856,2.1172868604212627,0.2456 +65.0,2.120378615583713,0.2413822295163703,2.1159680410580592,0.2456 +66.0,2.1191679446234666,0.24166787851088584,2.1146974962627203,0.2464 +67.0,2.1180009668346935,0.24209635200265914,2.113471173048431,0.2468 +68.0,2.116874691196301,0.24218672095728766,2.1122863875401126,0.2474 +69.0,2.1157867655260487,0.24244380505235166,2.111141636758149,0.248 +70.0,2.1147352368261987,0.24270088914741564,2.1100332378539677,0.2482 +71.0,2.113718002806775,0.24307690709655977,2.1089602225780317,0.249 +72.0,2.112733184646633,0.2436767699850424,2.107919882704548,0.2494 +73.0,2.1117789394143482,0.24381959448230017,2.106913026103143,0.2488 +74.0,2.110853316989593,0.24421950307462192,2.1059350834636765,0.2498 +75.0,2.109955079089359,0.2445290427123151,2.104985952301633,0.2502 +76.0,2.109082620696774,0.24475756190792752,2.1040617864167523,0.251 +77.0,2.1082346435077755,0.24478612680737907,2.1031627040313423,0.2512 +78.0,2.1074095248882703,0.2450146460029915,2.1022870982946626,0.252 +79.0,2.1066062067836695,0.24510034070134618,2.1014329767324553,0.2518 
+80.0,2.1058225393256165,0.24535742479641015,2.100595247228801,0.252 +81.0,2.1050574606919588,0.24572876848928035,2.099777698683264,0.2526 +82.0,2.1043111830414234,0.24621437177995678,2.0989845437353187,0.2532 +83.0,2.1035842211982745,0.24630006647831143,2.0982143244746703,0.254 +84.0,2.102876376387279,0.24672853997008476,2.0974656861364447,0.253 +85.0,2.10218567299019,0.24707131876350338,2.0967362386813306,0.2536 +86.0,2.1015113351260197,0.24721414326076116,2.096022887408198,0.2534 +87.0,2.100852094441317,0.24738553265747049,2.095325468476296,0.2536 +88.0,2.1002072680052266,0.24758548695363136,2.094641673375884,0.2538 +89.0,2.0995766034511516,0.2474997922552767,2.093973751766461,0.2544 +90.0,2.098959302207614,0.24758548695363136,2.0933196838290935,0.255 diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_cifar.csv.mtd b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_cifar.csv.mtd new file mode 100644 index 00000000000..7bf18c4774f --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_cifar.csv.mtd @@ -0,0 +1,12 @@ +{ + "data_type": "matrix", + "value_type": "double", + "rows": 90, + "cols": 5, + "nnz": 450, + "format": "csv", + "author": "nicol", + "header": false, + "sep": ",", + "created": "2026-01-17 16:41:31 MEZ" +} \ No newline at end of file diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_diagonal_mnist.csv b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_diagonal_mnist.csv new file mode 100644 index 00000000000..61c471f700d --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_diagonal_mnist.csv @@ -0,0 +1,60 @@ +1.0,1.866423388179165,0.5742242063492063,1.6439261632170747,0.64 +2.0,1.4928910617188813,0.7032321428571429,1.3853969008124252,0.683 +3.0,1.272710742254421,0.7394861111111111,1.2131369327400596,0.711 +4.0,1.1254325674232708,0.7601150793650794,1.095144211752259,0.727 
+5.0,1.022459770574194,0.7726170634920635,1.0101238395751708,0.74 +6.0,0.946530560718444,0.7826170634920635,0.9456451197422426,0.751 +7.0,0.8880268839828795,0.7903670634920635,0.8949212750028692,0.765 +8.0,0.8415278206758608,0.7978670634920635,0.854089316705041,0.768 +9.0,0.8035827965978666,0.8036190476190476,0.8202159641777057,0.774 +10.0,0.7718674883077749,0.8092440476190476,0.7916438039580443,0.781 +11.0,0.7448529883790529,0.8146190476190476,0.7670526565955637,0.792 +12.0,0.721458277224362,0.8191190476190476,0.745724482535323,0.798 +13.0,0.7009611157389515,0.8232440476190476,0.7268900229389159,0.801 +14.0,0.6827906144486946,0.8262440476190476,0.7101796206958488,0.803 +15.0,0.6665491364904129,0.8307460317460317,0.6951588756175966,0.804 +16.0,0.6518900515924803,0.8338710317460317,0.6816308662811724,0.807 +17.0,0.6385824868896167,0.8378710317460317,0.6692585415240807,0.809 +18.0,0.626401446142148,0.8412460317460317,0.657986591683656,0.81 +19.0,0.6152151008879067,0.8434960317460317,0.6475896323415835,0.81 +20.0,0.6048551240028875,0.8463710317460317,0.6380074275770728,0.816 +21.0,0.595226790651475,0.8477460317460317,0.6291030696035893,0.819 +22.0,0.5862262284167258,0.8484960317460317,0.6207762320382855,0.821 +23.0,0.5777942203740585,0.8506210317460317,0.6129073353307255,0.824 +24.0,0.569829336052029,0.8521210317460317,0.6055186920462967,0.827 +25.0,0.562322087467323,0.8549960317460317,0.5984862630477912,0.829 +26.0,0.5551972532193233,0.8561210317460317,0.5918507350880778,0.831 +27.0,0.5483922690557319,0.8568710317460317,0.5854528969297884,0.83 +28.0,0.5417717020101807,0.8577460317460317,0.5791749719268132,0.834 +29.0,0.5353834920310123,0.8588710317460317,0.573021548269845,0.838 +30.0,0.5291950049277978,0.8594980158730159,0.5671532053783704,0.84 +31.0,0.5232429187125389,0.8608730158730159,0.561629254937016,0.841 +32.0,0.5176039971163227,0.8618730158730159,0.5564097976587178,0.843 +33.0,0.5122833004991232,0.8626230158730159,0.5514111670120752,0.844 
+34.0,0.5072275346885798,0.8628730158730159,0.5466137422719917,0.846 +35.0,0.5023701417154098,0.864998015873016,0.5420622512278893,0.846 +36.0,0.49771279900284227,0.8653730158730158,0.5377322462760586,0.848 +37.0,0.49325002706494225,0.8657480158730159,0.5335120751616993,0.847 +38.0,0.4889316669931411,0.8663730158730159,0.5294598410779857,0.847 +39.0,0.4847795598700526,0.8673730158730159,0.5256187227433868,0.847 +40.0,0.4808159819962951,0.8683730158730159,0.5218443641381453,0.848 +41.0,0.4769954353746303,0.8692480158730159,0.5181261701052222,0.847 +42.0,0.4732806998370051,0.8706230158730159,0.514536341695689,0.847 +43.0,0.4696791877007013,0.8717480158730159,0.5111297293916626,0.847 +44.0,0.466207952568916,0.8723730158730159,0.5078820073977189,0.847 +45.0,0.4628588261989359,0.8731230158730159,0.504615170638606,0.848 +46.0,0.45960234279588047,0.8742480158730159,0.5016026210614756,0.848 +47.0,0.4564797005612003,0.8744980158730159,0.49853096066258135,0.848 +48.0,0.45340866555174103,0.8748730158730159,0.4956449103544591,0.848 +49.0,0.4504369793890093,0.8758730158730159,0.49280215847599296,0.848 +50.0,0.4475359327883268,0.8763730158730159,0.490056720180492,0.849 +51.0,0.4447109415788691,0.8772480158730159,0.48734386450053985,0.85 +52.0,0.44194446747002286,0.8773730158730159,0.48479191333510535,0.853 +53.0,0.439268379013645,0.8781230158730159,0.4822259811676885,0.855 +54.0,0.4366447818848977,0.8789980158730158,0.47979430296590536,0.855 +55.0,0.4341039553458775,0.879875,0.47737496308325084,0.856 +56.0,0.4316169860382033,0.8805,0.47503514269851493,0.857 +57.0,0.4291959500256212,0.881375,0.4727302757008901,0.857 +58.0,0.42682299095231074,0.88225,0.4704963291546497,0.858 +59.0,0.4245037978579831,0.882875,0.46832729836663517,0.858 +60.0,0.4222409407797942,0.883,0.46619377517871974,0.859 diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_diagonal_mnist.csv.mtd 
b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_diagonal_mnist.csv.mtd new file mode 100644 index 00000000000..bc5790ab0e1 --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_diagonal_mnist.csv.mtd @@ -0,0 +1,12 @@ +{ + "data_type": "matrix", + "value_type": "double", + "rows": 60, + "cols": 5, + "nnz": 300, + "format": "csv", + "author": "nicol", + "header": false, + "sep": ",", + "created": "2026-01-17 12:53:09 MEZ" +} \ No newline at end of file diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_mnist.csv b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_mnist.csv new file mode 100644 index 00000000000..74604775d39 --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_mnist.csv @@ -0,0 +1,60 @@ +1.0,1.866423388179165,0.5742242063492063,1.6439261632170745,0.64 +2.0,1.492891061718881,0.7032321428571429,1.385396900812425,0.683 +3.0,1.2727107422544208,0.7394861111111111,1.2131369327400594,0.711 +4.0,1.1254325674232704,0.7601150793650794,1.0951442117522587,0.727 +5.0,1.0224597705741936,0.7726170634920635,1.0101238395751704,0.74 +6.0,0.9465305607184437,0.7826170634920635,0.9456451197422422,0.751 +7.0,0.8880268839828789,0.7903670634920635,0.8949212750028689,0.765 +8.0,0.84152782067586,0.7978670634920635,0.8540893167050403,0.768 +9.0,0.803582796597866,0.8036190476190476,0.8202159641777054,0.774 +10.0,0.7718674883077745,0.8092440476190476,0.7916438039580438,0.781 +11.0,0.7448529883790521,0.8146190476190476,0.7670526565955632,0.792 +12.0,0.7214582772243614,0.8191190476190476,0.7457244825353226,0.798 +13.0,0.7009611157389508,0.8232440476190476,0.7268900229389155,0.801 +14.0,0.6827906144486944,0.8262440476190476,0.7101796206958487,0.803 +15.0,0.6665491364904125,0.8307460317460317,0.6951588756175962,0.804 +16.0,0.6518900515924798,0.8338710317460317,0.6816308662811721,0.807 
+17.0,0.6385824868896166,0.8378710317460317,0.6692585415240803,0.809 +18.0,0.6264014461421475,0.8412460317460317,0.6579865916836556,0.81 +19.0,0.6152151008879062,0.8434960317460317,0.6475896323415832,0.81 +20.0,0.604855124002887,0.8463710317460317,0.6380074275770723,0.816 +21.0,0.5952267906514745,0.8477460317460317,0.629103069603589,0.819 +22.0,0.5862262284167256,0.8484960317460317,0.6207762320382852,0.821 +23.0,0.5777942203740583,0.8506210317460317,0.6129073353307254,0.824 +24.0,0.5698293360520286,0.8521210317460317,0.6055186920462965,0.827 +25.0,0.5623220874673228,0.8549960317460317,0.5984862630477912,0.829 +26.0,0.5551972532193232,0.8561210317460317,0.5918507350880776,0.831 +27.0,0.5483922690557316,0.8568710317460317,0.5854528969297881,0.83 +28.0,0.5417717020101804,0.8577460317460317,0.5791749719268128,0.834 +29.0,0.5353834920310118,0.8588710317460317,0.5730215482698446,0.838 +30.0,0.5291950049277971,0.8594980158730159,0.5671532053783702,0.84 +31.0,0.5232429187125387,0.8608730158730159,0.5616292549370158,0.841 +32.0,0.5176039971163221,0.8618730158730159,0.5564097976587177,0.843 +33.0,0.5122833004991226,0.8626230158730159,0.5514111670120748,0.844 +34.0,0.5072275346885796,0.8628730158730159,0.5466137422719916,0.846 +35.0,0.5023701417154097,0.864998015873016,0.5420622512278892,0.846 +36.0,0.497712799002842,0.8653730158730158,0.5377322462760586,0.848 +37.0,0.4932500270649419,0.8657480158730159,0.5335120751616992,0.847 +38.0,0.4889316669931409,0.8663730158730159,0.5294598410779855,0.847 +39.0,0.4847795598700526,0.8673730158730159,0.525618722743387,0.847 +40.0,0.48081598199629483,0.8683730158730159,0.5218443641381452,0.848 +41.0,0.4769954353746303,0.8692480158730159,0.5181261701052222,0.847 +42.0,0.473280699837005,0.8706230158730159,0.514536341695689,0.847 +43.0,0.4696791877007012,0.8717480158730159,0.5111297293916625,0.847 +44.0,0.46620795256891595,0.8723730158730159,0.5078820073977189,0.847 +45.0,0.46285882619893565,0.8731230158730159,0.504615170638606,0.848 
+46.0,0.4596023427958802,0.8742480158730159,0.5016026210614756,0.848 +47.0,0.45647970056120024,0.8744980158730159,0.4985309606625814,0.848 +48.0,0.45340866555174086,0.8748730158730159,0.4956449103544591,0.848 +49.0,0.4504369793890092,0.8758730158730159,0.49280215847599296,0.848 +50.0,0.44753593278832665,0.8763730158730159,0.49005672018049207,0.849 +51.0,0.4447109415788691,0.8772480158730159,0.48734386450053974,0.85 +52.0,0.44194446747002264,0.8773730158730159,0.48479191333510524,0.853 +53.0,0.43926837901364496,0.8781230158730159,0.48222598116768844,0.855 +54.0,0.43664478188489775,0.8789980158730158,0.4797943029659054,0.855 +55.0,0.43410395534587737,0.879875,0.4773749630832509,0.856 +56.0,0.43161698603820314,0.8805,0.4750351426985147,0.857 +57.0,0.4291959500256211,0.881375,0.4727302757008901,0.857 +58.0,0.42682299095231074,0.88225,0.4704963291546497,0.858 +59.0,0.42450379785798303,0.882875,0.4683272983666352,0.858 +60.0,0.4222409407797941,0.883,0.4661937751787198,0.859 diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_mnist.csv.mtd b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_mnist.csv.mtd new file mode 100644 index 00000000000..39ca9b83ca4 --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_heuristic_mnist.csv.mtd @@ -0,0 +1,12 @@ +{ + "data_type": "matrix", + "value_type": "double", + "rows": 60, + "cols": 5, + "nnz": 300, + "format": "csv", + "author": "nicol", + "header": false, + "sep": ",", + "created": "2026-01-18 12:30:06 MEZ" +} \ No newline at end of file diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_mnist.csv b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_mnist.csv new file mode 100644 index 00000000000..bf5fe82079c --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_mnist.csv @@ -0,0 +1,60 @@ +1.0,1.5299231149614017,0.615625,0.41635080243384964,0.873 +2.0,0.2506012298428366,0.927,0.22952393475157162,0.916 
+3.0,0.15480006600605273,0.95425,0.1620089559895203,0.94 +4.0,0.11335569472662702,0.964625,0.13560831935160117,0.955 +5.0,0.09042976548384442,0.971625,0.12328820920011867,0.963 +6.0,0.07636142456988171,0.976625,0.11456082728567903,0.968 +7.0,0.06691350664702883,0.979625,0.10910866114092198,0.969 +8.0,0.059269082607526495,0.9825,0.10503831758585568,0.971 +9.0,0.053242110795707065,0.98375,0.10171043938325056,0.971 +10.0,0.04853800560203754,0.985125,0.09997651189261585,0.971 +11.0,0.044405441596568826,0.98575,0.09838001759817647,0.971 +12.0,0.040599887346456684,0.98725,0.097780955718731,0.972 +13.0,0.03734205810884943,0.98825,0.09713194222958135,0.974 +14.0,0.03431520433712973,0.98875,0.09692151531860237,0.974 +15.0,0.03161609671275515,0.98975,0.0973242406668757,0.976 +16.0,0.02932789356176544,0.990125,0.09678624020786661,0.974 +17.0,0.027317096210515864,0.990875,0.09601975313441609,0.975 +18.0,0.025400220809541275,0.99125,0.09493154670227145,0.975 +19.0,0.023849761916380875,0.992375,0.09431458156343589,0.975 +20.0,0.02235980249269527,0.992875,0.09444793299463022,0.975 +21.0,0.021040637629279574,0.993375,0.09406752804026423,0.976 +22.0,0.01981557992146146,0.99375,0.09397031761636573,0.976 +23.0,0.018703144106497697,0.994375,0.0936818870241683,0.976 +24.0,0.01755765508786901,0.995,0.09316670020081608,0.976 +25.0,0.01650424305425841,0.995375,0.09312778677582582,0.976 +26.0,0.015547213664301573,0.996,0.09313668531755637,0.976 +27.0,0.014641359094149346,0.99625,0.09262190181973926,0.975 +28.0,0.013849651147343996,0.996375,0.0924357805757844,0.975 +29.0,0.013045399464671547,0.99675,0.0917367938821814,0.974 +30.0,0.01230803442885487,0.996875,0.09119741077156332,0.974 +31.0,0.011604525329266091,0.997375,0.09061579658698077,0.973 +32.0,0.010950352940047849,0.997625,0.08979792577002656,0.973 +33.0,0.010404225458991357,0.997625,0.08958915531131897,0.973 +34.0,0.009935318745837484,0.99775,0.0894026029257118,0.973 +35.0,0.009447877430231724,0.99775,0.08878696599874977,0.973 
+36.0,0.008965917587196202,0.997875,0.08823171309225875,0.974 +37.0,0.0085215831353927,0.99825,0.08802536709661077,0.973 +38.0,0.008150154843487975,0.998375,0.08802838908367926,0.973 +39.0,0.007784229897606622,0.9985,0.08738821100377808,0.973 +40.0,0.007436991028673095,0.9985,0.08739398645698664,0.973 +41.0,0.0071192159066914345,0.998625,0.08716156616155904,0.973 +42.0,0.006813014565763649,0.998625,0.08662144756300286,0.974 +43.0,0.0065184378060441145,0.99875,0.08661521759366482,0.975 +44.0,0.006218632412817654,0.999,0.08645243663927514,0.973 +45.0,0.005935930757015673,0.99925,0.08618233388001328,0.973 +46.0,0.005689173931263665,0.99925,0.08599448294668054,0.973 +47.0,0.005451752123017894,0.99925,0.08606674891635356,0.973 +48.0,0.005212159804542434,0.99925,0.08593790210578428,0.973 +49.0,0.004984321267572547,0.99925,0.08585662011359306,0.973 +50.0,0.004777153036591143,0.999375,0.08578410196981058,0.973 +51.0,0.0045963000083057756,0.999375,0.0860511141682166,0.973 +52.0,0.004398328316017485,0.999375,0.08587840406791315,0.974 +53.0,0.00420255185037797,0.9995,0.08593016802382229,0.974 +54.0,0.004025727702078051,0.9995,0.0856310412373703,0.974 +55.0,0.003846586557412409,0.999625,0.08534700213479356,0.975 +56.0,0.003690200877363095,0.99975,0.0853559191273964,0.975 +57.0,0.003528336774508031,0.999875,0.08520610384158768,0.975 +58.0,0.0033746694859510206,1.0,0.08517913142356824,0.976 +59.0,0.0032474977825223453,1.0,0.08501889024451946,0.976 +60.0,0.0031240067693776782,1.0,0.0848126655488919,0.977 diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_mnist.csv.mtd b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_mnist.csv.mtd new file mode 100644 index 00000000000..bdad9be7c54 --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_mnist.csv.mtd @@ -0,0 +1,12 @@ +{ + "data_type": "matrix", + "value_type": "double", + "rows": 60, + "cols": 5, + "nnz": 300, + "format": "csv", + "author": "nicol", + "header": false, + "sep": 
",", + "created": "2026-01-18 12:10:29 MEZ" +} \ No newline at end of file diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_cifar.csv b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_cifar.csv new file mode 100644 index 00000000000..a0083b7d42b --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_cifar.csv @@ -0,0 +1,90 @@ +1.0,2.3052508540430696,0.0977101337876018,2.303963296401079,0.1044 +2.0,2.303944458910519,0.0991955085590826,2.3035221142468574,0.1044 +3.0,2.3036721115786545,0.09878209655974737,2.303348806933503,0.0986 +4.0,2.3035195526911423,0.09792514957620076,2.3032560152519927,0.0986 +5.0,2.3034184316980806,0.097782325078943,2.303197726078531,0.0986 +6.0,2.3033450284556385,0.09803940917400697,2.303157331622366,0.0986 +7.0,2.303288583012619,0.0981536687718132,2.3031273807533745,0.0986 +8.0,2.303243403976425,0.09775376017949143,2.303104046162939,0.0986 +9.0,2.303206158991423,0.097782325078943,2.30308516976094,0.0986 +10.0,2.303174749851404,0.09729672178826658,2.303069447232022,0.0986 +11.0,2.3031477810136294,0.09675398869868705,2.3030560461012657,0.0986 +12.0,2.303124283577331,0.09681111849759015,2.3030444108621255,0.0986 +13.0,2.3031035611613118,0.09712533239155725,2.3030341567229007,0.0986 +14.0,2.3030850987567844,0.09763950058168522,2.303025008490508,0.0986 +15.0,2.3030685062423215,0.0975293958783447,2.303016763823304,0.0986 +16.0,2.3030534819916095,0.09704379258766828,2.30300927028908,0.0958 +17.0,2.3030397886431664,0.09621541050357321,2.3030024105684137,0.0958 +18.0,2.3030272365062925,0.09641536479973409,2.302996092635205,0.0958 +19.0,2.3030156719152237,0.09658675419644341,2.3029902430729923,0.0958 +20.0,2.3030049688788137,0.09715805218547449,2.3029848024211437,0.0958 +21.0,2.3029950229784286,0.09724374688382915,2.3029797218672385,0.0958 +22.0,2.3029857468327006,0.09701522768821673,2.3029749608519454,0.0958 
+23.0,2.3029770666752962,0.09695809788931362,2.302970485304816,0.0958 +24.0,2.3029689197369243,0.09701522768821673,2.302966266324296,0.0958 +25.0,2.3029612522174836,0.09712948728602294,2.3029622791757705,0.0958 +26.0,2.302954017697477,0.0968438382915074,2.302958502520811,0.0958 +27.0,2.30294717588051,0.09681527339205585,2.3029549178169453,0.0958 +28.0,2.302940691588364,0.09698666278876517,2.3029515088448482,0.0958 +29.0,2.3029345339507907,0.09644392969918565,2.302948261331938,0.0958 +30.0,2.3029286757469483,0.09627254030247633,2.302945162649764,0.0958 +31.0,2.302923092866068,0.09601545620741234,2.3029422015684697,0.0958 +32.0,2.3029177638626317,0.09615828070467011,2.3029393680558727,0.0958 +33.0,2.302912669587062,0.09624397540302476,2.3029366531117326,0.0958 +34.0,2.3029077928772526,0.09675814359315274,2.3029340486300307,0.0958 +35.0,2.302903118299415,0.09681527339205585,2.302931547283736,0.0958 +36.0,2.302898631929166,0.09692953298986207,2.3029291424277525,0.0958 +37.0,2.302894321165697,0.09681527339205585,2.3029268280166884,0.0958 +38.0,2.3028901745732164,0.09755796077779624,2.3029245985347893,0.0958 +39.0,2.302886181745126,0.09741513628053848,2.3029224489359157,0.0958 +40.0,2.302882333187097,0.09738657138108693,2.302920374591868,0.0958 +41.0,2.3028786202160454,0.09744370117999003,2.3029183712476917,0.0958 +42.0,2.3028750348725207,0.0974722660794416,2.3029164349828504,0.0958 +43.0,2.302871569844405,0.0976436554761509,2.302914562177353,0.0958 +44.0,2.3028682184002816,0.09767222037560247,2.3029127494820916,0.0958 +45.0,2.3028649743310323,0.0975293958783447,2.3029109937927728,0.0958 +46.0,2.302861831898485,0.0975293958783447,2.3029092922269156,0.0958 +47.0,2.3028587857900877,0.09767222037560247,2.3029076421035097,0.0958 +48.0,2.302855831078908,0.09798643426956956,2.302906040924949,0.0958 +49.0,2.302852963188079,0.09804356406847267,2.302904486360953,0.0958 +50.0,2.3028501778592187,0.09801499916902111,2.302902976234211,0.0958 
+51.0,2.3028474711242892,0.09847203756024597,2.3029015085075333,0.0958 +52.0,2.302844839280435,0.09855773225860064,2.302900081272322,0.0958 +53.0,2.3028422788674385,0.09844347266079442,2.3028986927381987,0.0958 +54.0,2.3028397866475188,0.09838634286189131,2.3028973412236535,0.0958 +55.0,2.3028373595871368,0.09858629715805219,2.3028960251475996,0.0958 +56.0,2.302834994840583,0.09844347266079442,2.302894743021725,0.0958 +57.0,2.3028326897351916,0.09847203756024597,2.30289349344355,0.0958 +58.0,2.302830441757941,0.09844347266079442,2.302892275090126,0.0958 +59.0,2.3028282485432734,0.09835777796243976,2.302891086712291,0.0958 +60.0,2.3028261078621295,0.09841490776134287,2.3028899271294314,0.0958 +61.0,2.3028240176118455,0.09844347266079442,2.3028887952246992,0.0958 +62.0,2.3028219758070656,0.09870055675585841,2.302887689940629,0.0958 +63.0,2.302819980571373,0.09867199185640685,2.3028866102751295,0.0958 +64.0,2.3028180301296777,0.09884338125311617,2.302885555277795,0.0958 +65.0,2.3028161228012136,0.09875768655476151,2.302884524046522,0.0958 +66.0,2.302814256993192,0.09878625145421306,2.3028835157243908,0.0958 +67.0,2.3028124311948646,0.09878625145421306,2.302882529496792,0.0958 +68.0,2.3028106439721734,0.09870055675585841,2.302881564588779,0.0958 +69.0,2.3028088939627684,0.09872912165530996,2.302880620262613,0.0958 +70.0,2.3028071798714174,0.09872912165530996,2.3028796958155,0.0958 +71.0,2.3028055004658077,0.09870055675585841,2.3028787905774917,0.0958 +72.0,2.302803854572614,0.09867199185640685,2.302877903909538,0.0958 +73.0,2.302802241073912,0.09884338125311617,2.302877035201682,0.0958 +74.0,2.302800658903833,0.09884338125311617,2.30287618387138,0.0958 +75.0,2.3027991070454563,0.0990147706498255,2.3028753493619405,0.0958 +76.0,2.302797584527948,0.0990147706498255,2.302874531141072,0.0958 +77.0,2.3027960904238864,0.0990147706498255,2.3028737286995273,0.0958 +78.0,2.3027946238467796,0.0990719004487286,2.302872941549839,0.0958 
+79.0,2.302793183948759,0.0990719004487286,2.30287216922514,0.0958 +80.0,2.3027917699184237,0.09904333554927705,2.3028714112780633,0.0958 +81.0,2.3027903809788377,0.09904333554927705,2.3028706672797075,0.0958 +82.0,2.302789016385661,0.09892907595147084,2.3028699368186745,0.0958 +83.0,2.302787675425392,0.09895764085092239,2.302869219500162,0.0958 +84.0,2.302786357413738,0.09884338125311617,2.3028685149451196,0.0958 +85.0,2.3027850616940877,0.09875768655476151,2.3028678227894512,0.0958 +86.0,2.302783787636076,0.09847203756024597,2.302867142683268,0.0958 +87.0,2.3027825346342436,0.09838634286189131,2.3028664742901888,0.0958 +88.0,2.3027813021067707,0.09838634286189131,2.3028658172866763,0.0958 +89.0,2.3027800894943056,0.09838634286189131,2.302865171361421,0.0958 +90.0,2.3027788962588445,0.09847203756024597,2.302864536214754,0.0958 diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_cifar.csv.mtd b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_cifar.csv.mtd new file mode 100644 index 00000000000..3337bcddcd5 --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_cifar.csv.mtd @@ -0,0 +1,12 @@ +{ + "data_type": "matrix", + "value_type": "double", + "rows": 90, + "cols": 5, + "nnz": 450, + "format": "csv", + "author": "nicol", + "header": false, + "sep": ",", + "created": "2026-01-17 15:02:57 MEZ" +} \ No newline at end of file diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_diagonal_mnist.csv b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_diagonal_mnist.csv new file mode 100644 index 00000000000..df2e0b9dec5 --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_diagonal_mnist.csv @@ -0,0 +1,60 @@ +1.0,0.6395330965863911,0.8015,0.3272391477006809,0.902 +2.0,0.18475208110972252,0.945375,0.22124293746528123,0.937 +3.0,0.13424154347106543,0.95975,0.18851124248025902,0.941 
+4.0,0.10030261909915536,0.970375,0.1667022281154376,0.951 +5.0,0.07937873593518888,0.976,0.15600958058409162,0.957 +6.0,0.06920171125717522,0.979125,0.16498212073116425,0.957 +7.0,0.060592194621787006,0.98125,0.17178759629686993,0.958 +8.0,0.05412933187594701,0.98275,0.1633657632581319,0.962 +9.0,0.04811218647175158,0.985375,0.1497524304739882,0.963 +10.0,0.042713873413571295,0.987625,0.13528177549249587,0.964 +11.0,0.03798921289828895,0.989,0.12257248782459493,0.966 +12.0,0.033368917980423234,0.990125,0.11437361989464159,0.968 +13.0,0.02928468918448111,0.991125,0.10654460042550556,0.973 +14.0,0.025635927828060606,0.99225,0.10001953244032351,0.971 +15.0,0.022313700189017967,0.993375,0.09378573210443951,0.97 +16.0,0.01957044397816946,0.99475,0.0884900055429675,0.973 +17.0,0.01725229577517209,0.996125,0.0840339954715082,0.976 +18.0,0.015548186647948697,0.99625,0.08210346019097332,0.976 +19.0,0.014139138023695867,0.996875,0.08060906399263776,0.976 +20.0,0.012959707879691243,0.99775,0.07868875058565629,0.976 +21.0,0.012004452751684604,0.9975,0.07724035363259926,0.977 +22.0,0.0111136327233965,0.9975,0.07612728983693134,0.977 +23.0,0.010281475186653103,0.998,0.07584562740464813,0.976 +24.0,0.009677989022521137,0.998,0.07425610274151832,0.976 +25.0,0.008997284040789153,0.998,0.0736737006721429,0.977 +26.0,0.008524000701748208,0.998,0.07296239630790295,0.977 +27.0,0.007973775129518497,0.99825,0.07233596593358264,0.976 +28.0,0.0075191435573975,0.998625,0.0717720466101364,0.975 +29.0,0.007066838280190324,0.99875,0.0702526716863536,0.974 +30.0,0.006566122743096105,0.999,0.06965647460292666,0.974 +31.0,0.006089250289720929,0.999,0.06819989827467632,0.977 +32.0,0.005608455593168797,0.999125,0.06694592070023135,0.977 +33.0,0.005141466669596634,0.99925,0.06595043354269733,0.978 +34.0,0.004759061968380032,0.99925,0.06443985064969614,0.978 +35.0,0.004380594774155363,0.999375,0.06353770184635456,0.978 +36.0,0.004046733048490616,0.99975,0.06253301596760709,0.979 
+37.0,0.003786776513232984,0.99975,0.061590454895157895,0.979 +38.0,0.00349652858317629,1.0,0.0608157597997437,0.979 +39.0,0.003270031704071804,1.0,0.06014196945494181,0.979 +40.0,0.0030958052763965156,1.0,0.05944900177792484,0.979 +41.0,0.0029148209401834786,1.0,0.05910852235695886,0.979 +42.0,0.002747487564857834,1.0,0.05885799247405729,0.98 +43.0,0.002601885279135758,1.0,0.05875771036367837,0.98 +44.0,0.002452998785943756,1.0,0.05875073571192066,0.98 +45.0,0.0023251227606833993,1.0,0.05883290853287607,0.98 +46.0,0.002200477357490353,1.0,0.05931266420926622,0.98 +47.0,0.0020748584217780814,1.0,0.059344483349665564,0.98 +48.0,0.001972244440490512,1.0,0.059807717143902615,0.98 +49.0,0.001866432894600097,1.0,0.06028438945471628,0.98 +50.0,0.0017845509941932457,1.0,0.06072764247481106,0.98 +51.0,0.0017029988093170635,1.0,0.06114998420916026,0.98 +52.0,0.0016256411665798158,1.0,0.06155595808269873,0.979 +53.0,0.0015603666542434512,1.0,0.061922900447717436,0.979 +54.0,0.0014921414498039287,1.0,0.06227737736439052,0.979 +55.0,0.001441256508702496,1.0,0.06265306885356355,0.979 +56.0,0.0013807884045605036,1.0,0.06304211962247053,0.979 +57.0,0.0013330958389548527,1.0,0.063426241980221,0.979 +58.0,0.0012818933369036054,1.0,0.06378686691752658,0.979 +59.0,0.0012389867278221342,1.0,0.06408567297048824,0.979 +60.0,0.001195307205982342,1.0,0.06431335760551511,0.98 diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_diagonal_mnist.csv.mtd b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_diagonal_mnist.csv.mtd new file mode 100644 index 00000000000..e3d62bb00c8 --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_diagonal_mnist.csv.mtd @@ -0,0 +1,12 @@ +{ + "data_type": "matrix", + "value_type": "double", + "rows": 60, + "cols": 5, + "nnz": 300, + "format": "csv", + "author": "nicol", + "header": false, + "sep": ",", + "created": "2026-01-17 12:21:48 MEZ" +} \ No newline at end of file diff --git 
a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_mnist.csv b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_mnist.csv new file mode 100644 index 00000000000..475eacfbd8b --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_mnist.csv @@ -0,0 +1,60 @@ +1.0,0.6395330965863912,0.8015,0.32723914770068085,0.902 +2.0,0.18475208110972235,0.945375,0.22124293746528062,0.937 +3.0,0.1342415434710652,0.95975,0.18851124248025777,0.941 +4.0,0.10030261909915518,0.970375,0.16670222811543603,0.951 +5.0,0.07937873593518883,0.976,0.15600958058409148,0.957 +6.0,0.06920171125717528,0.979125,0.1649821207311639,0.957 +7.0,0.06059219462178708,0.98125,0.17178759629687052,0.958 +8.0,0.05412933187594715,0.98275,0.16336576325813237,0.962 +9.0,0.048112186471751854,0.985375,0.1497524304739886,0.963 +10.0,0.04271387341357169,0.987625,0.13528177549249676,0.964 +11.0,0.037989212898289285,0.989,0.12257248782459636,0.966 +12.0,0.0333689179804235,0.990125,0.11437361989464215,0.968 +13.0,0.029284689184481228,0.991125,0.10654460042550568,0.973 +14.0,0.02563592782806072,0.99225,0.10001953244032429,0.971 +15.0,0.022313700189017974,0.993375,0.09378573210444012,0.97 +16.0,0.01957044397816949,0.99475,0.08849000554296813,0.973 +17.0,0.017252295775172097,0.996125,0.08403399547150898,0.976 +18.0,0.015548186647948742,0.99625,0.08210346019097452,0.976 +19.0,0.014139138023695867,0.996875,0.08060906399263904,0.976 +20.0,0.012959707879691218,0.99775,0.07868875058565691,0.976 +21.0,0.012004452751684557,0.9975,0.07724035363260007,0.977 +22.0,0.01111363272339649,0.9975,0.07612728983693215,0.977 +23.0,0.010281475186653124,0.998,0.07584562740464891,0.976 +24.0,0.009677989022521148,0.998,0.07425610274151902,0.976 +25.0,0.008997284040789163,0.998,0.07367370067214365,0.977 +26.0,0.008524000701748191,0.998,0.0729623963079035,0.977 +27.0,0.00797377512951854,0.99825,0.07233596593358348,0.976 +28.0,0.007519143557397548,0.998625,0.07177204661013718,0.975 
+29.0,0.007066838280190385,0.99875,0.07025267168635428,0.974 +30.0,0.006566122743096165,0.999,0.06965647460292756,0.974 +31.0,0.006089250289720989,0.999,0.06819989827467741,0.977 +32.0,0.005608455593168826,0.999125,0.06694592070023227,0.977 +33.0,0.005141466669596642,0.99925,0.06595043354269849,0.978 +34.0,0.004759061968380081,0.99925,0.06443985064969704,0.978 +35.0,0.004380594774155382,0.999375,0.0635377018463556,0.978 +36.0,0.004046733048490658,0.99975,0.06253301596760812,0.979 +37.0,0.0037867765132330333,0.99975,0.06159045489515902,0.979 +38.0,0.003496528583176314,1.0,0.060815759799744844,0.979 +39.0,0.00327003170407184,1.0,0.060141969454942976,0.979 +40.0,0.0030958052763965277,1.0,0.05944900177792594,0.979 +41.0,0.0029148209401835042,1.0,0.05910852235696025,0.979 +42.0,0.0027474875648578626,1.0,0.05885799247405899,0.98 +43.0,0.0026018852791357774,1.0,0.05875771036368049,0.98 +44.0,0.002452998785943783,1.0,0.058750735711922415,0.98 +45.0,0.002325122760683419,1.0,0.058832908532878384,0.98 +46.0,0.0022004773574903613,1.0,0.05931266420926835,0.98 +47.0,0.0020748584217780996,1.0,0.059344483349667965,0.98 +48.0,0.001972244440490512,1.0,0.0598077171439049,0.98 +49.0,0.001866432894600093,1.0,0.060284389454718604,0.98 +50.0,0.001784550994193222,1.0,0.06072764247481326,0.98 +51.0,0.0017029988093170592,1.0,0.061149984209162594,0.98 +52.0,0.00162564116657979,1.0,0.061555958082701305,0.979 +53.0,0.0015603666542434351,1.0,0.061922900447719774,0.979 +54.0,0.0014921414498039198,1.0,0.062277377364392435,0.979 +55.0,0.001441256508702488,1.0,0.06265306885356571,0.979 +56.0,0.0013807884045604901,1.0,0.06304211962247282,0.979 +57.0,0.0013330958389548362,1.0,0.06342624198022304,0.979 +58.0,0.0012818933369035971,1.0,0.06378686691752886,0.979 +59.0,0.0012389867278221167,1.0,0.06408567297049027,0.979 +60.0,0.0011953072059823309,1.0,0.06431335760551778,0.98 diff --git a/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_mnist.csv.mtd 
b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_mnist.csv.mtd new file mode 100644 index 00000000000..604f85a9c2e --- /dev/null +++ b/scripts/staging/shampoo_optimizer/metrics/metrics_shampoo_momentum_mnist.csv.mtd @@ -0,0 +1,12 @@ +{ + "data_type": "matrix", + "value_type": "double", + "rows": 60, + "cols": 5, + "nnz": 300, + "format": "csv", + "author": "nicol", + "header": false, + "sep": ",", + "created": "2026-01-18 12:19:24 MEZ" +} \ No newline at end of file diff --git a/scripts/staging/shampoo_optimizer/shampoo_optimizer_experiments.dml b/scripts/staging/shampoo_optimizer/shampoo_optimizer_experiments.dml new file mode 100644 index 00000000000..658724473eb --- /dev/null +++ b/scripts/staging/shampoo_optimizer/shampoo_optimizer_experiments.dml @@ -0,0 +1,482 @@ +#------------------------------------------------------------- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +#------------------------------------------------------------- + +source("scripts/nn/optim/shampoo.dml") as shampoo +source("scripts/nn/optim/adagrad.dml") as adagrad +source("scripts/nn/optim/adam.dml") as adam + +source("scripts/nn/layers/conv2d_builtin.dml") as conv2d +source("scripts/nn/layers/avg_pool2d_builtin.dml") as avg_pool2d +source("scripts/nn/layers/relu.dml") as relu +source("scripts/nn/layers/cross_entropy_loss.dml") as cross_entropy_loss +source("scripts/nn/layers/softmax.dml") as softmax + +source("src/test/scripts/applications/nn/util.dml") as test_util + + +# defining forward pass +modelPredict = function(matrix[double] X, matrix[double] W1, matrix[double] b1, matrix[double] W2, matrix[double] b2, matrix[double] W_fc, matrix[double] b_fc, list[unknown] h_ins, list[unknown] w_ins, list[unknown] c_ins) + return(matrix[double] softMaxOut, matrix[double] X, matrix[double] convOut1, matrix[double] poolOut1, matrix[double] reluOut1, matrix[double] convOut2, matrix[double] poolOut2, matrix[double] reluOut2, matrix[double] pred){ # + filters = 64 + conv_kernel = 5 + pool_kernel = 4 + conv_padding = 2 + pool_padding = 1 + + h_in = as.integer(as.scalar(h_ins[1])) + h_in_1 = as.integer(as.scalar(h_ins[2])) + h_in_2 = as.integer(as.scalar(h_ins[3])) + h_in_3 = as.integer(as.scalar(h_ins[4])) + h_in_4 = as.integer(as.scalar(h_ins[5])) + + w_in = as.integer(as.scalar(w_ins[1])) + w_in_1 = as.integer(as.scalar(w_ins[2])) + w_in_2 = as.integer(as.scalar(w_ins[3])) + w_in_3 = as.integer(as.scalar(w_ins[4])) + w_in_4 = as.integer(as.scalar(w_ins[5])) + + c_in = as.integer(as.scalar(c_ins[1])) + c_in_1 = as.integer(as.scalar(c_ins[2])) + + # first block + [convOut1, Hout, Wout] = conv2d::forward(X, W1, b1, c_in, h_in, w_in, conv_kernel, conv_kernel, 1, 1, conv_padding, conv_padding) + + [poolOut1, Hout, Wout] = avg_pool2d::forward(convOut1, c_in_1, h_in_1, w_in_1, pool_kernel, pool_kernel, 2, 2, pool_padding, pool_padding) + + reluOut1 = relu::forward(poolOut1) 
+ + # second block + [convOut2, Hout, Wout] = conv2d::forward(reluOut1, W2, b2, c_in_1, h_in_2, w_in_2, conv_kernel, conv_kernel, 1, 1, conv_padding, conv_padding) + + [poolOut2, Hout, Wout] = avg_pool2d::forward(convOut2, c_in_1, h_in_3, w_in_3, pool_kernel, pool_kernel, 2, 2, pool_padding, pool_padding) + + reluOut2 = relu::forward(poolOut2) + + pred = reluOut2 %*% t(W_fc) + t(b_fc) + + softMaxOut = softmax::forward(pred) + + #Xs = list(X, convOut1, poolOut1, reluOut1, convOut2, poolOut2, reluOut2, pred, softMaxOut) + + } + +#defining backward pass +modelBackward = function(matrix[double] target, matrix[double] W1, matrix[double] b1, matrix[double] W2, matrix[double] b2, matrix[double] W_fc, matrix[double] b_fc, list[unknown] h_ins, list[unknown] w_ins, list[unknown] c_ins, matrix[double] X, matrix[double] convOut1, matrix[double] poolOut1, matrix[double] reluOut1, matrix[double] convOut2, matrix[double] poolOut2, matrix[double] reluOut2, matrix[double] pred, matrix[double] softMaxOut) + return (matrix[double] gradient_lin_layer_W, matrix[double] gradient_lin_layer_b, matrix[double] gradient_W2, matrix[double] gradient_b2, matrix[double] gradient_W1, matrix[double] gradient_b1){ + filters = 64 + conv_kernel = 5 + pool_kernel = 4 + conv_padding = 2 + pool_padding = 1 + + h_in = as.integer(as.scalar(h_ins[1])) + h_in_1 = as.integer(as.scalar(h_ins[2])) + h_in_2 = as.integer(as.scalar(h_ins[3])) + h_in_3 = as.integer(as.scalar(h_ins[4])) + h_in_4 = as.integer(as.scalar(h_ins[5])) + + w_in = as.integer(as.scalar(w_ins[1])) + w_in_1 = as.integer(as.scalar(w_ins[2])) + w_in_2 = as.integer(as.scalar(w_ins[3])) + w_in_3 = as.integer(as.scalar(w_ins[4])) + w_in_4 = as.integer(as.scalar(w_ins[5])) + + c_in = as.integer(as.scalar(c_ins[1])) + c_in_1 = as.integer(as.scalar(c_ins[2])) + + # gradient of loss function + gradient_lossfn = cross_entropy_loss::backward(softMaxOut, target) + + # gradient Softmax + gradient_softmax = softmax::backward(gradient_lossfn, pred) + + # 
gradients Linear layer + gradient_lin_layer_W = t(gradient_softmax) %*% reluOut2 + gradient_lin_layer_b = t(colSums(gradient_softmax)) + gradient_lin_layer_X = gradient_softmax %*% W_fc + + # gradient second Relu + gradient_relu = relu::backward(gradient_lin_layer_X, poolOut2) + + # gradient second pooling layer + gradient_second_pooling = avg_pool2d::backward(gradient_relu, h_in_4, w_in_4, convOut2, c_in_1, h_in_3, w_in_3, pool_kernel, pool_kernel, 2, 2, pool_padding, pool_padding) + + # gradient second conv layer + [gradient_second_conv_X, gradient_W2, gradient_b2] = conv2d::backward(gradient_second_pooling, h_in_3, w_in_3, reluOut1, W2, b2, c_in_1, h_in_2, w_in_2, conv_kernel, conv_kernel, 1, 1, conv_padding, conv_padding) + + # gradient of the first Relu + gradient_relu = relu::backward(gradient_second_conv_X, poolOut1) + + # gradient first pooling layer + gradient_first_pooling = avg_pool2d::backward(gradient_relu, h_in_2, w_in_2, convOut1, c_in_1, h_in_1, w_in_1, pool_kernel, pool_kernel, 2, 2, pool_padding, pool_padding) + + # gradient first conv layer + [gradient_first_conv_X, gradient_W1, gradient_b1] = conv2d::backward(gradient_first_pooling, h_in_1, w_in_1, X, W1, b1, c_in, h_in, w_in, conv_kernel, conv_kernel, 1, 1, conv_padding, conv_padding) + + } + + +definingData = function(string dataset_name) + return(matrix[double] X_train, matrix[double] Y_train, matrix[double] X_val, matrix[double] Y_val, matrix[double] X_test, matrix[double] Y_test){ + if (dataset_name=="mnist"){ + data = read("src/test/resources/datasets/MNIST/mnist_test.csv", format="csv") + train = data[1:8999,] + test = data[9000:nrow(data),] + + images = train[,2:ncol(train)] + images = images / 255.0 + labels = train[,1] + images_test = test[,2:ncol(test)] + labels_test = test[,1] + + N = nrow(images) + N_test = nrow(images_test) + + X_train = images[1001:nrow(images),] + labels_train = labels[1001:nrow(images),] + Y_train = table(seq(1, nrow(X_train)), labels_train+1, nrow(X_train), 10) 
+ + X_val = images[1:1000,] + labels_val = labels[1:1000,] + Y_val = table(seq(1, nrow(X_val)), labels_val+1, nrow(X_val), 10) + + X_test = images_test + Y_test = table(seq(1, N_test), labels_test+1, N_test, 10) + } + if (dataset_name=="cifar"){ + data = read("scripts/staging/shampoo_optimizer/cifar10.csv", format="csv") + train = data[1:39999,] + test = data[40000:nrow(data),] + + images = train[,2:ncol(train)] + images = images / 255.0 + labels = train[,1] + images_test = test[,2:ncol(test)] + labels_test = test[,1] + + N = nrow(images) + N_test = nrow(images_test) + + X_train = images[5001:nrow(images),] + labels_train = labels[5001:nrow(images),] + Y_train = table(seq(1, nrow(X_train)), labels_train+1, nrow(X_train), 10) + + X_val = images[1:5000,] + labels_val = labels[1:5000,] + Y_val = table(seq(1, nrow(X_val)), labels_val+1, nrow(X_val), 10) + + X_test = images_test + Y_test = table(seq(1, N_test), labels_test+1, N_test, 10) + } +} + +# Define image properties + +defining_image_properties = function(string dataset_name) + return(int h_in, int w_in, int c_in, int classes){ + if(dataset_name=="mnist"){ + h_in = 28 + w_in = 28 + c_in = 1 + classes = 10 + } + if(dataset_name=="cifar"){ + h_in = 32 + w_in = 32 + c_in = 3 + classes = 10 + } +} + +# Define training parameters +defining_training_parameters = function(string optimizer) + return(int epochs, int batch_size, double epsilon, double lr, int diagThreshold, int rootEvery, int preconEvery){ + if(optimizer=="adam"){ + epsilon = 1e-8 + lr = 0.001 + } + else if((optimizer == "shampoo_heuristic") | (optimizer == "shampoo_heuristic_diagonal")){ + epsilon = 1e-5 + lr = 0.005 + } + else{ + epsilon = 1e-3 + lr = 0.6 + } + epochs = 60 + batch_size = 64 + diagThreshold = 1200 + rootEvery = 10 + preconEvery = 2 +} + +defining_model_parameters = function() + return(int filters, int conv_kernel, int pool_kernel, int conv_padding, int pool_padding, int seed){ + + filters = 64 + conv_kernel = 5 + pool_kernel = 4 + 
conv_padding = 2 + pool_padding = 1 + seed = 42 +} + +# create simple nn for image classification +defining_nn_image_classification = function(int h_in, int w_in, int c_in, int classes, int filters, int conv_kernel, int pool_kernel, int conv_padding, int pool_padding, int seed) + return(list[unknown] h_ins, list[unknown] w_ins, list[unknown] c_ins, matrix[double] W1, matrix[double] b1, matrix[double] W2, matrix[double] b2, matrix[double] W_fc, matrix[double] b_fc){ + + # convolution layer 1 + [W1, b1] = conv2d::init(filters, c_in, conv_kernel, conv_kernel, seed) + h_in_1 = h_in + conv_padding*2 - (conv_kernel - 1) + w_in_1 = w_in + conv_padding*2 - (conv_kernel - 1) + c_in_1 = filters + # pooling + h_in_2 = floor((h_in_1 + pool_padding*2 - pool_kernel)/2)+1 + w_in_2 = floor((w_in_1 + pool_padding*2 - pool_kernel)/2)+1 + # relu + + # convolution layer 2 + [W2, b2] = conv2d::init(filters, c_in_1, conv_kernel, conv_kernel, seed) + h_in_3 = h_in_2 + conv_padding*2 - (conv_kernel - 1) + w_in_3 = w_in_2 + conv_padding*2 - (conv_kernel - 1) + c_in_1 = filters + # pooling + h_in_4 = floor((h_in_3 + pool_padding*2 - pool_kernel)/2)+1 + w_in_4 = floor((w_in_3 + pool_padding*2 - pool_kernel)/2)+1 + # relu + + # Linear + W_fc = rand(rows=classes, cols=h_in_4*w_in_4*c_in_1, pdf="uniform", min=-0.1, max=0.1, seed=seed) + b_fc = matrix(0, rows=classes, cols=1) + + h_ins = list(h_in, h_in_1, h_in_2, h_in_3, h_in_4) + w_ins = list(w_in, w_in_1, w_in_2, w_in_3, w_in_4) + c_ins = list(c_in, c_in_1) +} + +get_optimizer = function(int optimizer_index) + return(string optimizer){ + + if (optimizer_index==4) + { + optimizer = "adam" + } + if (optimizer_index==1) + { + optimizer = "shampoo" + } + if (optimizer_index==5) + { + optimizer = "shampoo_diagonal" + } + if (optimizer_index==2) + { + optimizer = "shampoo_momentum" + } + if (optimizer_index==6) + { + optimizer = "shampoo_momentum_diagonal" + } + if (optimizer_index==3) + { + optimizer = "shampoo_heuristic" + } + if 
(optimizer_index==7) + { + optimizer = "shampoo_heuristic_diagonal" + } +} + +# set parameters for the experiments +############################################################################################# +dataset_name = "mnist" #Alternatives: "mnist" or "cifar" +optimizers_to_experiment = list("shampoo", "shampoo_momentum", "shampoo_heuristic", "adam") +# Alternatives: ("shampoo", "shampoo_diagonal", "shampoo_momentum", +# "shampoo_momentum_diagonal", "shampoo_heuristic", "shampoo_heuristic_diagonal", "adam") +############################################################################################# + +for (optimizer_index in 1:length(optimizers_to_experiment)){ + optimizer = get_optimizer(optimizer_index) + print("Starting with " + optimizer) + + # get the data + [X_train, Y_train, X_val, Y_val, X_test, Y_test] = definingData(dataset_name) + + # get image properties + [h_in, w_in, c_in, classes] = defining_image_properties(dataset_name) + + # get model parameters + [filters, conv_kernel, pool_kernel, conv_padding, pool_padding, seed] = defining_model_parameters() + + #get model weights + [h_ins, w_ins, c_ins, W1, b1, W2, b2, W_fc, b_fc] = defining_nn_image_classification(h_in, w_in, c_in, classes, filters, conv_kernel, pool_kernel, conv_padding, pool_padding, seed) + + # get training parameters + [epochs, batch_size, epsilon, lr, diagThreshold, rootEvery, preconEvery]= defining_training_parameters(optimizer) + + + if ((optimizer == "shampoo") | (optimizer == "shampoo_diagonal")){ + [preconL_W1, preconR_W1, useDiag_W1] = shampoo::init(W1, epsilon, diagThreshold) + [preconL_b1, preconR_b1, useDiag_b1] = shampoo::init(b1, epsilon, diagThreshold) + [preconL_W2, preconR_W2, useDiag_W2] = shampoo::init(W2, epsilon, diagThreshold) + [preconL_b2, preconR_b2, useDiag_b2] = shampoo::init(b2, epsilon, diagThreshold) + [preconL_W_fc, preconR_W_fc, useDiag_W_fc] = shampoo::init(W_fc, epsilon, diagThreshold) + [preconL_b_fc, preconR_b_fc, useDiag_b_fc] = 
shampoo::init(b_fc, epsilon, diagThreshold) + } + if ((optimizer == "shampoo_momentum") | (optimizer == "shampoo_momentum_diagonal")){ + [preconL_W1, preconR_W1, momentum_W1, useDiag_W1] = shampoo::init_momentum(W1, epsilon, diagThreshold) + [preconL_b1, preconR_b1, momentum_b1, useDiag_b1] = shampoo::init_momentum(b1, epsilon, diagThreshold) + [preconL_W2, preconR_W2, momentum_W2, useDiag_W2] = shampoo::init_momentum(W2, epsilon, diagThreshold) + [preconL_b2, preconR_b2, momentum_b2, useDiag_b2] = shampoo::init_momentum(b2, epsilon, diagThreshold) + [preconL_W_fc, preconR_W_fc, momentum_W_fc, useDiag_W_fc] = shampoo::init_momentum(W_fc, epsilon, diagThreshold) + [preconL_b_fc, preconR_b_fc, momentum_b_fc, useDiag_b_fc] = shampoo::init_momentum(b_fc, epsilon, diagThreshold) + } + if ((optimizer == "shampoo_heuristic") | (optimizer == "shampoo_heuristic_diagonal")){ + [preconL_W1, preconR_W1, stepCounter_W1, bufferL_W1, bufferR_W1, momentum_W1, preconLInvPowerRoot_W1, preconRInvPowerRoot_W1, useDiag_W1] = shampoo::init_heuristic(W1, epsilon, diagThreshold) + [preconL_b1, preconR_b1, stepCounter_b1, bufferL_b1, bufferR_b1, momentum_b1, preconLInvPowerRoot_b1, preconRInvPowerRoot_b1, useDiag_b1] = shampoo::init_heuristic(b1, epsilon, diagThreshold) + [preconL_W2, preconR_W2, stepCounter_W2, bufferL_W2, bufferR_W2, momentum_W2, preconLInvPowerRoot_W2, preconRInvPowerRoot_W2, useDiag_W2] = shampoo::init_heuristic(W2, epsilon, diagThreshold) + [preconL_b2, preconR_b2, stepCounter_b2, bufferL_b2, bufferR_b2, momentum_b2, preconLInvPowerRoot_b2, preconRInvPowerRoot_b2, useDiag_b2] = shampoo::init_heuristic(b2, epsilon, diagThreshold) + [preconL_W_fc, preconR_W_fc, stepCounter_W_fc, bufferL_W_fc, bufferR_W_fc, momentum_W_fc, preconLInvPowerRoot_W_fc, preconRInvPowerRoot_W_fc, useDiag_W_fc] = shampoo::init_heuristic(W_fc, epsilon, diagThreshold) + [preconL_b_fc, preconR_b_fc, stepCounter_b_fc, bufferL_b_fc, bufferR_b_fc, momentum_b_fc, preconLInvPowerRoot_b_fc, 
preconRInvPowerRoot_b_fc, useDiag_b_fc] = shampoo::init_heuristic(b_fc, epsilon, diagThreshold) + } + if (optimizer == "adam"){ + [m_W1, v_W1] = adam::init(W1) + [m_b1, v_b1] = adam::init(b1) + [m_W2, v_W2] = adam::init(W2) + [m_b2, v_b2] = adam::init(b2) + [m_W_fc, v_W_fc] = adam::init(W_fc) + [m_b_fc, v_b_fc] = adam::init(b_fc) + } + + data_val_X = X_val + data_val_Y = Y_val + + # define the training + + train_losses = matrix(0, rows=epochs, cols=1) + train_accuracies = matrix(0, rows=epochs, cols=1) + val_accuracies = matrix(0, rows=epochs, cols=1) + val_losses = matrix(0, rows=epochs, cols=1) + Ntrain = nrow(X_train) + + timestep = 0 + + for(epoch in 1:epochs){ + + print("Epoch " + epoch + " of " + epochs + " epochs") + + accuracy_value = 0 + accuracy_count = 0 + loss_value = 0 + loss_count = 0 + + + for(start_index in seq(1, Ntrain, batch_size)){ + #start_index = (i - 1) * batch_size + 1 + end_index = min(start_index + batch_size - 1, Ntrain) + data_train_X = X_train[start_index:end_index,] + data_train_Y = Y_train[start_index:end_index,] + + [softMaxOut, X, convOut1, poolOut1, reluOut1, convOut2, poolOut2, reluOut2, pred] = modelPredict(data_train_X, W1, b1, W2, b2, W_fc, b_fc, h_ins, w_ins, c_ins) + + predicted_value = rowIndexMax(softMaxOut) - 1 + accuracy = sum(predicted_value==rowIndexMax(data_train_Y)-1) / length(predicted_value) + accuracy_value = accuracy_value + accuracy + accuracy_count = accuracy_count + 1 + + loss = cross_entropy_loss::forward(softMaxOut, data_train_Y) + loss_value = loss_value + loss + loss_count = loss_count + 1 + + + [gradient_lin_layer_W, gradient_lin_layer_b, gradient_W2, gradient_b2, gradient_W1, gradient_b1] = modelBackward(data_train_Y, W1, b1, W2, b2, W_fc, b_fc, h_ins, w_ins, c_ins, X, convOut1, poolOut1, reluOut1, convOut2, poolOut2, reluOut2, pred, softMaxOut) + + if ((optimizer == "shampoo") | (optimizer == "shampoo_diagonal")){ + [W1, preconL_W1, preconR_W1] = shampoo::update(W1, gradient_W1, lr, preconL_W1, 
preconR_W1, useDiag_W1) + [b1, preconL_b1, preconR_b1] = shampoo::update(b1, gradient_b1, lr, preconL_b1, preconR_b1, useDiag_b1) + [W2, preconL_W2, preconR_W2] = shampoo::update(W2, gradient_W2, lr, preconL_W2, preconR_W2, useDiag_W2) + [b2, preconL_b2, preconR_b2] = shampoo::update(b2, gradient_b2, lr, preconL_b2, preconR_b2, useDiag_b2) + [W_fc, preconL_W_fc, preconR_W_fc] = shampoo::update(W_fc, gradient_lin_layer_W, lr, preconL_W_fc, preconR_W_fc, useDiag_W_fc) + [b_fc, preconL_b_fc, preconR_b_fc] = shampoo::update(b_fc, gradient_lin_layer_b, lr, preconL_b_fc, preconR_b_fc, useDiag_b_fc) + } + if ((optimizer == "shampoo_momentum") | (optimizer == "shampoo_momentum_diagonal")){ + [W1, preconL_W1, preconR_W1, momentum_W1] = shampoo::update_momentum(W1, gradient_W1, lr, preconL_W1, preconR_W1, momentum_W1, useDiag_W1) + [b1, preconL_b1, preconR_b1, momentum_b1] = shampoo::update_momentum(b1, gradient_b1, lr, preconL_b1, preconR_b1, momentum_b1, useDiag_b1) + [W2, preconL_W2, preconR_W2, momentum_W2] = shampoo::update_momentum(W2, gradient_W2, lr, preconL_W2, preconR_W2, momentum_W2, useDiag_W2) + [b2, preconL_b2, preconR_b2, momentum_b2] = shampoo::update_momentum(b2, gradient_b2, lr, preconL_b2, preconR_b2, momentum_b2, useDiag_b2) + [W_fc, preconL_W_fc, preconR_W_fc, momentum_W_fc] = shampoo::update_momentum(W_fc, gradient_lin_layer_W, lr, preconL_W_fc, preconR_W_fc, momentum_W_fc, useDiag_W_fc) + [b_fc, preconL_b_fc, preconR_b_fc, momentum_b_fc] = shampoo::update_momentum(b_fc, gradient_lin_layer_b, lr, preconL_b_fc, preconR_b_fc, momentum_b_fc, useDiag_b_fc) + } + if ((optimizer == "shampoo_heuristic") | (optimizer == "shampoo_heuristic_diagonal")){ + [W1, preconL_W1, preconR_W1, momentum_W1, stepCounter_W1, bufferL_W1, bufferR_W1, preconLInvPowerRoot_W1, preconRInvPowerRoot_W1] = shampoo::update_heuristic(W1, gradient_W1, lr, preconL_W1, preconR_W1, momentum_W1, stepCounter_W1, rootEvery, preconEvery, bufferL_W1, bufferR_W1, preconLInvPowerRoot_W1, 
preconRInvPowerRoot_W1, useDiag_W1) + [b1, preconL_b1, preconR_b1, momentum_b1, stepCounter_b1, bufferL_b1, bufferR_b1, preconLInvPowerRoot_b1, preconRInvPowerRoot_b1] = shampoo::update_heuristic(b1, gradient_b1, lr, preconL_b1, preconR_b1, momentum_b1, stepCounter_b1, rootEvery, preconEvery, bufferL_b1, bufferR_b1, preconLInvPowerRoot_b1, preconRInvPowerRoot_b1, useDiag_b1) + [W2, preconL_W2, preconR_W2, momentum_W2, stepCounter_W2, bufferL_W2, bufferR_W2, preconLInvPowerRoot_W2, preconRInvPowerRoot_W2] = shampoo::update_heuristic(W2, gradient_W2, lr, preconL_W2, preconR_W2, momentum_W2, stepCounter_W2, rootEvery, preconEvery, bufferL_W2, bufferR_W2, preconLInvPowerRoot_W2, preconRInvPowerRoot_W2, useDiag_W2) + [b2, preconL_b2, preconR_b2, momentum_b2, stepCounter_b2, bufferL_b2, bufferR_b2, preconLInvPowerRoot_b2, preconRInvPowerRoot_b2] = shampoo::update_heuristic(b2, gradient_b2, lr, preconL_b2, preconR_b2, momentum_b2, stepCounter_b2, rootEvery, preconEvery, bufferL_b2, bufferR_b2, preconLInvPowerRoot_b2, preconRInvPowerRoot_b2, useDiag_b2) + [W_fc, preconL_W_fc, preconR_W_fc, momentum_W_fc, stepCounter_W_fc, bufferL_W_fc, bufferR_W_fc, preconLInvPowerRoot_W_fc, preconRInvPowerRoot_W_fc] = shampoo::update_heuristic(W_fc, gradient_lin_layer_W, lr, preconL_W_fc, preconR_W_fc, momentum_W_fc, stepCounter_W_fc, rootEvery, preconEvery, bufferL_W_fc, bufferR_W_fc, preconLInvPowerRoot_W_fc, preconRInvPowerRoot_W_fc, useDiag_W_fc) + [b_fc, preconL_b_fc, preconR_b_fc, momentum_b_fc, stepCounter_b_fc, bufferL_b_fc, bufferR_b_fc, preconLInvPowerRoot_b_fc, preconRInvPowerRoot_b_fc] = shampoo::update_heuristic(b_fc, gradient_lin_layer_b, lr, preconL_b_fc, preconR_b_fc, momentum_b_fc, stepCounter_b_fc, rootEvery, preconEvery, bufferL_b_fc, bufferR_b_fc, preconLInvPowerRoot_b_fc, preconRInvPowerRoot_b_fc, useDiag_b_fc) + } + if (optimizer == "adam"){ + [W1, m_W1, v_W1] = adam::update(W1, gradient_W1, lr, 0.9, 0.999, epsilon, timestep, m_W1, v_W1) + [b1, m_b1, v_b1] = 
adam::update(b1, gradient_b1, lr, 0.9, 0.999, epsilon, timestep, m_b1, v_b1) + [W2, m_W2, v_W2] = adam::update(W2, gradient_W2, lr, 0.9, 0.999, epsilon, timestep, m_W2, v_W2) + [b2, m_b2, v_b2] = adam::update(b2, gradient_b2, lr, 0.9, 0.999, epsilon, timestep, m_b2, v_b2) + [W_fc, m_W_fc, v_W_fc] = adam::update(W_fc, gradient_lin_layer_W, lr, 0.9, 0.999, epsilon, timestep, m_W_fc, v_W_fc) + [b_fc, m_b_fc, v_b_fc] = adam::update(b_fc, gradient_lin_layer_b, lr, 0.9, 0.999, epsilon, timestep, m_b_fc, v_b_fc) + timestep = timestep + 1 + } + } + + train_losses[epoch,1] = loss_value / loss_count + train_accuracies[epoch,1] = accuracy_value/accuracy_count + + [softMaxOut_val, X_val, convOut1_val, poolOut1_val, reluOut1_val, convOut2_val, poolOut2_val, reluOut2_val, pred_val] = modelPredict(X_val, W1, b1, W2, b2, W_fc, b_fc, h_ins, w_ins, c_ins) + + + predicted_value_val = rowIndexMax(softMaxOut_val) - 1 + accuracy_val = sum(predicted_value_val==rowIndexMax(Y_val)-1) / length(predicted_value_val) + + val_accuracies[epoch,1] = accuracy_val + + loss = cross_entropy_loss::forward(softMaxOut_val, Y_val) + val_losses[epoch,1] = loss + } + + + # define the testing + + [softMaxOut_test, X_test, convOut1_test, poolOut1_test, reluOut1_test, convOut2_test, poolOut2_test, reluOut2_test, pred_test] = modelPredict(X_test, W1, b1, W2, b2, W_fc, b_fc, h_ins, w_ins, c_ins) + + predicted_value = rowIndexMax(softMaxOut_test) - 1 + accuracy = sum(predicted_value==rowIndexMax(Y_test)-1) / length(predicted_value) + + loss = cross_entropy_loss::forward(softMaxOut_test, Y_test) + + outDir = "scripts/staging/shampoo_optimizer/metrics" + epochs = nrow(train_losses) + epoch_col = seq(1, epochs) + M = cbind(epoch_col, train_losses, train_accuracies, val_losses, val_accuracies) + write(M, outDir + "/metrics" + "_" + optimizer + "_" + dataset_name + ".csv", format="csv") + + print("Test Accuracy of " + optimizer + " on " + dataset_name + " = " + accuracy) + print("Test Loss of " + optimizer + " on " + 
dataset_name + " = " + loss) + +} + diff --git a/src/test/scripts/applications/nn/component/shampoo_test.dml b/src/test/scripts/applications/nn/component/shampoo_test.dml new file mode 100644 index 00000000000..2ed90845261 --- /dev/null +++ b/src/test/scripts/applications/nn/component/shampoo_test.dml @@ -0,0 +1,437 @@ +#------------------------------------------------------------- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +#------------------------------------------------------------- + +source("scripts/nn/optim/shampoo.dml") as shampoo +source("scripts/nn/optim/adagrad.dml") as adagrad +source("scripts/nn/optim/adam.dml") as adam + +source("scripts/nn/layers/conv2d_builtin.dml") as conv2d +source("scripts/nn/layers/avg_pool2d_builtin.dml") as avg_pool2d +source("scripts/nn/layers/relu.dml") as relu +source("scripts/nn/layers/cross_entropy_loss.dml") as cross_entropy_loss +source("scripts/nn/layers/softmax.dml") as softmax + + + +# defining forward pass +modelPredict = function(matrix[double] X, matrix[double] W1, matrix[double] b1, matrix[double] W2, matrix[double] b2, matrix[double] W_fc, matrix[double] b_fc, list[unknown] h_ins, list[unknown] w_ins, list[unknown] c_ins) + return(matrix[double] softMaxOut, matrix[double] X, matrix[double] convOut1, matrix[double] poolOut1, matrix[double] reluOut1, matrix[double] convOut2, matrix[double] poolOut2, matrix[double] reluOut2, matrix[double] pred){ # + filters = 64 + conv_kernel = 5 + pool_kernel = 4 + conv_padding = 2 + pool_padding = 1 + + h_in = as.integer(as.scalar(h_ins[1])) + h_in_1 = as.integer(as.scalar(h_ins[2])) + h_in_2 = as.integer(as.scalar(h_ins[3])) + h_in_3 = as.integer(as.scalar(h_ins[4])) + h_in_4 = as.integer(as.scalar(h_ins[5])) + + w_in = as.integer(as.scalar(w_ins[1])) + w_in_1 = as.integer(as.scalar(w_ins[2])) + w_in_2 = as.integer(as.scalar(w_ins[3])) + w_in_3 = as.integer(as.scalar(w_ins[4])) + w_in_4 = as.integer(as.scalar(w_ins[5])) + + c_in = as.integer(as.scalar(c_ins[1])) + c_in_1 = as.integer(as.scalar(c_ins[2])) + + # first block + [convOut1, Hout, Wout] = conv2d::forward(X, W1, b1, c_in, h_in, w_in, conv_kernel, conv_kernel, 1, 1, conv_padding, conv_padding) + + [poolOut1, Hout, Wout] = avg_pool2d::forward(convOut1, c_in_1, h_in_1, w_in_1, pool_kernel, pool_kernel, 2, 2, pool_padding, pool_padding) + + reluOut1 = relu::forward(poolOut1) + + # second block + [convOut2, Hout, Wout] = 
conv2d::forward(reluOut1, W2, b2, c_in_1, h_in_2, w_in_2, conv_kernel, conv_kernel, 1, 1, conv_padding, conv_padding) + + [poolOut2, Hout, Wout] = avg_pool2d::forward(convOut2, c_in_1, h_in_3, w_in_3, pool_kernel, pool_kernel, 2, 2, pool_padding, pool_padding) + + reluOut2 = relu::forward(poolOut2) + + pred = reluOut2 %*% t(W_fc) + t(b_fc) + + softMaxOut = softmax::forward(pred) + + #Xs = list(X, convOut1, poolOut1, reluOut1, convOut2, poolOut2, reluOut2, pred, softMaxOut) + + } + +#defining backward pass +modelBackward = function(matrix[double] target, matrix[double] W1, matrix[double] b1, matrix[double] W2, matrix[double] b2, matrix[double] W_fc, matrix[double] b_fc, list[unknown] h_ins, list[unknown] w_ins, list[unknown] c_ins, matrix[double] X, matrix[double] convOut1, matrix[double] poolOut1, matrix[double] reluOut1, matrix[double] convOut2, matrix[double] poolOut2, matrix[double] reluOut2, matrix[double] pred, matrix[double] softMaxOut) + return (matrix[double] gradient_lin_layer_W, matrix[double] gradient_lin_layer_b, matrix[double] gradient_W2, matrix[double] gradient_b2, matrix[double] gradient_W1, matrix[double] gradient_b1){ + filters = 64 + conv_kernel = 5 + pool_kernel = 4 + conv_padding = 2 + pool_padding = 1 + + h_in = as.integer(as.scalar(h_ins[1])) + h_in_1 = as.integer(as.scalar(h_ins[2])) + h_in_2 = as.integer(as.scalar(h_ins[3])) + h_in_3 = as.integer(as.scalar(h_ins[4])) + h_in_4 = as.integer(as.scalar(h_ins[5])) + + w_in = as.integer(as.scalar(w_ins[1])) + w_in_1 = as.integer(as.scalar(w_ins[2])) + w_in_2 = as.integer(as.scalar(w_ins[3])) + w_in_3 = as.integer(as.scalar(w_ins[4])) + w_in_4 = as.integer(as.scalar(w_ins[5])) + + c_in = as.integer(as.scalar(c_ins[1])) + c_in_1 = as.integer(as.scalar(c_ins[2])) + + # gradient of loss function + gradient_lossfn = cross_entropy_loss::backward(softMaxOut, target) + + # gradient Softmax + gradient_softmax = softmax::backward(gradient_lossfn, pred) + + # gradients Linear layer + gradient_lin_layer_W = 
t(gradient_softmax) %*% reluOut2 + gradient_lin_layer_b = t(colSums(gradient_softmax)) + gradient_lin_layer_X = gradient_softmax %*% W_fc + + # gradient second Relu + gradient_relu = relu::backward(gradient_lin_layer_X, poolOut2) + + # gradient second pooling layer + gradient_second_pooling = avg_pool2d::backward(gradient_relu, h_in_4, w_in_4, convOut2, c_in_1, h_in_3, w_in_3, pool_kernel, pool_kernel, 2, 2, pool_padding, pool_padding) + + # gradient second conv layer + [gradient_second_conv_X, gradient_W2, gradient_b2] = conv2d::backward(gradient_second_pooling, h_in_3, w_in_3, reluOut1, W2, b2, c_in_1, h_in_2, w_in_2, conv_kernel, conv_kernel, 1, 1, conv_padding, conv_padding) + + # gradient of the first Relu + gradient_relu = relu::backward(gradient_second_conv_X, poolOut1) + + # gradient first pooling layer + gradient_first_pooling = avg_pool2d::backward(gradient_relu, h_in_2, w_in_2, convOut1, c_in_1, h_in_1, w_in_1, pool_kernel, pool_kernel, 2, 2, pool_padding, pool_padding) + + # gradient first conv layer + [gradient_first_conv_X, gradient_W1, gradient_b1] = conv2d::backward(gradient_first_pooling, h_in_1, w_in_1, X, W1, b1, c_in, h_in, w_in, conv_kernel, conv_kernel, 1, 1, conv_padding, conv_padding) + + } + + +definingData = function() + return(matrix[double] X_train, matrix[double] Y_train, matrix[double] X_val, matrix[double] Y_val, matrix[double] X_test, matrix[double] Y_test){ + + data = read("src/test/resources/datasets/MNIST/mnist_test.csv", format="csv") + train = data[1:8999,] + test = data[9000:nrow(data),] + + images = train[,2:ncol(train)] + images = images / 255.0 + labels = train[,1] + images_test = test[,2:ncol(test)] + labels_test = test[,1] + + N = nrow(images) + N_test = nrow(images_test) + + X_train = images[1001:nrow(images),] + labels_train = labels[1001:nrow(images),] + Y_train = table(seq(1, nrow(X_train)), labels_train+1, nrow(X_train), 10) + + X_val = images[1:1000,] + labels_val = labels[1:1000,] + Y_val = table(seq(1, 
nrow(X_val)), labels_val+1, nrow(X_val), 10) + + X_test = images_test + Y_test = table(seq(1, N_test), labels_test+1, N_test, 10) +} + +# Define image properties + +defining_image_properties = function() + return(int h_in, int w_in, int c_in, int classes){ + + h_in = 28 + w_in = 28 + c_in = 1 + classes = 10 + +} + +# Define training parameters +defining_training_parameters = function(string optimizer) + return(int epochs, int batch_size, double epsilon, double lr, int rootEvery, int preconEvery){ + + if ((optimizer == "shampoo_heuristic") | (optimizer == "shampoo_heuristic_diagonal")){ + epochs = 30 + batch_size = 64 + epsilon = 1e-4 + lr = 0.005 + rootEvery = 10 + preconEvery = 2 + }else{ + epochs = 30 + batch_size = 64 + epsilon = 1e-4 + lr = 0.005 + rootEvery = 0 + preconEvery = 0 + } +} + +defining_model_parameters = function() + return(int filters, int conv_kernel, int pool_kernel, int conv_padding, int pool_padding, int seed){ + + filters = 64 + conv_kernel = 5 + pool_kernel = 4 + conv_padding = 2 + pool_padding = 1 + seed = 42 +} + +# create simple nn for image classification +defining_nn_image_classification = function(int h_in, int w_in, int c_in, int classes, int filters, int conv_kernel, int pool_kernel, int conv_padding, int pool_padding, int seed) + return(list[unknown] h_ins, list[unknown] w_ins, list[unknown] c_ins, matrix[double] W1, matrix[double] b1, matrix[double] W2, matrix[double] b2, matrix[double] W_fc, matrix[double] b_fc){ + + # convolution layer 1 + [W1, b1] = conv2d::init(filters, c_in, conv_kernel, conv_kernel, seed) + h_in_1 = h_in + conv_padding*2 - (conv_kernel - 1) + w_in_1 = w_in + conv_padding*2 - (conv_kernel - 1) + c_in_1 = filters + # pooling + h_in_2 = floor((h_in_1 + pool_padding*2 - pool_kernel)/2)+1 + w_in_2 = floor((w_in_1 + pool_padding*2 - pool_kernel)/2)+1 + # relu + + # convolution layer 2 + [W2, b2] = conv2d::init(filters, c_in_1, conv_kernel, conv_kernel, seed) + h_in_3 = h_in_2 + conv_padding*2 - (conv_kernel - 1) + 
w_in_3 = w_in_2 + conv_padding*2 - (conv_kernel - 1) + c_in_1 = filters + # pooling + h_in_4 = floor((h_in_3 + pool_padding*2 - pool_kernel)/2)+1 + w_in_4 = floor((w_in_3 + pool_padding*2 - pool_kernel)/2)+1 + # relu + + # Linear + W_fc = rand(rows=classes, cols=h_in_4*w_in_4*c_in_1, pdf="uniform", min=-0.1, max=0.1, seed=seed) + b_fc = matrix(0, rows=classes, cols=1) + + h_ins = list(h_in, h_in_1, h_in_2, h_in_3, h_in_4) + w_ins = list(w_in, w_in_1, w_in_2, w_in_3, w_in_4) + c_ins = list(c_in, c_in_1) +} + +get_optimizer = function(int optimizer_index) + return(string optimizer){ + + if (optimizer_index==1) + { + optimizer = "shampoo" + } + if (optimizer_index==2) + { + optimizer = "shampoo_diagonal" + } + if (optimizer_index==3) + { + optimizer = "shampoo_momentum" + } + if (optimizer_index==4) + { + optimizer = "shampoo_momentum_diagonal" + } + if (optimizer_index==5) + { + optimizer = "shampoo_heuristic" + } + if (optimizer_index==6) + { + optimizer = "shampoo_heuristic_diagonal" + } +} + +# set parameters for the tests +############################################################################################# +optimizers_to_experiment = list("shampoo", "shampoo_diagonal", "shampoo_momentum", + "shampoo_momentum_diagonal", "shampoo_heuristic", "shampoo_heuristic_diagonal") +############################################################################################# + +for (optimizer_index in 1:length(optimizers_to_experiment)){ + optimizer = get_optimizer(optimizer_index) + print("Starting with " + optimizer) + + # get the data + [X_train, Y_train, X_val, Y_val, X_test, Y_test] = definingData() + + # get image properties + [h_in, w_in, c_in, classes] = defining_image_properties() + + # get model parameters + [filters, conv_kernel, pool_kernel, conv_padding, pool_padding, seed] = defining_model_parameters() + + #get model weights + [h_ins, w_ins, c_ins, W1, b1, W2, b2, W_fc, b_fc] = defining_nn_image_classification(h_in, w_in, c_in, classes, filters, 
conv_kernel, pool_kernel, conv_padding, pool_padding, seed) + + # get training parameters + [epochs, batch_size, epsilon, lr, rootEvery, preconEvery]= defining_training_parameters(optimizer) + + + if ((optimizer == "shampoo") | (optimizer == "shampoo_momentum") | (optimizer == "shampoo_heuristic")){ + diagThreshold = 1200 + }else{ + diagThreshold = 1 + } + + if ((optimizer == "shampoo") | (optimizer == "shampoo_diagonal")){ + [preconL_W1, preconR_W1, useDiag_W1] = shampoo::init(W1, epsilon, diagThreshold) + [preconL_b1, preconR_b1, useDiag_b1] = shampoo::init(b1, epsilon, diagThreshold) + [preconL_W2, preconR_W2, useDiag_W2] = shampoo::init(W2, epsilon, diagThreshold) + [preconL_b2, preconR_b2, useDiag_b2] = shampoo::init(b2, epsilon, diagThreshold) + [preconL_W_fc, preconR_W_fc, useDiag_W_fc] = shampoo::init(W_fc, epsilon, diagThreshold) + [preconL_b_fc, preconR_b_fc, useDiag_b_fc] = shampoo::init(b_fc, epsilon, diagThreshold) + } + + if ((optimizer == "shampoo_momentum") | (optimizer == "shampoo_momentum_diagonal")){ + [preconL_W1, preconR_W1, momentum_W1, useDiag_W1] = shampoo::init_momentum(W1, epsilon, diagThreshold) + [preconL_b1, preconR_b1, momentum_b1, useDiag_b1] = shampoo::init_momentum(b1, epsilon, diagThreshold) + [preconL_W2, preconR_W2, momentum_W2, useDiag_W2] = shampoo::init_momentum(W2, epsilon, diagThreshold) + [preconL_b2, preconR_b2, momentum_b2, useDiag_b2] = shampoo::init_momentum(b2, epsilon, diagThreshold) + [preconL_W_fc, preconR_W_fc, momentum_W_fc, useDiag_W_fc] = shampoo::init_momentum(W_fc, epsilon, diagThreshold) + [preconL_b_fc, preconR_b_fc, momentum_b_fc, useDiag_b_fc] = shampoo::init_momentum(b_fc, epsilon, diagThreshold) + } + if ((optimizer == "shampoo_heuristic") | (optimizer == "shampoo_heuristic_diagonal")){ + [preconL_W1, preconR_W1, stepCounter_W1, bufferL_W1, bufferR_W1, momentum_W1, preconLInvPowerRoot_W1, preconRInvPowerRoot_W1, useDiag_W1] = shampoo::init_heuristic(W1, epsilon, diagThreshold) + [preconL_b1, preconR_b1, 
stepCounter_b1, bufferL_b1, bufferR_b1, momentum_b1, preconLInvPowerRoot_b1, preconRInvPowerRoot_b1, useDiag_b1] = shampoo::init_heuristic(b1, epsilon, diagThreshold) + [preconL_W2, preconR_W2, stepCounter_W2, bufferL_W2, bufferR_W2, momentum_W2, preconLInvPowerRoot_W2, preconRInvPowerRoot_W2, useDiag_W2] = shampoo::init_heuristic(W2, epsilon, diagThreshold) + [preconL_b2, preconR_b2, stepCounter_b2, bufferL_b2, bufferR_b2, momentum_b2, preconLInvPowerRoot_b2, preconRInvPowerRoot_b2, useDiag_b2] = shampoo::init_heuristic(b2, epsilon, diagThreshold) + [preconL_W_fc, preconR_W_fc, stepCounter_W_fc, bufferL_W_fc, bufferR_W_fc, momentum_W_fc, preconLInvPowerRoot_W_fc, preconRInvPowerRoot_W_fc, useDiag_W_fc] = shampoo::init_heuristic(W_fc, epsilon, diagThreshold) + [preconL_b_fc, preconR_b_fc, stepCounter_b_fc, bufferL_b_fc, bufferR_b_fc, momentum_b_fc, preconLInvPowerRoot_b_fc, preconRInvPowerRoot_b_fc, useDiag_b_fc] = shampoo::init_heuristic(b_fc, epsilon, diagThreshold) + } + print(useDiag_W1) + print(useDiag_b1) + print(useDiag_W2) + print(useDiag_b2) + print(useDiag_W_fc) + print(useDiag_b_fc) + data_val_X = X_val + data_val_Y = Y_val + + # define the training + + train_losses = matrix(0, rows=epochs, cols=1) + train_accuracies = matrix(0, rows=epochs, cols=1) + val_accuracies = matrix(0, rows=epochs, cols=1) + val_losses = matrix(0, rows=epochs, cols=1) + Ntrain = nrow(X_train) + + timestep = 0 + + for(epoch in 1:epochs){ + + print("Epoch " + epoch + " of " + epochs + " epochs") + + accuracy_value = 0 + accuracy_count = 0 + loss_value = 0 + loss_count = 0 + + + for(start_index in seq(1, Ntrain, batch_size)){ + #start_index = (i - 1) * batch_size + 1 + end_index = min(start_index + batch_size - 1, Ntrain) + data_train_X = X_train[start_index:end_index,] + data_train_Y = Y_train[start_index:end_index,] + + [softMaxOut, X, convOut1, poolOut1, reluOut1, convOut2, poolOut2, reluOut2, pred] = modelPredict(data_train_X, W1, b1, W2, b2, W_fc, b_fc, h_ins, w_ins, c_ins) + 
+      predicted_value = rowIndexMax(softMaxOut) - 1
+      accuracy = sum(predicted_value==rowIndexMax(data_train_Y)-1) / length(predicted_value)
+      accuracy_value = accuracy_value + accuracy
+      accuracy_count = accuracy_count + 1
+
+      loss = cross_entropy_loss::forward(softMaxOut, data_train_Y)
+      loss_value = loss_value + loss
+      loss_count = loss_count + 1
+
+
+      [gradient_lin_layer_W, gradient_lin_layer_b, gradient_W2, gradient_b2, gradient_W1, gradient_b1] = modelBackward(data_train_Y, W1, b1, W2, b2, W_fc, b_fc, h_ins, w_ins, c_ins, X, convOut1, poolOut1, reluOut1, convOut2, poolOut2, reluOut2, pred, softMaxOut) # fix: 'gradient_b2' had been corrupted to 'rootEvery, preconEveryadient_b2' (bad search/replace), giving 7 lvalues for a 6-output function
+
+      if ((optimizer == "shampoo") | (optimizer == "shampoo_diagonal")){
+        [W1, preconL_W1, preconR_W1] = shampoo::update(W1, gradient_W1, lr, preconL_W1, preconR_W1, useDiag_W1)
+        [b1, preconL_b1, preconR_b1] = shampoo::update(b1, gradient_b1, lr, preconL_b1, preconR_b1, useDiag_b1)
+        [W2, preconL_W2, preconR_W2] = shampoo::update(W2, gradient_W2, lr, preconL_W2, preconR_W2, useDiag_W2)
+        [b2, preconL_b2, preconR_b2] = shampoo::update(b2, gradient_b2, lr, preconL_b2, preconR_b2, useDiag_b2)
+        [W_fc, preconL_W_fc, preconR_W_fc] = shampoo::update(W_fc, gradient_lin_layer_W, lr, preconL_W_fc, preconR_W_fc, useDiag_W_fc)
+        [b_fc, preconL_b_fc, preconR_b_fc] = shampoo::update(b_fc, gradient_lin_layer_b, lr, preconL_b_fc, preconR_b_fc, useDiag_b_fc)
+      }
+      if ((optimizer == "shampoo_momentum") | (optimizer == "shampoo_momentum_diagonal")){
+        [W1, preconL_W1, preconR_W1, momentum_W1] = shampoo::update_momentum(W1, gradient_W1, lr, preconL_W1, preconR_W1, momentum_W1, useDiag_W1)
+        [b1, preconL_b1, preconR_b1, momentum_b1] = shampoo::update_momentum(b1, gradient_b1, lr, preconL_b1, preconR_b1, momentum_b1, useDiag_b1)
+        [W2, preconL_W2, preconR_W2, momentum_W2] = shampoo::update_momentum(W2, gradient_W2, lr, preconL_W2, preconR_W2, momentum_W2, useDiag_W2)
+        [b2, preconL_b2, preconR_b2, momentum_b2] = shampoo::update_momentum(b2, gradient_b2, lr,
preconL_b2, preconR_b2, momentum_b2, useDiag_b2) + [W_fc, preconL_W_fc, preconR_W_fc, momentum_W_fc] = shampoo::update_momentum(W_fc, gradient_lin_layer_W, lr, preconL_W_fc, preconR_W_fc, momentum_W_fc, useDiag_W_fc) + [b_fc, preconL_b_fc, preconR_b_fc, momentum_b_fc] = shampoo::update_momentum(b_fc, gradient_lin_layer_b, lr, preconL_b_fc, preconR_b_fc, momentum_b_fc, useDiag_b_fc) + } + if ((optimizer == "shampoo_heuristic") | (optimizer == "shampoo_heuristic_diagonal")){ + [W1, preconL_W1, preconR_W1, momentum_W1, stepCounter_W1, bufferL_W1, bufferR_W1, preconLInvPowerRoot_W1, preconRInvPowerRoot_W1] = shampoo::update_heuristic(W1, gradient_W1, lr, preconL_W1, preconR_W1, momentum_W1, stepCounter_W1, rootEvery, preconEvery, bufferL_W1, bufferR_W1, preconLInvPowerRoot_W1, preconRInvPowerRoot_W1, useDiag_W1) + [b1, preconL_b1, preconR_b1, momentum_b1, stepCounter_b1, bufferL_b1, bufferR_b1, preconLInvPowerRoot_b1, preconRInvPowerRoot_b1] = shampoo::update_heuristic(b1, gradient_b1, lr, preconL_b1, preconR_b1, momentum_b1, stepCounter_b1, rootEvery, preconEvery, bufferL_b1, bufferR_b1, preconLInvPowerRoot_b1, preconRInvPowerRoot_b1, useDiag_b1) + [W2, preconL_W2, preconR_W2, momentum_W2, stepCounter_W2, bufferL_W2, bufferR_W2, preconLInvPowerRoot_W2, preconRInvPowerRoot_W2] = shampoo::update_heuristic(W2, gradient_W2, lr, preconL_W2, preconR_W2, momentum_W2, stepCounter_W2, rootEvery, preconEvery, bufferL_W2, bufferR_W2, preconLInvPowerRoot_W2, preconRInvPowerRoot_W2, useDiag_W2) + [b2, preconL_b2, preconR_b2, momentum_b2, stepCounter_b2, bufferL_b2, bufferR_b2, preconLInvPowerRoot_b2, preconRInvPowerRoot_b2] = shampoo::update_heuristic(b2, gradient_b2, lr, preconL_b2, preconR_b2, momentum_b2, stepCounter_b2, rootEvery, preconEvery, bufferL_b2, bufferR_b2, preconLInvPowerRoot_b2, preconRInvPowerRoot_b2, useDiag_b2) + [W_fc, preconL_W_fc, preconR_W_fc, momentum_W_fc, stepCounter_W_fc, bufferL_W_fc, bufferR_W_fc, preconLInvPowerRoot_W_fc, preconRInvPowerRoot_W_fc] = 
shampoo::update_heuristic(W_fc, gradient_lin_layer_W, lr, preconL_W_fc, preconR_W_fc, momentum_W_fc, stepCounter_W_fc, rootEvery, preconEvery, bufferL_W_fc, bufferR_W_fc, preconLInvPowerRoot_W_fc, preconRInvPowerRoot_W_fc, useDiag_W_fc) + [b_fc, preconL_b_fc, preconR_b_fc, momentum_b_fc, stepCounter_b_fc, bufferL_b_fc, bufferR_b_fc, preconLInvPowerRoot_b_fc, preconRInvPowerRoot_b_fc] = shampoo::update_heuristic(b_fc, gradient_lin_layer_b, lr, preconL_b_fc, preconR_b_fc, momentum_b_fc, stepCounter_b_fc, rootEvery, preconEvery, bufferL_b_fc, bufferR_b_fc, preconLInvPowerRoot_b_fc, preconRInvPowerRoot_b_fc, useDiag_b_fc) + } + } + + train_losses[epoch,1] = loss_value / loss_count + train_accuracies[epoch,1] = accuracy_value/accuracy_count + + [softMaxOut_val, X_val, convOut1_val, poolOut1_val, reluOut1_val, convOut2_val, poolOut2_val, reluOut2_val, pred_val] = modelPredict(X_val, W1, b1, W2, b2, W_fc, b_fc, h_ins, w_ins, c_ins) + + + predicted_value_val = rowIndexMax(softMaxOut_val) - 1 + accuracy_val = sum(predicted_value_val==rowIndexMax(Y_val)-1) / length(predicted_value_val) + + val_accuracies[epoch,1] = accuracy_val + + loss = cross_entropy_loss::forward(softMaxOut_val, Y_val) + val_losses[epoch,1] = loss + } + + + # define the testing + + [softMaxOut_test, X_test, convOut1_test, poolOut1_test, reluOut1_test, convOut2_test, poolOut2_test, reluOut2_test, pred_test] = modelPredict(X_test, W1, b1, W2, b2, W_fc, b_fc, h_ins, w_ins, c_ins) + + predicted_value = rowIndexMax(softMaxOut_test) - 1 + accuracy = sum(predicted_value==rowIndexMax(Y_test)-1) / length(predicted_value) + + loss = cross_entropy_loss::forward(softMaxOut_test, Y_test) + + print("Test Accuracy of " + optimizer + " = " + accuracy) + print("Test Loss of " + optimizer + " = " + loss) + + if (accuracy > 0.7){ + print("Test passed") + } else{ + print("Test failed") + } + +} + diff --git a/src/test/scripts/applications/nn/component/shampoo_test.py 
b/src/test/scripts/applications/nn/component/shampoo_test.py
new file mode 100644
index 00000000000..94028ca443d
--- /dev/null
+++ b/src/test/scripts/applications/nn/component/shampoo_test.py
@@ -0,0 +1,225 @@
+import numpy as np
+
+# One Shampoo step (full-matrix or diagonal); NumPy reference for shampoo.dml::update.
+def update_shampoo(X, dX, lr, preconL, preconR, useDiag):
+    if(not useDiag):
+
+        preconL = preconL + dX @ dX.T
+        preconR = preconR + dX.T @ dX
+
+        LEigenvalue, LEigenvector = np.linalg.eigh(preconL)  # eigh: preconL is symmetric PSD; eig may return complex parts
+        preconLInvPowerRoot = LEigenvector @ np.diag(LEigenvalue**(-0.25)) @ LEigenvector.T
+
+        REigenvalue, REigenvector = np.linalg.eigh(preconR)  # eigh: symmetric PSD input
+        preconRInvPowerRoot = REigenvector @ np.diag(REigenvalue**(-0.25)) @ REigenvector.T
+
+        X = X - lr * preconLInvPowerRoot @ dX @ preconRInvPowerRoot
+
+    # Diagonal Shampoo:
+    # Memory-efficient approximation for large parameter matrices
+    else:
+        n = dX.shape[0]
+        m = dX.shape[1]
+
+        preconL = preconL + (dX**2).sum(axis=1, keepdims=True)
+        preconR = preconR + (dX**2).sum(axis=0, keepdims=True)
+
+        preconLScale = preconL**(-0.25)
+        preconRScale = preconR**(-0.25)
+
+        preconLMatrix = preconLScale @ np.ones(shape=[1, m])
+        preconRMatrix = np.ones(shape=(n, 1)) @ preconRScale
+
+        scaledGrad = dX * preconLMatrix
+        scaledGrad = scaledGrad * preconRMatrix
+
+        X = X - lr * scaledGrad
+
+    return(X, preconL, preconR)
+
+# init shampoo: full preconditioners (epsilon * I) below the size threshold, diagonal vectors above it
+
+def init_shampoo(X, epsilon, useDiagThreshold):
+    if((X.shape[0] > useDiagThreshold) or (X.shape[1] > useDiagThreshold)):
+        preconL = np.full(shape=(X.shape[0], 1), fill_value=epsilon, dtype=np.float64)
+        preconR = np.full(shape=(1, X.shape[1]), fill_value=epsilon, dtype=np.float64)
+        useDiag = True
+    else:
+        preconL = np.eye(X.shape[0], dtype=np.float64) * epsilon
+        preconR = np.eye(X.shape[1], dtype=np.float64) * epsilon
+
+        useDiag = False
+    return(preconL, preconR, useDiag)
+
+# Shampoo step with heavy-ball momentum; reference for shampoo.dml::update_momentum.
+def update_shampoo_momentum(X, dX, lr, preconL, preconR, momentum, useDiag):
+    momentum = 0.9 * momentum + (0.1)*dX
+    if(not useDiag):
+
+        preconL = preconL + dX @ dX.T
+        preconR = preconR + dX.T @ dX
+
+        LEigenvalue, LEigenvector = np.linalg.eigh(preconL)  # eigh: symmetric PSD input
+        preconLInvPowerRoot = LEigenvector @ np.diag(LEigenvalue**(-0.25)) @ LEigenvector.T
+
+        REigenvalue, REigenvector = np.linalg.eigh(preconR)  # eigh: symmetric PSD input
+        preconRInvPowerRoot = REigenvector @ np.diag(REigenvalue**(-0.25)) @ REigenvector.T
+
+        X = X - lr * preconLInvPowerRoot @ momentum @ preconRInvPowerRoot
+
+    # Diagonal Shampoo:
+    # Memory-efficient approximation for large parameter matrices
+    else:
+        n = dX.shape[0]
+        m = dX.shape[1]
+
+        preconL = preconL + (dX**2).sum(axis=1, keepdims=True)
+        preconR = preconR + (dX**2).sum(axis=0, keepdims=True)
+
+        preconLScale = preconL**(-0.25)
+        preconRScale = preconR**(-0.25)
+
+        preconLMatrix = preconLScale @ np.ones(shape=[1, m])
+        preconRMatrix = np.ones(shape=(n, 1)) @ preconRScale
+
+        scaledGrad = momentum * preconLMatrix
+        scaledGrad = scaledGrad * preconRMatrix
+
+        X = X - lr * scaledGrad
+
+    return(X, preconL, preconR, momentum)
+
+# init shampoo (momentum variant): same as init_shampoo plus a zeroed momentum buffer
+
+def init_shampoo_momentum(X, epsilon, useDiagThreshold):
+    if((X.shape[0] > useDiagThreshold) or (X.shape[1] > useDiagThreshold)):
+        preconL = np.full(shape=(X.shape[0], 1), fill_value=epsilon, dtype=np.float64)
+        preconR = np.full(shape=(1, X.shape[1]), fill_value=epsilon, dtype=np.float64)
+        useDiag = True
+    else:
+        preconL = np.eye(X.shape[0], dtype=np.float64) * epsilon
+        preconR = np.eye(X.shape[1], dtype=np.float64) * epsilon
+
+        useDiag = False
+
+    momentum = X * 0
+    return(preconL, preconR, momentum, useDiag)
+
+# Heuristic Shampoo step: gram updates buffered; precon/root refreshes run only every preconEvery/rootEvery steps.
+def update_shampoo_heuristic(X, dX, lr, preconL, preconR, momentum, stepCounter, rootEvery, preconEvery, bufferL, bufferR, preconLInvPowerRoot, preconRInvPowerRoot, useDiag):
+    momentum = 0.9 * momentum + (0.1)*dX
+    if(not useDiag):
+        bufferL = bufferL + (dX @ dX.T)
+        bufferR = bufferR + (dX.T @ dX)
+
+        if ((stepCounter > 0) and (stepCounter % preconEvery == 0)):
+            preconL = preconL + bufferL
+            preconR = preconR + bufferR
+            bufferL = bufferL * 0
+            bufferR = bufferR * 0
+
+        if ((stepCounter > 0) and (stepCounter % rootEvery == 0)):
+            LEigenvalue, LEigenvector = np.linalg.eigh(preconL)  # eigh: symmetric PSD input
+            preconLInvPowerRoot = LEigenvector @ np.diag(LEigenvalue**(-0.25)) @ LEigenvector.T
+
+            REigenvalue, REigenvector = np.linalg.eigh(preconR)  # eigh: symmetric PSD input
+            preconRInvPowerRoot = REigenvector @ np.diag(REigenvalue**(-0.25)) @ REigenvector.T
+
+        X = X - lr * preconLInvPowerRoot @ momentum @ preconRInvPowerRoot
+
+    # Diagonal Shampoo:
+    # Memory-efficient approximation for large parameter matrices
+    else:
+        n = dX.shape[0]
+        m = dX.shape[1]
+
+        bufferL = bufferL + (dX**2).sum(axis=1, keepdims=True)
+        bufferR = bufferR + (dX**2).sum(axis=0, keepdims=True)
+
+        if ((stepCounter > 0) and (stepCounter % preconEvery == 0)):
+            preconL = preconL + bufferL
+            preconR = preconR + bufferR
+            bufferL = bufferL * 0
+            bufferR = bufferR * 0
+
+        if ((stepCounter > 0) and (stepCounter % rootEvery == 0)):
+            preconLInvPowerRoot = preconL**(-0.25)
+            preconRInvPowerRoot = preconR**(-0.25)
+
+        preconLMatrix = preconLInvPowerRoot @ np.ones(shape=[1, m])
+        preconRMatrix = np.ones(shape=(n, 1)) @ preconRInvPowerRoot
+
+        scaledGrad = momentum * preconLMatrix
+        scaledGrad = scaledGrad * preconRMatrix
+
+        X = X - lr * scaledGrad
+
+    stepCounter = stepCounter + 1  # fix: counter was never advanced, so the periodic refreshes above could never trigger
+    return(X, preconL, preconR, momentum, stepCounter, bufferL, bufferR, preconLInvPowerRoot, preconRInvPowerRoot)
+
+# init shampoo (heuristic variant): preconditioners, roots, buffers, momentum, step counter
+
+def init_shampoo_heuristic(X, epsilon, useDiagThreshold):
+    if((X.shape[0] > useDiagThreshold) or (X.shape[1] > useDiagThreshold)):
+        preconL = np.full(shape=(X.shape[0], 1), fill_value=epsilon, dtype=np.float64)
+        preconR = np.full(shape=(1, X.shape[1]), fill_value=epsilon, dtype=np.float64)
+        preconLInvPowerRoot = preconL**(-0.25)
+        preconRInvPowerRoot = preconR**(-0.25)
+        useDiag = True
+    else:
+        preconL = np.eye(X.shape[0], dtype=np.float64) * epsilon
+        preconR = np.eye(X.shape[1], dtype=np.float64) * epsilon
+        preconLInvPowerRoot = np.eye(X.shape[0], dtype=np.float64) * epsilon**(-0.25)
+        preconRInvPowerRoot = np.eye(X.shape[1], dtype=np.float64) * epsilon**(-0.25)
+
+        useDiag = False
+
+    momentum = X * 0
+    bufferR = preconR * 0
+    bufferL = preconL * 0
+    stepCounter = 0
+    return(preconL, preconR, stepCounter, bufferL, bufferR, momentum, preconLInvPowerRoot, preconRInvPowerRoot, useDiag)
+
+n = 5
+m = 5
+epsilon = 1e-4
+lr = 0.005
+diagThreshold = 10
+rootEvery=10
+preconEvery=10
+
+# define weight matrix
+X = np.array([
+    [ 0.12, -0.45, 0.33, 0.08, -0.19],
+    [-0.27, 0.41, -0.05, 0.22, 0.14],
+    [ 0.09, -0.31, 0.26, -0.48, 0.37],
+    [ 0.44, 0.06, -0.29, 0.15, -0.11],
+    [-0.38, 0.24, 0.17, -0.07, 0.52],
+], dtype=np.float64)
+
+# define gradient
+dX_main = np.array([
+    [ 0.015, -0.022, 0.008, 0.031, -0.012],
+    [-0.009, 0.027, -0.014, 0.005, 0.019],
+    [ 0.021, -0.006, 0.011, -0.025, 0.004],
+    [-0.018, 0.013, -0.029, 0.007, -0.016],
+    [ 0.010, -0.017, 0.024, -0.003, 0.028],
+], dtype=np.float64)
+
+for diagThreshold in (1, 10):
+    X_py = X.copy()
+    dX = dX_main.copy()
+
+    # preconL_py, preconR_py, useDiag_py = init_shampoo(X_py, epsilon, diagThreshold)
+    # X_py, preconL_py, preconR_py = update_shampoo(X_py, dX, lr, preconL_py, preconR_py, useDiag_py)
+
+    # preconL_py, preconR_py, momentum_py, useDiag_py = init_shampoo_momentum(X_py, epsilon, diagThreshold)
+    # X_py, preconL_py, preconR_py, momentum_py = update_shampoo_momentum(X_py, dX, lr, preconL_py, preconR_py, momentum_py, useDiag_py)
+
+    preconL, preconR, stepCounter, bufferL, bufferR, momentum, preconLInvPowerRoot, preconRInvPowerRoot, useDiag = init_shampoo_heuristic(X_py, epsilon, diagThreshold)
+    X_py, preconL, preconR, momentum, stepCounter, bufferL, bufferR, preconLInvPowerRoot, preconRInvPowerRoot = update_shampoo_heuristic(X_py, dX, lr, preconL, preconR, momentum, stepCounter, rootEvery, preconEvery, bufferL, bufferR, preconLInvPowerRoot, preconRInvPowerRoot, useDiag)
+
+    print("diagThreshold: " + str(diagThreshold))
+    print(X_py)
+
+    
\ No newline at end of file
diff --git 
a/src/test/scripts/applications/nn/component/shampoo_test2.dml b/src/test/scripts/applications/nn/component/shampoo_test2.dml
new file mode 100644
index 00000000000..750bdeef7c0
--- /dev/null
+++ b/src/test/scripts/applications/nn/component/shampoo_test2.dml
@@ -0,0 +1,56 @@
+#-------------------------------------------------------------
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#-------------------------------------------------------------
+
+# Smoke test: one shampoo::init_heuristic/update_heuristic step on a fixed 5x5 problem.
+
+source("scripts/nn/optim/shampoo.dml") as shampoo
+
+X_main = matrix("0.12 -0.45 0.33 0.08 -0.19 -0.27 0.41 -0.05 0.22 0.14 0.09 -0.31 0.26 -0.48 0.37 0.44 0.06 -0.29 0.15 -0.11 -0.38 0.24 0.17 -0.07 0.52",
+  rows=5, cols=5
+)
+
+dX_main = matrix("0.015 -0.022 0.008 0.031 -0.012 -0.009 0.027 -0.014 0.005 0.019 0.021 -0.006 0.011 -0.025 0.004 -0.018 0.013 -0.029 0.007 -0.016 0.010 -0.017 0.024 -0.003 0.028",
+  rows=5, cols=5
+)
+
+epsilon = 1e-4
+lr = 0.005
+diagThreshold = 10
+rootEvery = 10
+preconEvery = 10
+
+for (diagThreshold in seq(1, 10, 9)){ # diagThreshold=1 -> diagonal mode, 10 -> full-matrix mode
+  X = X_main
+  dX = dX_main
+  #[preconL, preconR, useDiag] = shampoo::init(X, epsilon, diagThreshold)
+  #[X, preconL, preconR] = shampoo::update(X, dX, lr, preconL, preconR, useDiag)
+
+  #[preconL, preconR, momentum, useDiag] = shampoo::init_momentum(X, epsilon, diagThreshold)
+  #[X, preconL, preconR, momentum] = shampoo::update_momentum(X, dX, lr, preconL, preconR, momentum, useDiag)
+
+  [preconL, preconR, stepCounter, bufferL, bufferR, momentum, preconLInvPowerRoot, preconRInvPowerRoot, useDiag] = shampoo::init_heuristic(X, epsilon, diagThreshold)
+  [X, preconL, preconR, momentum, stepCounter, bufferL, bufferR, preconLInvPowerRoot, preconRInvPowerRoot] = shampoo::update_heuristic(X, dX, lr, preconL, preconR, momentum, stepCounter, rootEvery, preconEvery, bufferL, bufferR, preconLInvPowerRoot, preconRInvPowerRoot, useDiag)
+
+  print("diagThreshold: " + diagThreshold)
+  print(X)
+}
+
+