use std::io::stdin;
use std::time::{SystemTime, UNIX_EPOCH};
pub const INPUT_COUNT: usize = 24;
pub const OUTPUT_COUNT: usize = 12;
pub const LEARNING_RATE: f64 = 0.012;
// ReLU activation. The commented lines are sigmoid and tanh variants kept for
// reference.
pub fn activation_function(x: f64) -> f64 {
    //1. / (1. + std::f64::consts::E.powf(-x))
    //x.tanh()
    x.max(0.)
}
// Derivative of the activation function with respect to its input, used as
// da/dz in back_propagate. For ReLU this is a step function. (The original
// version returned the inverse of the activation instead of its derivative,
// which skews the gradient.) The commented lines are the sigmoid and tanh
// derivatives, matching the variants above.
#[allow(dead_code)]
pub fn activation_derivative(x: f64) -> f64 {
    //{ let s = 1. / (1. + std::f64::consts::E.powf(-x)); s * (1. - s) }
    //1. - x.tanh().powi(2)
    if x > 0. {
        1.
    } else {
        0.
    }
}
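// A minimal sanity check for the activation pair above; the expected values
// follow from the ReLU definition and are not part of the original code.
#[cfg(test)]
mod activation_tests {
    use super::*;
    #[test]
    fn relu_and_derivative() {
        assert_eq!(activation_function(-2.), 0.);
        assert_eq!(activation_function(3.), 3.);
        assert_eq!(activation_derivative(-2.), 0.);
        assert_eq!(activation_derivative(3.), 1.);
    }
}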
// Time-based stand-in for a real RNG: hashes the current nanosecond timestamp
// into [0, 1). Calls made in quick succession return strongly correlated
// values.
#[allow(dead_code)]
pub fn pseudo_rand() -> f64 {
    let start = SystemTime::now();
    let since_the_epoch = start
        .duration_since(UNIX_EPOCH)
        .expect("Time went backwards");
    const MAX: u128 = 1_000_000;
    // Randomish enough
    ((since_the_epoch.as_nanos() * 23525623626 >> 2) % MAX) as f64 / MAX as f64
}
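// Range check for pseudo_rand; a sketch only, since the generator is
// time-seeded and deliberately not reproducible.
#[cfg(test)]
mod pseudo_rand_tests {
    use super::*;
    #[test]
    fn stays_in_unit_interval() {
        for _ in 0..100 {
            let r = pseudo_rand();
            assert!((0. ..1.).contains(&r));
        }
    }
}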
// Fixed-size data point. `new` normalizes raw values from
// [MIN_VALUE, MAX_VALUE] into [0, 1]; `get` maps them back.
#[derive(Debug, Clone)]
pub struct DataPoint<const S: usize>(pub [f64; S]);
impl<const S: usize> DataPoint<S> {
    const MIN_VALUE: f64 = -45.;
    const MAX_VALUE: f64 = 45.;
    // Panics if `values.len() != S` (clone_from_slice checks the lengths).
    pub fn new(values: &[f64]) -> Self {
        let mut data_point = Self([0.; S]);
        data_point.0.clone_from_slice(values);
        let range = Self::MAX_VALUE - Self::MIN_VALUE;
        for i in 0..S {
            data_point.0[i] = (data_point.0[i] - Self::MIN_VALUE) / range
        }
        data_point
    }
    // Copies values verbatim, without normalization.
    pub fn from_data(values: &[f64]) -> Self {
        let mut data_point = Self([0.; S]);
        data_point.0.clone_from_slice(values);
        data_point
    }
    // Denormalizes the i-th value back to the original range.
    pub fn get(&self, i: usize) -> f64 {
        let range = Self::MAX_VALUE - Self::MIN_VALUE;
        self.0[i] * range + Self::MIN_VALUE
    }
}
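// Round-trip check for the normalization above; the tolerance is an
// assumption chosen for the test.
#[cfg(test)]
mod data_point_tests {
    use super::*;
    #[test]
    fn normalize_round_trip() {
        let point = DataPoint::<3>::new(&[-45., 0., 45.]);
        assert!((point.0[0] - 0.).abs() < 1e-12);
        assert!((point.0[2] - 1.).abs() < 1e-12);
        assert!((point.get(1) - 0.).abs() < 1e-12);
    }
}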
// Fully connected layer with I inputs and O outputs.
#[derive(Debug, Clone, Copy)]
pub struct Layer<const I: usize, const O: usize> {
weights: [[f64; I]; O],
biases: [f64; O],
}
impl<const I: usize, const O: usize> Layer<I, O> {
pub fn new() -> Self {
Self {
weights: [[0.; I]; O],
biases: [0.; O],
}
}
    // Initializes weights and biases uniformly in [-0.1, 0.1).
    pub fn random() -> Self {
let mut layer = Self::new();
for i in 0..O {
for j in 0..I {
layer.weights[i][j] = (pseudo_rand() * 2. - 1.) / 10.;
}
layer.biases[i] = (pseudo_rand() * 2. - 1.) / 10.;
}
layer
}
    // Forward pass: applies activation_function(W * a + b) row by row.
    pub fn propagate(&self, activation: &[f64; I]) -> [f64; O] {
let mut result = [0.; O];
for i in 0..O {
let mut value = 0.;
for j in 0..I {
value += activation[j] * self.weights[i][j];
}
result[i] = activation_function(value + self.biases[i]);
}
result
}
    // Accumulates this layer's gradient contributions into `changes` and
    // returns the activation the previous layer should have produced.
    pub fn back_propagate(
        &self,
        changes: &mut Self,
        activation: &[f64; I],
        actual_outcome: &[f64; O],
        desired_outcome: &[f64; O],
    ) -> [f64; I] {
        let mut desired_activation = *activation;
        // Recompute the pre-activation values z = W * a + b.
        let mut z = [0.; O];
        for i in 0..O {
            z[i] = self.biases[i];
            for j in 0..I {
                z[i] += activation[j] * self.weights[i][j];
            }
        }
        for i in 0..O {
            // d(cost)/d(activation) for a squared-error cost.
            let dca = 2. * (actual_outcome[i] - desired_outcome[i]);
            let dzb = 1.;
            for j in 0..I {
                let daz = activation_derivative(z[i]);
                let dzw = activation[j];
                let dza = self.weights[i][j];
                let dcw = dca * dzw * daz;
                let dcb = dzb * daz * dca;
                let dca2 = dza * daz * dca;
                changes.weights[i][j] -= dcw;
                // The bias gradient is accumulated once per input, hence / I.
                changes.biases[i] -= dcb / I as f64;
                desired_activation[j] -= dca2
            }
        }
        desired_activation
    }
    // Adds the accumulated changes (already negated gradients) scaled by `rate`.
    pub fn apply_changes(&mut self, changes: Layer<I, O>, rate: f64) {
for i in 0..O {
for j in 0..I {
self.weights[i][j] += changes.weights[i][j] * rate
}
self.biases[i] += changes.biases[i] * rate;
}
}
}
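// A small end-to-end check on a 1x1 layer: one back_propagate step followed
// by apply_changes should move the output toward the target. The layer size,
// starting weights, and step rate here are assumptions chosen for the test.
#[cfg(test)]
mod layer_tests {
    use super::*;
    #[test]
    fn gradient_step_reduces_error() {
        let mut layer = Layer::<1, 1>::new();
        // Private fields are accessible from this child module.
        layer.weights[0][0] = 0.5;
        layer.biases[0] = 0.1;
        let input = [1.0];
        let target = [0.9];
        let before = layer.propagate(&input);
        let mut changes = Layer::<1, 1>::new();
        layer.back_propagate(&mut changes, &input, &before, &target);
        layer.apply_changes(changes, 0.1);
        let after = layer.propagate(&input);
        assert!((after[0] - target[0]).abs() < (before[0] - target[0]).abs());
    }
}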
// Network with an I->H input layer, L hidden H->H layers, and an H->O output
// layer.
#[derive(Debug, Clone)]
pub struct NeuralNetwork<const I: usize, const O: usize, const H: usize, const L: usize> {
input_layer: Layer<I, H>,
hidden_layers: [Layer<H, H>; L],
output_layer: Layer<H, O>,
}
impl<const I: usize, const O: usize, const H: usize, const L: usize> NeuralNetwork<I, O, H, L> {
#[allow(dead_code)]
pub fn new() -> Self {
Self {
input_layer: Layer::new(),
hidden_layers: [Layer::new(); L],
output_layer: Layer::new(),
}
}
#[allow(dead_code)]
pub fn random() -> Self {
Self {
input_layer: Layer::random(),
hidden_layers: [Layer::random(); L],
output_layer: Layer::random(),
}
}
    // Forward pass through the input, hidden, and output layers.
    pub fn predict(&self, start_activation: &DataPoint<I>) -> DataPoint<O> {
let mut activation = self.input_layer.propagate(&start_activation.0);
for hidden_layer in self.hidden_layers {
activation = hidden_layer.propagate(&activation);
}
DataPoint(self.output_layer.propagate(&activation))
}
    // Runs one forward/backward pass for a single example, accumulating
    // gradients into `changes`, and returns this example's share of the
    // batch-averaged squared-error loss.
    pub fn train(
        &self,
        changes: &mut Self,
        start_activation: &DataPoint<I>,
        desired_outcome: &DataPoint<O>,
        batch_size: usize,
    ) -> f64 {
        for v in desired_outcome.0 {
            if v > 1. || v < -1. {
                panic!("Invalid training data");
            }
        }
        // Forward pass, keeping every hidden activation for the backward pass.
        let input_activation = self.input_layer.propagate(&start_activation.0);
        let mut activations = [[0.; H]; L];
        for i in 0..L {
            let activation = if i == 0 {
                input_activation
            } else {
                activations[i - 1]
            };
            activations[i] = self.hidden_layers[i].propagate(&activation);
        }
        let last_activation = if L > 0 {
            activations[L - 1]
        } else {
            input_activation
        };
        let actual_outcome = self.output_layer.propagate(&last_activation);
        // Backward pass: each layer reports the activation it wanted from the
        // layer before it.
        let desired_output_activation = self.output_layer.back_propagate(
            &mut changes.output_layer,
            &last_activation,
            &actual_outcome,
            &desired_outcome.0,
        );
        let mut desired_activations = [[0.; H]; L];
        for i in (0..L).rev() {
            // Layer i's desired output comes from layer i + 1's backprop
            // result; its input is layer i - 1's output.
            let prev_desired_activation = if i == L - 1 {
                desired_output_activation
            } else {
                desired_activations[i + 1]
            };
            let activation = if i == 0 {
                input_activation
            } else {
                activations[i - 1]
            };
            for v in prev_desired_activation {
                if v > 1. || v < -1. {
                    panic!("Invalid activation data");
                }
            }
            desired_activations[i] = self.hidden_layers[i].back_propagate(
                &mut changes.hidden_layers[i],
                &activation,
                &activations[i],
                &prev_desired_activation,
            );
        }
        let _ = self.input_layer.back_propagate(
            &mut changes.input_layer,
            &start_activation.0,
            &input_activation,
            &(if L > 0 {
                desired_activations[0]
            } else {
                desired_output_activation
            }),
        );
//println!("outcome: {:#?}", actual_outcome);
//println!("Desired outcome: {:#?}", desired_outcome);
let mut loss = 0.;
for i in 0..O {
let l = (actual_outcome[i] - desired_outcome.0[i]).powi(2);
//println!("i: {} loss {}", i, l);
loss += l;
}
loss / batch_size as f64
}
    pub fn apply_changes(&mut self, changes: &Self, batch_size: usize) {
        let rate = LEARNING_RATE / batch_size as f64;
        self.input_layer.apply_changes(changes.input_layer, rate);
        for i in 0..L {
            self.hidden_layers[i].apply_changes(changes.hidden_layers[i], rate)
        }
        self.output_layer.apply_changes(changes.output_layer, rate);
    }
}
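// A sketch of how train and apply_changes combine into one batch update; the
// function name, the network shape (18 hidden units, no H->H layers), and the
// "whole data set as one batch" choice are illustrative assumptions, not part
// of the original code. Assumes non-empty `inputs`.
#[allow(dead_code)]
pub fn train_batch(
    network: &mut NeuralNetwork<INPUT_COUNT, OUTPUT_COUNT, 18, 0>,
    inputs: &[DataPoint<INPUT_COUNT>],
    outputs: &[DataPoint<OUTPUT_COUNT>],
) -> f64 {
    // Gradients accumulate into a zeroed copy of the network, then get
    // applied once, scaled down by the batch size inside apply_changes.
    let mut changes = NeuralNetwork::new();
    let mut loss = 0.;
    for (input, output) in inputs.iter().zip(outputs) {
        loss += network.train(&mut changes, input, output, inputs.len());
    }
    network.apply_changes(&changes, inputs.len());
    loss
}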
// Reads training data from stdin: a line with the number of rows, then one
// row per line holding INPUT_COUNT inputs followed by OUTPUT_COUNT outputs.
pub fn read_training_data() -> (Vec<DataPoint<INPUT_COUNT>>, Vec<DataPoint<OUTPUT_COUNT>>) {
    let mut input = String::new();
    stdin()
        .read_line(&mut input)
        .expect("Unexpected end of input");
    let days: i32 = input.trim().parse().unwrap();
    let mut input_data = Vec::new();
    let mut output_data = Vec::new();
    for _ in 0..days {
        let mut input = String::new();
        stdin()
            .read_line(&mut input)
            .expect("Unexpected end of input");
        let arr = input
            .split_whitespace()
            .map(|s| s.parse().unwrap())
            .collect::<Vec<f64>>();
        input_data.push(DataPoint::new(&arr[..INPUT_COUNT]));
        output_data.push(DataPoint::new(&arr[INPUT_COUNT..]));
    }
    (input_data, output_data)
}
// Reads prediction inputs from stdin: a count line, then one line of
// INPUT_COUNT values per row.
#[allow(dead_code)]
pub fn read_input_data() -> Vec<DataPoint<INPUT_COUNT>> {
    let mut input = String::new();
    stdin()
        .read_line(&mut input)
        .expect("Unexpected end of input");
    let days: i32 = input.trim().parse().unwrap();
    let mut data = Vec::new();
    for _ in 0..days {
        let mut input = String::new();
        stdin()
            .read_line(&mut input)
            .expect("Unexpected end of input");
        let arr = input
            .split_whitespace()
            .map(|s| s.parse().unwrap())
            .collect::<Vec<f64>>();
        data.push(DataPoint::new(&arr[..INPUT_COUNT]));
    }
    data
}
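// Illustrative stdin layout for the two readers above; the numbers are made
// up. read_training_data expects 24 + 12 = 36 values per row, read_input_data
// expects only the 24 inputs:
//   2
//   -1.5 0.25 3.0 <...33 more values...>
//   4.0 -2.5 1.0 <...33 more values...>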
#[allow(dead_code)]
pub fn main() {
#[cfg(feature = "evaluate")]
let (data, output_data) = read_training_data();
#[cfg(not(feature = "evaluate"))]
let data = read_input_data();
let neural_net = get_neural_network();
#[cfg(feature = "evaluate")]
let mut correct: i32 = 0;
#[cfg(feature = "evaluate")]
let mut incorrect: i32 = 0;
#[cfg(feature = "evaluate")]
let mut correct_distribution = [0; OUTPUT_COUNT];
#[cfg(feature = "evaluate")]
let mut error_distribution = [0; OUTPUT_COUNT];
for i in 0..data.len() {
let result = neural_net.predict(&data[i]);
for j in 0..OUTPUT_COUNT {
print!("{} ", result.get(j));
#[cfg(feature = "evaluate")]
{
let dist = (result.get(j) - output_data[i].get(j)).abs();
if dist < 0.75 {
print!("+ ");
correct += 1;
correct_distribution[j] += 1;
} else if dist > 2.05 {
print!("! ");
error_distribution[j] += 1;
incorrect += 1;
}
println!("");
}
}
#[cfg(feature = "evaluate")]
println!("");
}
#[cfg(feature = "evaluate")]
println!("correct distribution: {:#?}", correct_distribution);
#[cfg(feature = "evaluate")]
println!("error distribution: {:#?}", error_distribution);
#[cfg(feature = "evaluate")]
println!(
"Correct: {} / {}, Incorrect: {} / {}, score: {}",
correct,
data.len() * OUTPUT_COUNT,
incorrect,
data.len() * OUTPUT_COUNT,
25. * ((correct - incorrect) as f64 / data.len() as f64)
);
}
// Network with hard-coded weights: a 24->18 input layer feeding an 18->12
// output layer, with no extra hidden layers.
pub fn get_neural_network() -> NeuralNetwork<INPUT_COUNT, OUTPUT_COUNT, 18, 0> {
NeuralNetwork {
input_layer: Layer {
weights: [
[
0.0184117681952159,
0.031948766780118106,
-0.09702881268567491,
0.06711334285073134,
-0.06127876763953794,
0.022503935109532237,
0.09473475049109924,
-0.02115625082546075,
0.05104589618663623,
-0.06532417903770522,
0.0062844978173819364,
-0.029315890856630746,
0.04196027253530437,
-0.07510017665900004,
-0.00429811040585373,
0.07775330555333071,
-0.052674390474306734,
0.02828074791212037,
-0.10282604230420606,
0.059058441846342756,
-0.07174443801425745,
0.009565396704347263,
0.0792504237832594,
-0.039053046007442145,
],
[
-0.0728924,
0.00979859999999999,
-0.026329,
0.04454899999999999,
-0.07276,
-0.0018819999999999948,
0.08080899999999999,
-0.048312999999999995,
0.02256499999999999,
-0.094744,
0.05731520000000001,
-0.05999380000000001,
0.01088420000000001,
0.0935752,
-0.0355468,
0.04714419999999999,
-0.0819778,
0.0007131999999999916,
-0.047227399999999996,
0.0354636,
-0.0936584,
-0.010967400000000006,
0.059910599999999994,
-0.0573984,
],
[
-0.0920164,
0.06004279999999999,
-0.06907920000000001,
0.013611799999999997,
0.0963028,
-0.03281920000000001,
0.0498718,
-0.0792502,
0.0034407999999999995,
-0.0444998,
0.026378199999999997,
-0.0909308,
-0.020052800000000003,
0.0626382,
-0.06648380000000001,
0.016207199999999998,
0.0870852,
-0.030223800000000002,
-0.0781646,
0.004526399999999997,
0.0754044,
-0.0419046,
0.028973399999999993,
-0.0883356,
],
[
0.05757349094293082,
0.021443178532627078,
0.09233832083241117,
-0.024932561451427932,
0.04600692253080675,
-0.07121078400016452,
-0.0002049460551453513,
0.082630598339256,
-0.04636039811275036,
-0.08239609242513118,
-0.011450829879369679,
0.07129200510332509,
-0.057786499165030224,
0.02493461771354011,
-0.10418275461946727,
-0.021547709674777274,
0.04919937518192613,
0.0011044284427444784,
0.08364952512245406,
-0.045595825017936555,
0.03699667845939909,
-0.09220075698155612,
-0.009569788712521664,
0.061265394868901785,
],
[
-0.0845224,
-0.013644400000000001,
0.0690466,
-0.0600754,
0.022615600000000003,
0.0934936,
-0.023815399999999997,
0.047062599999999996,
0.010935000000000005,
0.081813,
-0.03549600000000001,
0.035382000000000004,
-0.081927,
-0.011048999999999998,
0.07164200000000001,
-0.057479999999999996,
-0.09360779999999999,
-0.0227298,
0.059961200000000006,
-0.0691608,
0.013530200000000003,
0.0844082,
-0.032900799999999994,
0.0379772,
],
[
0.08454060000000001,
-0.04458140000000001,
0.03810960000000001,
-0.09101240000000001,
-0.008321400000000001,
0.0625566,
-0.0665654,
0.016125600000000007,
-0.0318152,
0.050875800000000006,
-0.0782462,
0.004444800000000004,
0.0753228,
-0.041986199999999994,
0.028891800000000002,
-0.0884172,
0.06364220000000001,
-0.053666799999999994,
0.017211200000000003,
0.09990220000000001,
-0.029219799999999997,
0.05347120000000001,
-0.07565079999999999,
0.007040200000000008,
],
[
0.041790400000000005,
-0.0755186,
-0.004640599999999995,
0.0780504,
-0.051071599999999995,
0.031619400000000006,
-0.0975026,
0.06636979999999999,
-0.06275220000000001,
0.01993879999999999,
0.0908168,
-0.0264922,
0.04438580000000001,
-0.0729232,
-0.0020452000000000027,
-0.038173,
0.032705000000000005,
-0.084604,
-0.013726000000000006,
0.057152,
-0.060156999999999995,
0.010721000000000003,
0.09341200000000001,
-0.03571,
],
[
0.016995272673883535,
0.08757745411930742,
-0.02987308682962995,
0.04121615706527722,
-0.07559694429340866,
-0.004316768700774171,
0.07907510069321247,
0.032159013173127096,
-0.08385411593469318,
-0.012461692751164398,
0.0707159257854464,
-0.058189766939248926,
0.024621352863036364,
0.09530390806768929,
-0.02295057457627511,
-0.07253013849667239,
0.008326235979007302,
0.07713734812530826,
-0.04301435149503198,
0.024686001100293193,
-0.09611014577146221,
-0.029103358564030565,
0.04921058472898441,
-0.003393655323052892,
],
[
0.0020151160919961845,
0.07637075782801947,
-0.04976336973543656,
0.034776263319958436,
0.10985876408667027,
0.00033207610406547677,
-0.04145483312557245,
0.04440340317631952,
0.11507799636333046,
-0.0006310132864574509,
0.07029906993638266,
0.15525749411544376,
0.029480039946089758,
0.11733835675052273,
0.07992455868534135,
0.17615205638221018,
0.05878385427435263,
0.14889632532360572,
0.03161909862859395,
0.12820555192405286,
0.2149749961093116,
0.1174566525569263,
0.21116831628749905,
0.20117908992779132,
],
[
-0.06284112079457257,
0.019845933063282153,
0.09072100858545185,
-0.02659783108507533,
0.044268240678589074,
-0.0730428113433483,
0.07901222146418338,
-0.03829793925845667,
0.03257416191956702,
-0.08472751476949521,
-0.0138439422097962,
0.06884953380413865,
-0.060267548754417706,
0.022425090632927025,
-0.025505035654766424,
0.0571969139666943,
-0.07191851577329707,
-0.0010317504591569034,
0.08167217123264625,
-0.047435529150968235,
0.0352746086527864,
-0.09382851124836937,
0.07006630181312028,
-0.05903314102659622,
],
[
-0.09377100000000001,
-0.022892999999999997,
0.059798,
-0.06932400000000001,
0.013366999999999995,
-0.0345736,
0.0481174,
-0.08100460000000001,
0.001686399999999999,
0.0725644,
-0.0447446,
0.026133399999999994,
-0.0911756,
0.060883599999999996,
-0.0564254,
0.014452599999999993,
0.0971436,
-0.0319784,
0.0507126,
-0.0784094,
0.0042815999999999965,
0.0751596,
0.039032,
-0.09009,
],
[
0.09770474437927323,
-0.03135928745607856,
0.05105343146658985,
0.12134613351028996,
0.002122431574334001,
-0.049096245652471227,
0.017395980302092245,
0.09545471353643413,
-0.03791416844687958,
0.041754659024022346,
-0.08962076000910824,
-0.008650367287941099,
0.06084436387485868,
0.023728025641846825,
-0.10563607400950717,
-0.02138069941313574,
0.05348034637848293,
-0.05837206309024311,
0.01802339286091163,
0.10557487027657982,
-0.019674032130909243,
-0.052892904222521936,
0.020409887685541306,
0.10510905229188182,
],
[
0.03253641058595543,
-0.08477257259325141,
-0.013894619422161766,
0.06879637617650773,
0.020855864099802546,
-0.0964530850874495,
-0.025574958263799744,
0.05711620895598224,
-0.07200554757901677,
0.010685569057311007,
0.0815635725638893,
-0.03574538692712783,
-0.08368613817474346,
-0.012808098449624078,
0.06988290659586208,
-0.05923927930455387,
0.023451350926097136,
0.09432918568761813,
-0.02297992632460587,
0.04789802904445432,
0.011770369845383844,
0.0826483538534896,
-0.03466066220470397,
0.036217330954728955,
],
[
-0.01868200267186812,
0.07769292579973142,
-0.016543804465157196,
0.06288986560247063,
0.03318028241977125,
0.11963138733040869,
0.006355318520748884,
0.06892512599979557,
0.12353852687263654,
-0.011448342376160936,
0.058661780897475056,
-0.067143045015155,
0.10350792284068053,
0.0020686823409450813,
0.14664545769997897,
0.10246869526735232,
0.2575128807692531,
0.4081329518400277,
0.42208157819805525,
0.6489809526394233,
0.7960152776365235,
1.084396763721281,
1.2239314763768965,
1.5880269116107337,
],
[
-0.029369051086640216,
0.04151417339707807,
-0.08760440660755087,
0.07626874398843839,
-0.052858618571014,
0.029820418987018326,
-0.09930706478634682,
-0.01662193214425753,
0.05426027058091854,
-0.06305132518926032,
0.007817156841481456,
-0.02831073299781565,
0.04256205595157782,
-0.07474988798814207,
-0.003879294763499517,
0.07880189781399094,
-0.050324835793011016,
0.03235590742392265,
-0.09677001636052268,
0.06710807134957085,
-0.06200423940270093,
0.02068566059058148,
0.09155331993057485,
-0.02576489978594899,
],
[
-0.071558824551313,
0.011173877985461928,
0.08212433624126604,
0.04610649862466901,
-0.08274802640858503,
0.00037020775204486285,
0.07171640399423004,
-0.04516900382756475,
0.02603423373322372,
0.09720467952833924,
-0.01988890834668421,
-0.0676264030379293,
0.015230168213544872,
0.08627698714780883,
-0.030821857338880425,
0.04018529076113647,
-0.07722566275131869,
-0.006631471189286675,
0.07584857431498426,
0.02779163649227445,
-0.08948080110922282,
-0.018364541071295568,
0.06469465007776827,
-0.06393707270582248,
],
[
0.10983290157511291,
-0.01927236496718559,
-0.05513596399802288,
0.016120956113102047,
0.10021962470713723,
-0.02660294224051193,
0.059124817972093235,
-0.06671376986486774,
0.0191200727974392,
0.09224051267682173,
0.05786879729119037,
0.13013541456687513,
0.013926358889531695,
0.08564037246760592,
-0.031444713829766886,
0.03829662097461373,
0.11828950387875965,
-0.014459338451737068,
-0.05446671051932328,
0.012654023884462535,
0.09209104002296474,
-0.039736923133400326,
0.040395006747915296,
-0.09104423815610217,
],
[
0.051206380849888065,
0.015171803645845566,
0.0861346570586351,
-0.030959974249313682,
0.040591184490358145,
-0.07548766310165911,
-0.0028115128561583946,
0.08190260533558308,
-0.04539569195260134,
0.038602525965695744,
-0.008413396208725452,
0.07507273698587913,
-0.05339242479285687,
0.029883001987842692,
0.10107982136790768,
-0.016632758773435844,
0.052766844220487844,
-0.06647033065386636,
0.08413090388852927,
-0.03411988045358406,
0.03626061652216161,
-0.08106242266567419,
-0.00993467620631636,
0.07317660580174498,
],
],
biases: [
0.03545907233490985,
0.013479599999999991,
-0.017457599999999997,
-0.05980260573160128,
-0.0911448,
-0.040900599999999995,
-0.0718376,
0.10981008104732747,
-0.01580838409769456,
0.023757739226829864,
-0.019211999999999996,
-0.030174417402457094,
-0.08109278758241452,
-0.5291130874095064,
0.044829265615887216,
0.016175344419745878,
0.003599274554011168,
-0.05534627719522737,
],
},
hidden_layers: [],
output_layer: Layer {
weights: [
[
-0.06688243882085217,
-0.011203600000000003,
0.059674399999999996,
-0.020946958872962742,
0.0250564,
0.0959344,
-0.0213746,
-0.08419294193780469,
-0.03752210695633487,
0.08472042199823458,
-0.03305519999999999,
0.08463789082947547,
-0.07943338039591447,
0.15810190128858317,
0.07587285213556612,
0.012328012988686998,
-0.14033401472704893,
0.015183459397493911,
],
[
-0.04309679255021571,
0.02778400000000001,
-0.089525,
-0.012438035870182938,
-0.05477460000000001,
0.01610339999999999,
0.09879439999999999,
-0.03159894399999625,
-0.00021823517820942058,
-0.07502608979402393,
0.005932399999999993,
0.10018958740301674,
0.04071395516161286,
0.14412905203013074,
-0.0035085142064066022,
0.05396232356831272,
-0.10432199794354,
0.012383216421253115,
],
[
0.00684218504986241,
-0.052046999999999996,
0.018830999999999997,
-0.07177587266445624,
-0.027600000000000003,
0.055091,
-0.074031,
-0.0069492643564413314,
0.036705862828379746,
0.044327550004005796,
-0.0857118,
0.038374510921218276,
0.06790274415860982,
0.13792492259530392,
0.035206736032872735,
-0.11740046056812711,
-0.06376098197893187,
-0.048275280848703896,
],
[
-0.07092607946094202,
-0.024872399999999996,
0.05781860000000001,
-0.03707522713352393,
0.011387600000000008,
0.08226560000000001,
-0.0350434,
-0.10006235204627578,
-0.05152161156327175,
0.07122338215874052,
-0.04672420000000001,
0.07969340035477088,
-0.0931017345810363,
0.14838994169503752,
0.06261812813656112,
0.0009637488012764587,
0.04525433186886796,
-0.0027391391179575077,
],
[
-0.049510616431649924,
0.014115199999999994,
0.08499319999999999,
-0.00045558067539918187,
-0.0802566,
0.002434399999999992,
0.07331240000000001,
-0.05688878307469279,
-0.031739799837494836,
-0.08989171499633267,
-0.0195496,
0.09670730238645858,
0.027061247383898892,
0.14952903378039595,
-0.01719496823638013,
0.032584538702505474,
-0.1008111663914027,
0.02440285303158143,
],
[
-0.022020706455685612,
-0.077529,
0.005162,
0.07837870340845675,
-0.041269,
0.029608999999999996,
-0.0877,
-0.010942068589955457,
0.06921673100803814,
0.03096375373297413,
-0.0993806,
-0.011276750901259111,
0.05420947723691072,
0.12664338537209205,
0.009744309684365524,
0.08218938270781943,
-0.06369091398439887,
-0.08651522655980781,
],
[
0.09084172614050066,
-0.0385414,
0.03233660000000001,
-0.06115563235781862,
-0.014094399999999997,
0.06859660000000001,
0.020656000000000008,
-0.11636873683731819,
0.01019356905367993,
0.056540846925495304,
-0.06039300000000001,
0.02752058178663748,
0.09321514907537812,
0.12544590611788473,
-0.06986896500392802,
-0.008048484750974597,
0.07008677237717462,
-0.019189759198145646,
],
[
-0.07642456890625345,
-0.0113668,
0.07132419999999999,
0.0402379034714159,
-0.0939254,
-0.0112344,
0.0596436,
-0.07033424358969217,
0.041870369165728574,
0.09515379617860022,
-0.0332184,
-0.06496633190554427,
0.0015341306823464594,
0.12345641638520544,
-0.04271378406764777,
0.037443452817793606,
-0.0805699368910241,
0.023480007565661796,
],
[
-0.06269191182124349,
-0.0911978,
-0.0203198,
0.04704853132060587,
-0.0667508,
0.015940200000000005,
0.0986312,
-0.044587439143928494,
0.15795797830522249,
0.0032589326047730086,
0.08695040000000001,
-0.06809133782588228,
0.0404781238996845,
0.08104144992433214,
-0.0004722401881217969,
0.058030356240662204,
-0.04598336048503036,
0.09781353864075028,
],
[
0.06115541782499728,
-0.0640232,
0.01866779999999999,
-0.0988417439197781,
-0.0277632,
0.05492779999999999,
0.00698700000000001,
0.07265731904373765,
0.0653077554075123,
0.04160157866788318,
-0.085875,
-0.019018594044278646,
0.06764588807998588,
0.09514105358353002,
-0.09554844417944751,
-0.005673040173128997,
0.08437771191277789,
-0.030957491871457025,
],
[
0.08659602642100803,
-0.0250356,
0.04584239999999999,
0.013699311847861268,
0.0924056,
-0.0367164,
0.0459746,
-0.0994957672525927,
0.07857763079548416,
0.06849572675305665,
-0.0468874,
-0.1123168141492544,
-0.012183778430227915,
0.09052151449796426,
-0.05626133660312773,
0.021544841668489002,
0.12808670173364076,
0.004498144811286426,
],
[
0.002179939772777072,
0.0951332,
-0.0339888,
0.03523188961393054,
-0.0804198,
0.0022712000000000066,
0.0731492,
-0.03659994215021838,
0.006193167846742109,
-0.010410385048309491,
0.061468600000000005,
-0.09047511614226797,
0.014967860477323645,
0.1065945311448271,
-0.02936522870561746,
0.06586210697688352,
0.056189565898110076,
0.09858877981813455,
],
],
biases: [
0.1018519616236632,
0.08514973393032707,
0.07404934430770857,
0.09310724159218675,
0.0950427583901143,
0.0694335829201137,
0.09399794126649798,
0.11226514115604691,
0.10283297451346814,
0.10232950717672981,
0.12201921429020872,
0.13977410028595247,
],
},
}
}