use std::io::stdin;
use std::time::{SystemTime, UNIX_EPOCH};
pub const INPUT_COUNT: usize = 24;
pub const OUTPUT_COUNT: usize = 12;
pub const LEARNING_RATE: f64 = 0.009;
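// Activation function: ReLU. Alternative sigmoid and tanh variants are kept
// commented out below.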
pub fn activation_function(x: f64) -> f64 {
//1. / (1. + std::f64::consts::E.powf(-x))
//x.tanh()
x.max(0.)
}
// Derivative of the activation function, used by back-propagation as da/dz.
#[allow(dead_code)]
pub fn activation_derivative(x: f64) -> f64 {
    //(1. / (1. + std::f64::consts::E.powf(-x))) * (1. - 1. / (1. + std::f64::consts::E.powf(-x)))
    //1. - x.tanh().powi(2)
    // ReLU: gradient is 1 for positive inputs, 0 otherwise.
    if x > 0. {
        1.
    } else {
        0.
    }
}
#[allow(dead_code)]
pub fn pseudo_rand() -> f64 {
    // Time-based pseudo-randomness in [0, 1): randomish enough for weight
    // initialization, but not a real RNG.
    let start = SystemTime::now();
    let since_the_epoch = start
        .duration_since(UNIX_EPOCH)
        .expect("Time went backwards");
    const MAX: u128 = 1_000_000;
    ((since_the_epoch.as_nanos() * 23525623626 >> 2) % MAX) as f64 / MAX as f64
}
#[derive(Debug, Clone)]
pub struct DataPoint<const S: usize>(pub [f64; S]);
impl<const S: usize> DataPoint<S> {
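    // Raw data is expected to lie in [MIN_VALUE, MAX_VALUE]; `new` normalizes
    // it into [0, 1] and `get` converts back to the original scale.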
const MIN_VALUE: f64 = -45.;
const MAX_VALUE: f64 = 45.;
pub fn new(values: &[f64]) -> Self {
let mut data_point = Self([0.; S]);
data_point.0.clone_from_slice(values);
let range = Self::MAX_VALUE - Self::MIN_VALUE;
for i in 0..S {
data_point.0[i] = (data_point.0[i] - Self::MIN_VALUE) / range
}
data_point
}
pub fn from_data(values: &[f64]) -> Self {
let mut data_point = Self([0.; S]);
data_point.0.clone_from_slice(values);
data_point
}
pub fn get(&self, i: usize) -> f64 {
let range = Self::MAX_VALUE - Self::MIN_VALUE;
self.0[i] * range + Self::MIN_VALUE
}
}
// Fully connected layer with I inputs and O outputs.
#[derive(Debug, Clone, Copy)]
pub struct Layer<const I: usize, const O: usize> {
pub weights: [[f64; I]; O],
pub biases: [f64; O],
}
impl<const I: usize, const O: usize> Layer<I, O> {
pub fn new() -> Self {
Self {
weights: [[0.; I]; O],
biases: [0.; O],
}
}
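    // Random initialization: weights and biases uniform in [-0.1, 0.1).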
pub fn random() -> Self {
let mut layer = Self::new();
for i in 0..O {
for j in 0..I {
layer.weights[i][j] = (pseudo_rand() * 2. - 1.) / 10.;
}
layer.biases[i] = (pseudo_rand() * 2. - 1.) / 10.;
}
layer
}
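    // Forward pass: result[i] = activation_function(sum_j activation[j] * weights[i][j] + biases[i]).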
pub fn propagate(&self, activation: &[f64; I]) -> [f64; O] {
let mut result = [0.; O];
for i in 0..O {
let mut value = 0.;
for j in 0..I {
value += activation[j] * self.weights[i][j];
}
result[i] = activation_function(value + self.biases[i]);
}
result
}
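    // Accumulates this sample's gradient into `changes` and returns the input
    // activations this layer would have preferred (used as the previous
    // layer's target). Naming follows the chain rule: dca = dC/da,
    // daz = da/dz, dzw = dz/dw, dza = dz/da, so e.g. dC/dw = dca * daz * dzw.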
pub fn back_propagate(
&self,
changes: &mut Self,
activation: &[f64; I],
actual_outcome: &[f64; O],
desired_outcome: &[f64; O],
) -> [f64; I] {
        let mut desired_activation = *activation;
        // z[i] is the pre-activation value of output i.
        let mut z = [0.; O];
        for i in 0..O {
            z[i] = self.biases[i];
            for j in 0..I {
                z[i] += activation[j] * self.weights[i][j];
            }
        }
        for i in 0..O {
            // Derivative of the squared error with respect to this output.
            let dca = 2. * (actual_outcome[i] - desired_outcome[i]);
            let dzb = 1.;
            for j in 0..I {
                let daz = activation_derivative(z[i]);
                let dzw = activation[j];
                let dza = self.weights[i][j];
                let dcw = dca * dzw * daz;
                let dcb = dzb * daz * dca;
                let dca2 = dza * daz * dca;
                changes.weights[i][j] -= dcw;
                // The bias gradient does not depend on `j` but is accumulated
                // once per input here, hence the division by `I`.
                changes.biases[i] -= dcb / I as f64;
                desired_activation[j] -= dca2;
            }
        }
desired_activation
}
pub fn apply_changes(&mut self, changes: Layer<I, O>, rate: f64) {
for i in 0..O {
for j in 0..I {
self.weights[i][j] += changes.weights[i][j] * rate
}
self.biases[i] += changes.biases[i] * rate;
}
}
}
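// Network with I inputs, O outputs and L hidden layers of width H: an input
// layer (I -> H), L hidden layers (H -> H) and an output layer (H -> O).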
#[derive(Debug, Clone)]
pub struct NeuralNetwork<const I: usize, const O: usize, const H: usize, const L: usize> {
pub input_layer: Layer<I, H>,
pub hidden_layers: [Layer<H, H>; L],
pub output_layer: Layer<H, O>,
}
impl<const I: usize, const O: usize, const H: usize, const L: usize> NeuralNetwork<I, O, H, L> {
#[allow(dead_code)]
pub fn new() -> Self {
Self {
input_layer: Layer::new(),
hidden_layers: [Layer::new(); L],
output_layer: Layer::new(),
}
}
#[allow(dead_code)]
pub fn random() -> Self {
Self {
input_layer: Layer::random(),
hidden_layers: [Layer::random(); L],
output_layer: Layer::random(),
}
}
pub fn predict(&self, start_activation: &DataPoint<I>) -> DataPoint<O> {
let mut activation = self.input_layer.propagate(&start_activation.0);
for hidden_layer in self.hidden_layers {
activation = hidden_layer.propagate(&activation);
}
DataPoint(self.output_layer.propagate(&activation))
}
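    // One forward/backward pass for a single sample: accumulates the gradient
    // into `changes` and returns this sample's contribution to the batch loss.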
    pub fn train(
        &self,
        changes: &mut Self,
        start_activation: &DataPoint<I>,
        desired_outcome: &DataPoint<O>,
        batch_size: usize,
    ) -> f64 {
        for v in desired_outcome.0 {
            if v > 1. || v < -1. {
                panic!("Invalid training data");
            }
        }
        // Forward pass, keeping every intermediate activation for the backward pass.
        let first_activation = self.input_layer.propagate(&start_activation.0);
        let mut activations = [[0.; H]; L];
        for i in 0..L {
            let input = if i == 0 {
                first_activation
            } else {
                activations[i - 1]
            };
            activations[i] = self.hidden_layers[i].propagate(&input);
        }
        let last_activation = if L > 0 {
            activations[L - 1]
        } else {
            first_activation
        };
        let actual_outcome = self.output_layer.propagate(&last_activation);
        // Backward pass: each back_propagate call returns the activations the
        // layer would have preferred as input, which become the next layer
        // down's target.
        let desired_output_activation = self.output_layer.back_propagate(
            &mut changes.output_layer,
            &last_activation,
            &actual_outcome,
            &desired_outcome.0,
        );
        let mut desired_activations = [[0.; H]; L];
        for i in (0..L).rev() {
            let layer_desired_outcome = if i == L - 1 {
                desired_output_activation
            } else {
                desired_activations[i + 1]
            };
            let input = if i == 0 {
                first_activation
            } else {
                activations[i - 1]
            };
            for v in layer_desired_outcome {
                if v > 1. || v < -1. {
                    panic!("Invalid activation data");
                }
            }
            desired_activations[i] = self.hidden_layers[i].back_propagate(
                &mut changes.hidden_layers[i],
                &input,
                &activations[i],
                &layer_desired_outcome,
            );
        }
        let _ = self.input_layer.back_propagate(
            &mut changes.input_layer,
            &start_activation.0,
            &first_activation,
            &(if L > 0 {
                desired_activations[0]
            } else {
                desired_output_activation
            }),
        );
        let mut loss = 0.;
        for i in 0..O {
            loss += (actual_outcome[i] - desired_outcome.0[i]).powi(2);
        }
        loss / batch_size as f64
    }
    pub fn apply_changes(&mut self, changes: &Self, batch_size: usize) {
        // Average the accumulated batch gradient and step by the learning rate.
        let rate = LEARNING_RATE / batch_size as f64;
        self.input_layer.apply_changes(changes.input_layer, rate);
        for i in 0..L {
            self.hidden_layers[i].apply_changes(changes.hidden_layers[i], rate);
        }
        self.output_layer.apply_changes(changes.output_layer, rate);
    }
}
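// A minimal sketch (not part of the original program) of how `train` and
// `apply_changes` combine into a full-batch training loop: gradients for the
// whole batch are accumulated into a zeroed network, then applied scaled by
// the batch size. The function name and `epochs` parameter are illustrative.
#[allow(dead_code)]
pub fn train_epochs<const I: usize, const O: usize, const H: usize, const L: usize>(
    net: &mut NeuralNetwork<I, O, H, L>,
    inputs: &[DataPoint<I>],
    outputs: &[DataPoint<O>],
    epochs: usize,
) {
    for _ in 0..epochs {
        let mut changes = NeuralNetwork::new();
        for (input, output) in inputs.iter().zip(outputs) {
            net.train(&mut changes, input, output, inputs.len());
        }
        net.apply_changes(&changes, inputs.len());
    }
}
// Reads training data from stdin: the first line holds the number of rows;
// each following row holds INPUT_COUNT inputs followed by OUTPUT_COUNT outputs.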
pub fn read_training_data() -> (Vec<DataPoint<INPUT_COUNT>>, Vec<DataPoint<OUTPUT_COUNT>>) {
let mut input = String::new();
stdin()
.read_line(&mut input)
.expect("Unexpected end of input");
let days: i32 = input.trim().parse().unwrap();
let mut input_data = Vec::new();
let mut output_data = Vec::new();
for _ in 0..days {
let mut input = String::new();
stdin()
.read_line(&mut input)
.expect("Unexpected end of input");
        let arr = input
            .split_whitespace()
            .map(|s| s.parse().unwrap())
            .collect::<Vec<f64>>();
input_data.push(DataPoint::new(&arr[..INPUT_COUNT]));
output_data.push(DataPoint::new(&arr[INPUT_COUNT..]));
}
(input_data, output_data)
}
#[allow(dead_code)]
pub fn read_input_data() -> Vec<DataPoint<INPUT_COUNT>> {
let mut input = String::new();
stdin()
.read_line(&mut input)
.expect("Unexpected end of input");
let days: i32 = input.trim().parse().unwrap();
let mut data = Vec::new();
for _ in 0..days {
let mut input = String::new();
stdin()
.read_line(&mut input)
.expect("Unexpected end of input");
        let arr = input
            .split_whitespace()
            .map(|s| s.parse().unwrap())
            .collect::<Vec<f64>>();
data.push(DataPoint::new(&arr[..INPUT_COUNT]));
}
data
}
#[allow(dead_code)]
pub fn main() {
#[cfg(feature = "evaluate")]
let (data, output_data) = read_training_data();
#[cfg(not(feature = "evaluate"))]
let data = read_input_data();
let neural_net = get_neural_network();
#[cfg(feature = "evaluate")]
let mut correct: i32 = 0;
#[cfg(feature = "evaluate")]
let mut incorrect: i32 = 0;
#[cfg(feature = "evaluate")]
let mut correct_distribution = [0; OUTPUT_COUNT];
#[cfg(feature = "evaluate")]
let mut error_distribution = [0; OUTPUT_COUNT];
for i in 0..data.len() {
let result = neural_net.predict(&data[i]);
for j in 0..OUTPUT_COUNT {
print!("{} ", result.get(j));
#[cfg(feature = "evaluate")]
{
let dist = (result.get(j) - output_data[i].get(j)).abs();
if dist < 0.75 {
print!("+ ");
correct += 1;
correct_distribution[j] += 1;
} else if dist > 2.05 {
print!("! ");
error_distribution[j] += 1;
incorrect += 1;
}
println!("");
}
}
#[cfg(feature = "evaluate")]
println!("");
}
#[cfg(feature = "evaluate")]
println!("correct distribution: {:#?}", correct_distribution);
#[cfg(feature = "evaluate")]
println!("error distribution: {:#?}", error_distribution);
#[cfg(feature = "evaluate")]
println!(
"Correct: {} / {}, Incorrect: {} / {}, score: {}",
correct,
data.len() * OUTPUT_COUNT,
incorrect,
data.len() * OUTPUT_COUNT,
25. * ((correct - incorrect) as f64 / data.len() as f64)
);
}
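// Pre-trained network: an INPUT_COUNT -> 18 input layer feeding an
// 18 -> OUTPUT_COUNT output layer, with no hidden layers. The constants below
// are presumably the result of a prior training run.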
pub fn get_neural_network() -> NeuralNetwork<INPUT_COUNT, OUTPUT_COUNT, 18, 0> {
NeuralNetwork {
input_layer: Layer {
weights: [
[
0.016365807084969798,
0.029897443645720924,
-0.09905563364983566,
0.06512664253557854,
-0.063178765477455,
0.020710813433538013,
0.09303970250163338,
-0.02275117349953524,
0.04953180021655051,
-0.06677368151043744,
0.004894784325966019,
-0.03065278408558707,
0.04068372499251621,
-0.07632304198682839,
-0.005478221953315343,
0.07659918291633323,
-0.053830964271285746,
0.02709068626942456,
-0.10410122662023238,
0.057646589214514564,
-0.07330684859940739,
0.007844677810847711,
0.07736899740747706,
-0.041076763144369574,
],
[
-0.0728924,
0.00979859999999999,
-0.026329,
0.04454899999999999,
-0.07276,
-0.0018819999999999948,
0.08080899999999999,
-0.048312999999999995,
0.02256499999999999,
-0.094744,
0.05731520000000001,
-0.05999380000000001,
0.01088420000000001,
0.0935752,
-0.0355468,
0.04714419999999999,
-0.0819778,
0.0007131999999999916,
-0.047227399999999996,
0.0354636,
-0.0936584,
-0.010967400000000006,
0.059910599999999994,
-0.0573984,
],
[
-0.0920164,
0.06004279999999999,
-0.06907920000000001,
0.013611799999999997,
0.0963028,
-0.03281920000000001,
0.0498718,
-0.0792502,
0.0034407999999999995,
-0.0444998,
0.026378199999999997,
-0.0909308,
-0.020052800000000003,
0.0626382,
-0.06648380000000001,
0.016207199999999998,
0.0870852,
-0.030223800000000002,
-0.0781646,
0.004526399999999997,
0.0754044,
-0.0419046,
0.028973399999999993,
-0.0883356,
],
[
0.05814753284170782,
0.022019625628913065,
0.09291114524596923,
-0.024360965009895385,
0.046576298820485904,
-0.07064855977605915,
0.0003442923707864423,
0.08315647477343562,
-0.04586000475991252,
-0.08191198188308406,
-0.01097617988375457,
0.07175877053964407,
-0.05732611781032156,
0.02538895658859303,
-0.1037287887218909,
-0.021076726077198935,
0.049704075375251885,
0.0016327966849441656,
0.08419311619793376,
-0.045040977347176256,
0.03755813645984853,
-0.0916341992547326,
-0.009000727202543574,
0.06182809521468674,
],
[
-0.0845224,
-0.013644400000000001,
0.0690466,
-0.0600754,
0.022615600000000003,
0.0934936,
-0.023815399999999997,
0.047062599999999996,
0.010935000000000005,
0.081813,
-0.03549600000000001,
0.035382000000000004,
-0.081927,
-0.011048999999999998,
0.07164200000000001,
-0.057479999999999996,
-0.09360779999999999,
-0.0227298,
0.059961200000000006,
-0.0691608,
0.013530200000000003,
0.0844082,
-0.032900799999999994,
0.0379772,
],
[
0.08454060000000001,
-0.04458140000000001,
0.03810960000000001,
-0.09101240000000001,
-0.008321400000000001,
0.0625566,
-0.0665654,
0.016125600000000007,
-0.0318152,
0.050875800000000006,
-0.0782462,
0.004444800000000004,
0.0753228,
-0.041986199999999994,
0.028891800000000002,
-0.0884172,
0.06364220000000001,
-0.053666799999999994,
0.017211200000000003,
0.09990220000000001,
-0.029219799999999997,
0.05347120000000001,
-0.07565079999999999,
0.007040200000000008,
],
[
0.041790400000000005,
-0.0755186,
-0.004640599999999995,
0.0780504,
-0.051071599999999995,
0.031619400000000006,
-0.0975026,
0.06636979999999999,
-0.06275220000000001,
0.01993879999999999,
0.0908168,
-0.0264922,
0.04438580000000001,
-0.0729232,
-0.0020452000000000027,
-0.038173,
0.032705000000000005,
-0.084604,
-0.013726000000000006,
0.057152,
-0.060156999999999995,
0.010721000000000003,
0.09341200000000001,
-0.03571,
],
[
0.021881923817085223,
0.0918126470571436,
-0.026232508640812918,
0.044781804796862355,
-0.07233746336412805,
-0.0024859206428952665,
0.07983917386363963,
0.03287112133509152,
-0.0823357424927479,
-0.010949659591082109,
0.07261769878154778,
-0.056106219780217584,
0.02705228545699406,
0.09747492779352068,
-0.021731386521725345,
-0.0725493652479984,
0.008829704163125668,
0.07952768013057757,
-0.03914040352252662,
0.029629967522289514,
-0.09208511158402441,
-0.028596120749887797,
0.04334748364583672,
-0.018722391194185176,
],
[
-0.1141335732802929,
-0.02217967008433491,
-0.13957899387935324,
-0.06369607171990145,
0.03776022011790723,
-0.0158651395288838,
0.002021670337776273,
0.13163616974121367,
0.24004052022292313,
0.14378283775737707,
0.22206453622295347,
0.32138829010222797,
0.21182148622856187,
0.3180598973011594,
0.3088127346202453,
0.42083778788654014,
0.2843404032218277,
0.3159252145324529,
0.15139481580682,
0.20737345575328897,
0.2567586555405705,
0.13964278359327084,
0.21377541348429444,
0.18236318389323095,
],
[
-0.06256561132330053,
0.020110386937770633,
0.09096908320229875,
-0.026382161730681742,
0.04443830075750825,
-0.07289775803374597,
0.07913099117411775,
-0.03820256266096591,
0.0326401057309401,
-0.08466108083974908,
-0.013780519866985974,
0.06890310797294548,
-0.06021770304600942,
0.022470876438763374,
-0.025450631917286293,
0.05726023213667222,
-0.07185139404528157,
-0.0009566508760441469,
0.08177967773331704,
-0.047290145609991126,
0.03548055524741869,
-0.09356298745622693,
0.07039512531033765,
-0.05863674177293379,
],
[
-0.09377100000000001,
-0.022892999999999997,
0.059798,
-0.06932400000000001,
0.013366999999999995,
-0.0345736,
0.0481174,
-0.08100460000000001,
0.001686399999999999,
0.0725644,
-0.0447446,
0.026133399999999994,
-0.0911756,
0.060883599999999996,
-0.0564254,
0.014452599999999993,
0.0971436,
-0.0319784,
0.0507126,
-0.0784094,
0.0042815999999999965,
0.0751596,
0.039032,
-0.09009,
],
[
0.18447984796444766,
0.050549907796518516,
0.13205348766401642,
0.20737267038571103,
0.08239263920834507,
0.01620519959373799,
0.06471884504335294,
0.12875573768574444,
-0.01774713897776613,
0.05573876569387208,
-0.07863401296420879,
-0.0026232889532777606,
0.06092267946041859,
0.017402989377819478,
-0.12062206742099178,
-0.040908468838196946,
0.03919650936200455,
-0.056749837266459775,
0.03149338137717491,
0.12947421356280683,
0.01704480238596738,
-0.004805092364709424,
0.08392569579208306,
0.1893154138505638,
],
[
0.03252799118593452,
-0.08478094183585033,
-0.013903065472896374,
0.06878787589818655,
0.020847611647330858,
-0.09646129909926573,
-0.025582942998698798,
0.05710839236668758,
-0.07201304759744981,
0.010678213523672975,
0.08155609683975408,
-0.03575286792687527,
-0.0836935462296861,
-0.012815449135374317,
0.06987588118782456,
-0.059246586624801416,
0.023443351534932915,
0.09432100008063003,
-0.02298824561907487,
0.0478897087714869,
0.011761920223817971,
0.08263993297709106,
-0.034669120714227085,
0.036208912986485986,
],
[
-0.2073553931342036,
-0.07278781733621717,
-0.11072716348490406,
0.004201603394498865,
0.004519232764451153,
0.16954648322976668,
0.11994381147775385,
0.23027012356416166,
0.29128063277017113,
0.17695268726601812,
0.2371499475361844,
0.10849485133127822,
0.26394542841429347,
0.18309276223614845,
0.35845868963749017,
0.3251631622526473,
0.35117848808377,
0.31939136020818365,
0.19088342364754116,
0.31675185880090995,
0.49798300772021264,
0.9814340243442788,
1.511350973485526,
2.511766954793907,
],
[
-0.0297368421092453,
0.041158460493318436,
-0.08794813442767176,
0.0759365508129182,
-0.05317766642809878,
0.029501194806701193,
-0.09962162604917742,
-0.01693738128712482,
0.05395468706520464,
-0.06335721108369906,
0.007501175739437978,
-0.028627284251466587,
0.042238777168887305,
-0.07507865221336169,
-0.004226046831466281,
0.07843453220836769,
-0.050708238296224666,
0.031953459257837415,
-0.09718745708073964,
0.06667791872887656,
-0.062448682798413285,
0.02021592719676636,
0.09104682579625827,
-0.02629246814799321,
],
[
-0.07288524035334594,
0.010072897491075445,
0.08126350412408932,
0.04542262876814589,
-0.08265900688265507,
0.0016932353756438335,
0.07411745647210997,
-0.04212975267876837,
0.02948138074415119,
0.1009561950219722,
-0.015958264256311125,
-0.06343789047471418,
0.019598774620825883,
0.09092181269856064,
-0.02568458554089945,
0.045767911339126464,
-0.07181504959329367,
-0.002223365660369155,
0.07915304391230038,
0.029993375870027282,
-0.08803197169081955,
-0.016949443103326044,
0.06651051605863317,
-0.06113016514838245,
],
[
0.08219739115585713,
-0.04308849451280582,
-0.07776963295951832,
-0.009857790797145776,
0.08186280269574497,
-0.03172347200944647,
0.07091734986784443,
-0.03945167282146533,
0.0635682836839723,
0.14563773739992653,
0.1184513086275251,
0.19889467990871185,
0.09123369272619673,
0.1706257663282062,
0.06100676886280245,
0.13199448572618303,
0.20881239967140322,
0.06779768251437923,
0.019910709800178478,
0.0770959455370418,
0.13959382423807776,
-0.01403565872619397,
0.03218566151801578,
-0.14744355289974156,
],
[
0.04571709499872495,
0.010160925086839613,
0.08087156413783447,
-0.03682612199789844,
0.03516963574308468,
-0.07916547572751853,
-0.003104755336139809,
0.08575544897423136,
-0.03742864743614274,
0.049174582769141786,
0.0038838492249906632,
0.08898628280945192,
-0.03813692099645037,
0.04636464858752955,
0.1180344592374395,
-0.001030289766113078,
0.06451851777947083,
-0.05933540410359508,
0.08847163499952204,
-0.03139670880960187,
0.03782262246468793,
-0.08004483854472333,
-0.009540258779623558,
0.07252362578133767,
],
],
biases: [
0.031452782348287814,
0.013479599999999991,
-0.017457599999999997,
-0.059063216360768916,
-0.0911448,
-0.040900599999999995,
-0.0718376,
0.1073884705321795,
-0.00932460799253341,
0.025024273605055485,
-0.019211999999999996,
0.030858780354485995,
-0.08110263308587991,
-0.7012227963759011,
0.043252418722894904,
0.015426117922068616,
0.06631715448522192,
-0.051785310469542385,
],
},
hidden_layers: [],
output_layer: Layer {
weights: [
[
-0.05703802526749842,
-0.011203600000000003,
0.059674399999999996,
-0.023500462594586415,
0.0250564,
0.0959344,
-0.0213746,
-0.07339371830505204,
-0.11837684512070612,
0.08672360234492896,
-0.03305519999999999,
0.08630340871104916,
-0.07943438331041205,
0.18181386366828328,
0.07705193407733937,
0.0025142231890497736,
-0.18729989186287485,
0.012506093498565855,
],
[
-0.031753477751746244,
0.02778400000000001,
-0.089525,
-0.014881949882189124,
-0.05477460000000001,
0.01610339999999999,
0.09879439999999999,
-0.0229873830932048,
-0.12375892515765144,
-0.07228331611646031,
0.005932399999999993,
0.10214470737273329,
0.04070916546711668,
0.1797685814280238,
-0.0015358924970939858,
0.04017474344742837,
-0.17859552865201775,
0.001387494730463138,
],
[
0.01920305159520628,
-0.052046999999999996,
0.018830999999999997,
-0.07255468756357401,
-0.027600000000000003,
0.055091,
-0.074031,
0.005032633378341296,
-0.11773417946100548,
0.04732698560311076,
-0.0857118,
0.07985243089928101,
0.06790294068066849,
0.18060139305022435,
0.03720171071655352,
-0.1289642658627665,
-0.147651663833331,
-0.05844859933345004,
],
[
-0.06129041097281596,
-0.024872399999999996,
0.05781860000000001,
-0.037580253780329365,
0.011387600000000008,
0.08226560000000001,
-0.0350434,
-0.09520926686694828,
-0.16909667140011037,
0.07362754625837097,
-0.04672420000000001,
0.08781944322424497,
-0.09308617617244912,
0.17946253561302594,
0.06490376837812918,
-0.011105567669666154,
-0.027287940133310113,
-0.016821907441529508,
],
[
-0.0425567175462357,
0.014115199999999994,
0.08499319999999999,
-3.631702416102432e-5,
-0.0802566,
0.002434399999999992,
0.07331240000000001,
-0.05329641845373612,
-0.084577424668251,
-0.08746673893236924,
-0.0195496,
0.09152032936597139,
0.0270887075550665,
0.15762589559398046,
-0.014612380762211809,
0.026138525816254244,
-0.13763223866296204,
0.0156790597718171,
],
[
-0.013236277291470188,
-0.077529,
0.005162,
0.08081857290711349,
-0.041269,
0.029608999999999996,
-0.0877,
-0.004133510511923142,
-0.03126421193487638,
0.03415501442447006,
-0.0993806,
0.014125518412704485,
0.054237166970245804,
0.14722748571902225,
0.01269668863102259,
0.0741281503888547,
-0.12193731979074944,
-0.10066322751979492,
],
[
0.09629643493485918,
-0.0385414,
0.03233660000000001,
-0.05940247570441866,
-0.014094399999999997,
0.06859660000000001,
0.020656000000000008,
-0.11846511460566495,
-0.04374438662791898,
0.05879828411095856,
-0.06039300000000001,
-0.0009725010625753716,
0.09325908995648328,
0.12946159405101365,
-0.06644072163542755,
-0.016071352896887967,
0.025737380357193397,
-0.036592269244843094,
],
[
-0.0716605783887,
-0.0113668,
0.07132419999999999,
0.039227993696773315,
-0.0939254,
-0.0112344,
0.0596436,
-0.06888438064183894,
0.052653480176269,
0.09737368882999387,
-0.0332184,
-0.08109643223608066,
0.0015198134279518886,
0.10848216010067242,
-0.03921600011050003,
0.0370939521274188,
-0.0793951508696832,
0.026148745848090012,
],
[
-0.05784469705460472,
-0.0911978,
-0.0203198,
0.04314522651781204,
-0.0667508,
0.015940200000000005,
0.0986312,
-0.05012790002432129,
0.1011235576277164,
0.005210271552491358,
0.08695040000000001,
-0.10734890890977591,
0.040411007643487894,
0.08667700659294068,
0.003490331585907222,
0.050552568192751106,
-0.08956296860469341,
0.08460347832138619,
],
[
0.06130318143340829,
-0.0640232,
0.01866779999999999,
-0.10528751801100177,
-0.0277632,
0.05492779999999999,
0.00698700000000001,
0.05921636390269124,
0.07459058563894118,
0.04236483247814882,
-0.085875,
-0.10633127459407658,
0.06755202417287051,
0.07687365421248915,
-0.09155449352496135,
-0.010688660970600726,
0.06934762870717552,
-0.038809066494141495,
],
[
0.08785580999116811,
-0.0250356,
0.04584239999999999,
0.006993285367626222,
0.0924056,
-0.0367164,
0.0459746,
-0.10762083218141824,
0.07104722556282012,
0.0697344793692867,
-0.0468874,
-0.16960909754507597,
-0.012267110436804741,
0.07859718684549076,
-0.05208907992725116,
0.018023234093684833,
0.11086063223529301,
0.0017069715622517166,
],
[
0.0009267403284441151,
0.0951332,
-0.0339888,
0.0269955548943452,
-0.0804198,
0.0022712000000000066,
0.0731492,
-0.047185344900259885,
0.08665367383529304,
-0.009748430926091865,
0.061468600000000005,
-0.17642604794388536,
0.014857112794169012,
0.06902555854977288,
-0.025225915730265624,
0.06737291263559271,
0.08546101628595873,
0.10696135350502953,
],
],
biases: [
0.15564504222217423,
0.1473202333557324,
0.14150325747102618,
0.1417972607008875,
0.12915401979983357,
0.11102123566726314,
0.10943827460128827,
0.11337626442372599,
0.11272235937363072,
0.08431237434673645,
0.10892382608690006,
0.10589741994315555,
],
},
}
}
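// A small sanity-check module (not part of the original program). The shapes
// and expected values here are hand-picked illustrations, not taken from the
// trained network above.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn data_point_round_trips_through_normalization() {
        // `new` maps raw values from [MIN_VALUE, MAX_VALUE] into [0, 1];
        // `get` maps them back.
        let point = DataPoint::<3>::new(&[-45., 0., 45.]);
        assert!((point.0[0] - 0.0).abs() < 1e-12);
        assert!((point.0[1] - 0.5).abs() < 1e-12);
        assert!((point.0[2] - 1.0).abs() < 1e-12);
        assert!(point.get(1).abs() < 1e-9);
    }

    #[test]
    fn propagate_applies_weights_bias_and_relu() {
        // z = 0.5 * 1.0 + 0.25 * (-1.0) + 0.1 = 0.35, and ReLU(0.35) = 0.35.
        let mut layer = Layer::<2, 1>::new();
        layer.weights[0] = [1., -1.];
        layer.biases[0] = 0.1;
        let out = layer.propagate(&[0.5, 0.25]);
        assert!((out[0] - 0.35).abs() < 1e-12);
    }

    #[test]
    fn train_accumulates_changes_and_returns_a_finite_loss() {
        // Arbitrary tiny shape: 2 inputs, 2 outputs, hidden width 4, 1 hidden layer.
        let net = NeuralNetwork::<2, 2, 4, 1>::random();
        let mut changes = NeuralNetwork::<2, 2, 4, 1>::new();
        let input = DataPoint::<2>::from_data(&[0.3, 0.7]);
        let target = DataPoint::<2>::from_data(&[0.1, 0.9]);
        let loss = net.train(&mut changes, &input, &target, 1);
        assert!(loss.is_finite());
    }
}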