use std::io::stdin;
use std::time::{SystemTime, UNIX_EPOCH};
// Each stdin data line carries 24 input features, followed (in training data)
// by 12 expected outputs.
pub const INPUT_COUNT: usize = 24;
pub const OUTPUT_COUNT: usize = 12;
pub const LEARNING_RATE: f64 = 0.012;
// ReLU activation; the commented-out sigmoid and tanh variants are kept for
// reference.
pub fn activation_function(x: f64) -> f64 {
    //1. / (1. + std::f64::consts::E.powf(-x))
    //x.tanh()
    x.max(0.)
}
// Used by back_propagate where the chain rule calls for da/dz; the
// commented-out lines are the inverses of the sigmoid and tanh variants above.
#[allow(dead_code)]
pub fn inv_activation_function(x: f64) -> f64 {
    //f64::ln(x / (1. - x)) //.max(-10000.).min(10000.)
    //x.atanh()
    x.max(0.)
}
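// For comparison, a sketch of the true ReLU derivative (a step function).
// back_propagate uses the pseudo-inverse above instead; swapping this in
// would change training behaviour and require retraining the hard-coded
// weights in get_neural_network, so it is left unused.
#[allow(dead_code)]
pub fn activation_derivative(x: f64) -> f64 {
    if x > 0. {
        1.
    } else {
        0.
    }
}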
// Time-seeded pseudo-random value in [0, 1); crude, but good enough for
// weight initialization. Successive calls are correlated with the clock.
#[allow(dead_code)]
pub fn pseudo_rand() -> f64 {
    let start = SystemTime::now();
    let since_the_epoch = start
        .duration_since(UNIX_EPOCH)
        .expect("Time went backwards");
    const MAX: u128 = 1_000_000;
    // Randomish enough: scramble the nanosecond timestamp into [0, 1).
    ((since_the_epoch.as_nanos() * 23525623626 >> 2) % MAX) as f64 / MAX as f64
}
// A fixed-size sample whose values are stored normalized into [0, 1] over the
// MIN_VALUE..MAX_VALUE range.
#[derive(Debug, Clone)]
pub struct DataPoint<const S: usize>(pub [f64; S]);
impl<const S: usize> DataPoint<S> {
    //const MIN_VALUE: f64 = -45.;
    //const MAX_VALUE: f64 = 45.;
    const MIN_VALUE: f64 = -38.;
    const MAX_VALUE: f64 = 38.;
    // Normalizes `values` (which must hold exactly S elements) into [0, 1].
    pub fn new(values: &[f64]) -> Self {
        let mut data_point = Self([0.; S]);
        data_point.0.clone_from_slice(values);
        let range = Self::MAX_VALUE - Self::MIN_VALUE;
        for value in &mut data_point.0 {
            *value = (*value - Self::MIN_VALUE) / range;
        }
        data_point
    }
    // Maps the i-th stored value back to the original scale.
    pub fn get(&self, i: usize) -> f64 {
        let range = Self::MAX_VALUE - Self::MIN_VALUE;
        self.0[i] * range + Self::MIN_VALUE
    }
}
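// A small round-trip check of the normalization above: values at the range
// ends map to 0 and 1, and get() restores the original scale.
#[cfg(test)]
mod data_point_tests {
    use super::*;
    #[test]
    fn normalization_round_trips() {
        let original = [-38., 0., 38.];
        let point = DataPoint::<3>::new(&original);
        assert!((point.0[0] - 0.).abs() < 1e-12);
        assert!((point.0[2] - 1.).abs() < 1e-12);
        for i in 0..3 {
            assert!((point.get(i) - original[i]).abs() < 1e-9);
        }
    }
}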
// One fully connected layer with I inputs and O outputs.
#[derive(Debug, Clone, Copy)]
pub struct Layer<const I: usize, const O: usize> {
    pub weights: [[f64; I]; O],
    pub biases: [f64; O],
}
impl<const I: usize, const O: usize> Layer<I, O> {
    pub fn new() -> Self {
        Self {
            weights: [[0.; I]; O],
            biases: [0.; O],
        }
    }
    // Initializes all weights and biases uniformly in (-0.1, 0.1).
    pub fn random() -> Self {
        let mut layer = Self::new();
        for i in 0..O {
            for j in 0..I {
                layer.weights[i][j] = (pseudo_rand() * 2. - 1.) / 10.;
            }
            layer.biases[i] = (pseudo_rand() * 2. - 1.) / 10.;
        }
        layer
    }
    // Forward pass: activation_function(weights * activation + biases).
    pub fn propagate(&self, activation: &[f64; I]) -> [f64; O] {
        let mut result = [0.; O];
        for i in 0..O {
            let mut value = 0.;
            for j in 0..I {
                value += activation[j] * self.weights[i][j];
            }
            result[i] = activation_function(value + self.biases[i]);
        }
        result
    }
    // Accumulates this sample's gradient contributions into `changes` and
    // returns the activation the layer's input "should" have had, which the
    // caller uses as the desired outcome for the previous layer.
    pub fn back_propagate(
        &self,
        changes: &mut Self,
        activation: &[f64; I],
        actual_outcome: &[f64; O],
        desired_outcome: &[f64; O],
    ) -> [f64; I] {
        let mut desired_activation = *activation;
        // Recompute the pre-activations z = weights * activation + biases.
        let mut z = [0.; O];
        for i in 0..O {
            z[i] = self.biases[i];
            for j in 0..I {
                z[i] += activation[j] * self.weights[i][j];
            }
        }
        for i in 0..O {
            // dC/da of the squared-error cost (a - y)^2.
            let dca = 2. * (actual_outcome[i] - desired_outcome[i]);
            // dz/db is 1; da/dz depends only on the output index i.
            let dzb = 1.;
            let daz = inv_activation_function(z[i]);
            for j in 0..I {
                let dzw = activation[j]; // dz/dw
                let dza = self.weights[i][j]; // dz/da_prev
                // Chain rule: dC/dw, dC/db and dC/da_prev.
                let dcw = dca * dzw * daz;
                let dcb = dzb * daz * dca;
                let dca2 = dza * daz * dca;
                changes.weights[i][j] -= dcw;
                // The bias term is accumulated once per input, so average over I.
                changes.biases[i] -= dcb / I as f64;
                desired_activation[j] -= dca2;
            }
        }
        desired_activation
    }
    // Applies the accumulated (already negated) gradients, scaled by `rate`.
    pub fn apply_changes(&mut self, changes: Layer<I, O>, rate: f64) {
        for i in 0..O {
            for j in 0..I {
                self.weights[i][j] += changes.weights[i][j] * rate;
            }
            self.biases[i] += changes.biases[i] * rate;
        }
    }
}
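// A minimal sanity check of Layer::propagate on a hypothetical 2-in/1-out
// layer: with ReLU, each output is max(w . x + b, 0).
#[cfg(test)]
mod layer_tests {
    use super::*;
    #[test]
    fn propagate_is_relu_of_affine_map() {
        let mut layer = Layer::<2, 1>::new();
        layer.weights[0] = [0.5, -1.];
        layer.biases[0] = 0.25;
        // 0.5 * 1.0 + (-1.0) * 0.5 + 0.25 = 0.25
        assert!((layer.propagate(&[1., 0.5])[0] - 0.25).abs() < 1e-12);
        // A negative pre-activation (-0.75 here) is clamped to zero by ReLU.
        assert_eq!(layer.propagate(&[0., 1.])[0], 0.);
    }
}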
// Network with I inputs and O outputs: an input layer mapping I -> H, then L
// extra hidden layers of width H, then an output layer mapping H -> O.
#[derive(Debug, Clone)]
pub struct NeuralNetwork<const I: usize, const O: usize, const H: usize, const L: usize> {
    pub input_layer: Layer<I, H>,
    pub hidden_layers: [Layer<H, H>; L],
    pub output_layer: Layer<H, O>,
}
impl<const I: usize, const O: usize, const H: usize, const L: usize> NeuralNetwork<I, O, H, L> {
    #[allow(dead_code)]
    pub fn new() -> Self {
        Self {
            input_layer: Layer::new(),
            hidden_layers: [Layer::new(); L],
            output_layer: Layer::new(),
        }
    }
    #[allow(dead_code)]
    pub fn random() -> Self {
        Self {
            input_layer: Layer::random(),
            hidden_layers: [Layer::random(); L],
            output_layer: Layer::random(),
        }
    }
    // Full forward pass through the input, hidden and output layers.
    pub fn predict(&self, start_activation: &DataPoint<I>) -> DataPoint<O> {
        let mut activation = self.input_layer.propagate(&start_activation.0);
        for hidden_layer in &self.hidden_layers {
            activation = hidden_layer.propagate(&activation);
        }
        DataPoint(self.output_layer.propagate(&activation))
    }
    // Runs one forward and backward pass for a single sample, accumulating
    // gradients into `changes`, and returns the sample's share of the batch loss.
    pub fn train(
        &self,
        changes: &mut Self,
        start_activation: &DataPoint<I>,
        desired_outcome: &DataPoint<O>,
        batch_size: usize,
    ) -> f64 {
        // Forward pass, keeping every intermediate activation for the backward pass.
        let input_activation = self.input_layer.propagate(&start_activation.0);
        let mut activations = [[0.; H]; L];
        for i in 0..L {
            let activation = if i == 0 {
                input_activation
            } else {
                activations[i - 1]
            };
            activations[i] = self.hidden_layers[i].propagate(&activation);
        }
        let last_activation = if L > 0 {
            activations[L - 1]
        } else {
            input_activation
        };
        let actual_outcome = self.output_layer.propagate(&last_activation);
        // Backward pass: each layer reports the activation its input should
        // have had, which becomes the desired outcome of the layer before it.
        let desired_output_activation = self.output_layer.back_propagate(
            &mut changes.output_layer,
            &last_activation,
            &actual_outcome,
            &desired_outcome.0,
        );
        // desired_activations[i] holds the desired input of hidden layer i.
        let mut desired_activations = [[0.; H]; L];
        for i in (0..L).rev() {
            let prev_desired_activation = if i == L - 1 {
                desired_output_activation
            } else {
                desired_activations[i + 1]
            };
            let activation = if i == 0 {
                input_activation
            } else {
                activations[i - 1]
            };
            desired_activations[i] = self.hidden_layers[i].back_propagate(
                &mut changes.hidden_layers[i],
                &activation,
                &activations[i],
                &prev_desired_activation,
            );
        }
        let _ = self.input_layer.back_propagate(
            &mut changes.input_layer,
            &start_activation.0,
            &input_activation,
            &(if L > 0 {
                desired_activations[0]
            } else {
                desired_output_activation
            }),
        );
        // Squared-error loss for this sample, averaged over the batch.
        let mut loss = 0.;
        for i in 0..O {
            loss += (actual_outcome[i] - desired_outcome.0[i]).powi(2);
        }
        loss / batch_size as f64
    }
    // Applies one gradient step, averaged over the batch, to every layer.
    pub fn apply_changes(&mut self, changes: &Self, batch_size: usize) {
        let rate = LEARNING_RATE / batch_size as f64;
        self.input_layer.apply_changes(changes.input_layer, rate);
        for i in 0..L {
            self.hidden_layers[i].apply_changes(changes.hidden_layers[i], rate);
        }
        self.output_layer.apply_changes(changes.output_layer, rate);
    }
}
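// A minimal sketch of how `train` and `apply_changes` compose into a batch
// training loop. `main` below only runs inference, so this helper, its epoch
// parameter and the H = 18 / L = 0 shape (matching get_neural_network) are
// illustrative assumptions, not the exact procedure that produced the
// hard-coded weights.
#[allow(dead_code)]
pub fn train_network(
    inputs: &[DataPoint<INPUT_COUNT>],
    outputs: &[DataPoint<OUTPUT_COUNT>],
    epochs: usize,
) -> NeuralNetwork<INPUT_COUNT, OUTPUT_COUNT, 18, 0> {
    let mut network = NeuralNetwork::random();
    for _ in 0..epochs {
        // Accumulate the gradients of the whole batch into `changes`...
        let mut changes = NeuralNetwork::new();
        for (input, output) in inputs.iter().zip(outputs) {
            let _loss = network.train(&mut changes, input, output, inputs.len());
        }
        // ...then take one step scaled by LEARNING_RATE / batch size.
        network.apply_changes(&changes, inputs.len());
    }
    network
}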
// Reads training data from stdin: a count of days, then one line per day with
// INPUT_COUNT input values followed by OUTPUT_COUNT expected outputs.
pub fn read_training_data() -> (Vec<DataPoint<INPUT_COUNT>>, Vec<DataPoint<OUTPUT_COUNT>>) {
    let mut input = String::new();
    stdin()
        .read_line(&mut input)
        .expect("Unexpected end of input");
    let days: i32 = input.trim().parse().unwrap();
    let mut input_data = Vec::new();
    let mut output_data = Vec::new();
    for _ in 0..days {
        let mut input = String::new();
        stdin()
            .read_line(&mut input)
            .expect("Unexpected end of input");
        let arr = input
            .split_whitespace()
            .map(|s| s.parse().unwrap())
            .collect::<Vec<f64>>();
        input_data.push(DataPoint::new(&arr[..INPUT_COUNT]));
        output_data.push(DataPoint::new(&arr[INPUT_COUNT..]));
    }
    (input_data, output_data)
}
// Reads prediction input from stdin: a count of days, then one line per day
// with INPUT_COUNT space-separated values.
#[allow(dead_code)]
pub fn read_input_data() -> Vec<DataPoint<INPUT_COUNT>> {
    let mut input = String::new();
    stdin()
        .read_line(&mut input)
        .expect("Unexpected end of input");
    let days: i32 = input.trim().parse().unwrap();
    let mut data = Vec::new();
    for _ in 0..days {
        let mut input = String::new();
        stdin()
            .read_line(&mut input)
            .expect("Unexpected end of input");
        let arr = input
            .split_whitespace()
            .map(|s| s.parse().unwrap())
            .collect::<Vec<f64>>();
        data.push(DataPoint::new(&arr[..INPUT_COUNT]));
    }
    data
}
#[allow(dead_code)]
pub fn main() {
    // With the "evaluate" feature the input also carries the expected outputs,
    // and the program prints scoring diagnostics next to each prediction.
    #[cfg(feature = "evaluate")]
    let (data, output_data) = read_training_data();
    #[cfg(not(feature = "evaluate"))]
    let data = read_input_data();
    let neural_net = get_neural_network();
    #[cfg(feature = "evaluate")]
    let mut correct: i32 = 0;
    #[cfg(feature = "evaluate")]
    let mut incorrect: i32 = 0;
    #[cfg(feature = "evaluate")]
    let mut correct_distribution = [0; OUTPUT_COUNT];
    #[cfg(feature = "evaluate")]
    let mut error_distribution = [0; OUTPUT_COUNT];
    #[cfg(feature = "evaluate")]
    let mut correct_diff = 0.;
    #[cfg(feature = "evaluate")]
    let mut incorrect_diff = 0.;
    #[cfg(feature = "evaluate")]
    let mut question_marks = 0;
    for i in 0..data.len() {
        let result = neural_net.predict(&data[i]);
        for j in 0..OUTPUT_COUNT {
            // Confidence heuristic: how far output j strays from the average
            // of its neighbours (indices clamped at both ends of the range).
            let diff = ((result.get(j.max(1) - 1) + result.get((j + 1).min(OUTPUT_COUNT - 1)))
                / 2.
                - result.get(j))
            .abs();
            if diff > 0.12 {
                // Too uncertain; emit a question mark instead of a prediction.
                #[cfg(feature = "evaluate")]
                {
                    question_marks += 1;
                }
                print!("? ");
                #[cfg(feature = "evaluate")]
                print!("{} ", (result.get(j) * 100.).round() / 100.);
                continue;
            }
            #[cfg(not(feature = "evaluate"))]
            print!("{} ", result.get(j));
            #[cfg(feature = "evaluate")]
            {
                print!("{} ", (result.get(j) * 100.).round() / 100.);
                // Compare against the known answer and tally the score markers.
                let dist = (result.get(j) - output_data[i].get(j)).abs();
                if dist < 0.75 {
                    print!("+ ");
                    correct += 1;
                    correct_distribution[j] += 1;
                    correct_diff += diff;
                } else if dist > 2.05 {
                    print!("! ");
                    error_distribution[j] += 1;
                    incorrect += 1;
                    incorrect_diff += diff;
                }
                print!("{} ", (output_data[i].get(j) * 1000.).round() / 1000.);
                println!();
            }
        }
        #[cfg(feature = "evaluate")]
        println!();
    }
    #[cfg(feature = "evaluate")]
    println!("correct distribution: {:#?}", correct_distribution);
    #[cfg(feature = "evaluate")]
    println!("error distribution: {:#?}", error_distribution);
    #[cfg(feature = "evaluate")]
    println!("correct diff: {:#?}", correct_diff / correct as f64);
    #[cfg(feature = "evaluate")]
    println!("incorrect diff: {:#?}", incorrect_diff / incorrect as f64);
    #[cfg(feature = "evaluate")]
    println!("question marks: {:#?}", question_marks);
    #[cfg(feature = "evaluate")]
    println!(
        "Correct: {} / {}, Incorrect: {} / {}, score: {}",
        correct,
        data.len() * OUTPUT_COUNT,
        incorrect,
        data.len() * OUTPUT_COUNT,
        25. * ((correct - incorrect) as f64 / data.len() as f64)
    );
}
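// Pre-trained network: 24 inputs, one fully connected ReLU layer of 18 units,
// and 12 outputs, with no extra hidden layers (L = 0). The weights and biases
// below are hard-coded, presumably from an earlier training run, so the
// program can predict without retraining.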
pub fn get_neural_network() -> NeuralNetwork<INPUT_COUNT, OUTPUT_COUNT, 18, 0> {
NeuralNetwork {
input_layer: Layer {
weights: [
[
0.016424220571944526,
0.02994131724744406,
-0.09902130836444459,
0.06515922094379904,
-0.06315027596371998,
0.020743558630782727,
0.09305613254974321,
-0.02274987327435831,
0.04950376587878191,
-0.06681450971847293,
0.004845604796877048,
-0.030713850952743833,
0.040626951729772064,
-0.07637889557749203,
-0.0055257835315241,
0.07656783768412372,
-0.05385024399507373,
0.027098777106336527,
-0.10407096711889789,
0.05769009301545128,
-0.07325655955966477,
0.007904675924917262,
0.07743379177552981,
-0.04100498429256861,
],
[
-0.07289172199871628,
0.009799042174750673,
-0.02632863643409408,
0.044549363565906705,
-0.07275962660798733,
-0.0018815479991436275,
0.08080943234864442,
-0.04831258730356501,
0.022565226000427877,
-0.09474366591240808,
0.05731559304422269,
-0.05999342660798794,
0.01088461269643404,
0.09357575026190995,
-0.03554630869472089,
0.04714467165306683,
-0.08197732834693124,
0.0007135930442228934,
-0.047227144521255725,
0.03546385547874427,
-0.09365827226062978,
-0.010967301738944335,
0.05991062947831678,
-0.057398262434522174,
],
[
-0.0920164,
0.06004279999999999,
-0.06907920000000001,
0.013611799999999997,
0.0963028,
-0.03281920000000001,
0.0498718,
-0.0792502,
0.0034407999999999995,
-0.0444998,
0.026378199999999997,
-0.0909308,
-0.020052800000000003,
0.0626382,
-0.06648380000000001,
0.016207199999999998,
0.0870852,
-0.030223800000000002,
-0.0781646,
0.004526399999999997,
0.0754044,
-0.0419046,
0.028973399999999993,
-0.0883356,
],
[
0.05972264351073452,
0.02360167323093913,
0.09448588203743478,
-0.022752663757024304,
0.048238503080502544,
-0.0689603339871961,
0.001980490501263926,
0.0846166498043146,
-0.04460812701191559,
-0.08078532020602498,
-0.009933125493826042,
0.07273720910012976,
-0.05639875524554685,
0.02625875581386348,
-0.10283514694152784,
-0.020027572096498764,
0.05104859524555425,
0.003175764998321604,
0.08582914224373529,
-0.04333717581517572,
0.0392840395909231,
-0.08988642023765243,
-0.0072651059561966436,
0.06346002540555519,
],
[
-0.0845224,
-0.013644400000000001,
0.0690466,
-0.0600754,
0.022615600000000003,
0.0934936,
-0.023815399999999997,
0.047062599999999996,
0.010935000000000005,
0.081813,
-0.03549600000000001,
0.035382000000000004,
-0.081927,
-0.011048999999999998,
0.07164200000000001,
-0.057479999999999996,
-0.09360779999999999,
-0.0227298,
0.059961200000000006,
-0.0691608,
0.013530200000000003,
0.0844082,
-0.032900799999999994,
0.0379772,
],
[
0.08454060000000001,
-0.04458140000000001,
0.03810960000000001,
-0.09101240000000001,
-0.008321400000000001,
0.0625566,
-0.0665654,
0.016125600000000007,
-0.0318152,
0.050875800000000006,
-0.0782462,
0.004444800000000004,
0.0753228,
-0.041986199999999994,
0.028891800000000002,
-0.0884172,
0.06364220000000001,
-0.053666799999999994,
0.017211200000000003,
0.09990220000000001,
-0.029219799999999997,
0.05347120000000001,
-0.07565079999999999,
0.007040200000000008,
],
[
0.041790400000000005,
-0.0755186,
-0.004640599999999995,
0.0780504,
-0.051071599999999995,
0.031619400000000006,
-0.0975026,
0.06636979999999999,
-0.06275220000000001,
0.01993879999999999,
0.0908168,
-0.0264922,
0.04438580000000001,
-0.0729232,
-0.0020452000000000027,
-0.038173,
0.032705000000000005,
-0.084604,
-0.013726000000000006,
0.057152,
-0.060156999999999995,
0.010721000000000003,
0.09341200000000001,
-0.03571,
],
[
0.01447318575910405,
0.08407685235233549,
-0.03404355714242575,
0.037384874398230884,
-0.07928714839047844,
-0.009438903897196933,
0.0734641207062453,
0.027686350008683434,
-0.08613154375313997,
-0.014137578120071995,
0.07001310573360928,
-0.058378265463084694,
0.02520480301680767,
0.0955697588468073,
-0.02430731281903663,
-0.07651296157028532,
0.0037440935986430873,
0.07384355969697072,
-0.04524074369424982,
0.023722241482948717,
-0.09812838059071373,
-0.035670875553350564,
0.03395785686434202,
-0.032423615094337395,
],
[
-0.15907608229775655,
-0.04588209285024673,
-0.1564953015352676,
-0.09644385985929069,
0.014265196131992855,
-0.014834991889413908,
0.022048997553251667,
0.15141287867566516,
0.26143669186917645,
0.15452543400328497,
0.21674932068377464,
0.3115394252060886,
0.20070734203520849,
0.31076948436616514,
0.3181897436856928,
0.4462014429929574,
0.3087874991389462,
0.3108024073897391,
0.12552660530247478,
0.16301646726098024,
0.18853772753376202,
0.0570290762634685,
0.11272036490685991,
0.05574066499952599,
],
[
-0.062367979252166035,
0.020296451423992768,
0.09113600946762042,
-0.026272473210270743,
0.044459193014744144,
-0.07292764495142129,
0.07903974064934416,
-0.03834062872621663,
0.03244547654133815,
-0.08485979081211707,
-0.013990763027901061,
0.06867619792383099,
-0.06045322506186662,
0.02222953799314052,
-0.025671715647084432,
0.05707147154427298,
-0.07202007586767671,
-0.0010939955418279392,
0.08173111759249942,
-0.04725230176630326,
0.03563548328143464,
-0.09329692142634109,
0.07076761987717277,
-0.05814606017858598,
],
[
-0.09377100000000001,
-0.022892999999999997,
0.059798,
-0.06932400000000001,
0.013366999999999995,
-0.0345736,
0.0481174,
-0.08100460000000001,
0.001686399999999999,
0.0725644,
-0.0447446,
0.026133399999999994,
-0.0911756,
0.060883599999999996,
-0.0564254,
0.014452599999999993,
0.0971436,
-0.0319784,
0.0507126,
-0.0784094,
0.0042815999999999965,
0.0751596,
0.039032,
-0.09009,
],
[
0.20380854824535777,
0.06314627280041032,
0.1468697808702012,
0.23441120737783036,
0.10987057885274587,
0.03588068960372604,
0.0684954160665274,
0.12038630851929372,
-0.04068238563885044,
0.030667968974165623,
-0.10195092424365375,
-0.027552174490990435,
0.031955192031867134,
-0.014244014791871368,
-0.1563323512627141,
-0.07331498975020956,
0.019730538897407265,
-0.05438626292614191,
0.04212938569263786,
0.1386587397445499,
0.030023218315798197,
0.015422862684269723,
0.1260527030120082,
0.28093780694060566,
],
[
0.03247175540814799,
-0.0848378295797879,
-0.013959830613404639,
0.06873188902677518,
0.020793107013430956,
-0.09651441051260748,
-0.025635232870022787,
0.05705785266232234,
-0.07206161243533812,
0.010630314609429549,
0.08150806431570344,
-0.03580051247203129,
-0.08374046084843297,
-0.012861974817182134,
0.06983098792572605,
-0.05929275881863201,
0.02339286676413073,
0.09426875148321899,
-0.02304228264800999,
0.04783406326458911,
0.011705288980117912,
0.08258285840025217,
-0.03472688008878806,
0.03615223414051308,
],
[
-0.3633373261463179,
-0.18866780869071734,
-0.18749383575952672,
-0.0640025805753266,
-0.045583515375374556,
0.1705334352755851,
0.12573804108561396,
0.220256605693553,
0.25573954462274495,
0.13110439967573476,
0.17413461812251424,
0.04270938545471466,
0.18308586166577182,
0.13418933558495955,
0.3539136540311336,
0.38040584464541827,
0.3826565848647969,
0.27803632349458995,
0.050545391018178835,
0.029346843468026892,
0.10783121896205825,
0.5847095818028328,
1.2624361695536643,
2.6990655630624913,
],
[
-0.0301090255328522,
0.0408037857385595,
-0.08828562104614857,
0.0756165566071779,
-0.053476448403833664,
0.02920789559764347,
-0.09990508979830555,
-0.01721562512437851,
0.05369532595685286,
-0.06361274312496612,
0.00723662185046559,
-0.02888734777580648,
0.041973774137855725,
-0.07534790224327147,
-0.0045130123977817176,
0.07812650458632761,
-0.0510325374791195,
0.03160910940189758,
-0.09754772874003302,
0.06629969450442387,
-0.06284754241548057,
0.019784415870594013,
0.09056847010784437,
-0.02679948771155681,
],
[
-0.07173981810490318,
0.011633834338306025,
0.08304627743049663,
0.04712401869073444,
-0.08041524810509244,
0.004809223983853832,
0.07754620725924678,
-0.03912074570255706,
0.0320147971458381,
0.10305198094440797,
-0.014274126363057844,
-0.06187264411979106,
0.021013494837075503,
0.09244279638917886,
-0.023587163150724906,
0.04881206243518455,
-0.0680577539998071,
0.0013783223163475302,
0.08233235404616067,
0.03242258947501251,
-0.08640290165008069,
-0.01552267161043292,
0.06801805269131136,
-0.05879874811247877,
],
[
0.05549521047109373,
-0.06659834191931126,
-0.10078340873295377,
-0.036524320621736356,
0.0562187563140305,
-0.05552094352711152,
0.05140557473716675,
-0.0564852110006163,
0.0510159206294875,
0.13245288894049298,
0.10404845854209963,
0.18465923137077164,
0.07851183993719654,
0.1583223713037226,
0.0493428382786614,
0.11756422209139028,
0.19139362194638324,
0.04668008437727288,
-0.0015031957663981066,
0.05801652413562487,
0.1190984934434972,
-0.04025974808258961,
-0.010316274896839625,
-0.22513530096855425,
],
[
0.045185913376407774,
0.010424268484264169,
0.08055009277005483,
-0.0386559271247992,
0.032211629523645105,
-0.08246491238094236,
-0.005176602291257064,
0.08602572000083476,
-0.034324556672268136,
0.053691225310049516,
0.009082801297421929,
0.09505417936300677,
-0.031450340446255845,
0.053828064402810266,
0.12543988589174354,
0.004781355958848602,
0.06663894517302786,
-0.06079686570604648,
0.08606270207259868,
-0.033623970110171736,
0.03580681981047006,
-0.08157282759574525,
-0.01067390179405951,
0.07146599475467055,
],
],
biases: [
0.03208227284105914,
0.013487067840235003,
-0.017457599999999997,
-0.05747756164935169,
-0.0911448,
-0.040900599999999995,
-0.0718376,
0.09606623123275354,
-0.2248811652657712,
0.02608304261541284,
-0.019211999999999996,
0.007712062772854342,
-0.08116812063612903,
-1.2109615348982483,
0.04141431262626776,
0.013529082148481563,
0.04645111126633243,
-0.05456207770381666,
],
},
hidden_layers: [],
output_layer: Layer {
weights: [
[
-0.053775637491385926,
-0.011234605599996728,
0.059674399999999996,
-0.025662614433879308,
0.0250564,
0.0959344,
-0.0213746,
-0.06087266126664454,
-0.09821678032656868,
0.08702480623136813,
-0.03305519999999999,
0.12023113502687145,
-0.07958646750305215,
0.1955552167010648,
0.07650830018842521,
-0.0022703189412401786,
-0.1854504761752589,
0.02593169597737608,
],
[
-0.027416133462474977,
0.02775430900980534,
-0.089525,
-0.01735840627541944,
-0.05477460000000001,
0.01610339999999999,
0.09879439999999999,
-0.011539802991292859,
-0.1015240568474685,
-0.07108833077720589,
0.005932399999999993,
0.13174862558899753,
0.04050885530098843,
0.19343515704029804,
-0.001282407649538334,
0.03553495062829749,
-0.1769849058872639,
0.014996147809249582,
],
[
0.023574455214437844,
-0.052077335783319315,
0.018830999999999997,
-0.06948468769153077,
-0.027600000000000003,
0.055091,
-0.074031,
0.017128707008875456,
-0.08887424234600654,
0.04860537085666219,
-0.0857118,
0.15168426987188183,
0.06776783948749784,
0.18884225135020097,
0.037473899642206474,
-0.12824873437955978,
-0.14582861695888902,
-0.03996477012483798,
],
[
-0.057470580156878344,
-0.02490038842984195,
0.05781860000000001,
-0.032633709324248375,
0.011387600000000008,
0.08226560000000001,
-0.0350434,
-0.08685698346828206,
-0.13377698081971726,
0.07477723436612983,
-0.04672420000000001,
0.14814847292607147,
-0.09322228652208518,
0.189243377778069,
0.06556691759499342,
-0.01188868286587073,
-0.02699077906549335,
-0.005404158416417961,
],
[
-0.038279701281073844,
0.014092817819696563,
0.08499319999999999,
0.007393578736057564,
-0.0802566,
0.002434399999999992,
0.07331240000000001,
-0.047146395340584325,
-0.058770896936490874,
-0.0856313926214862,
-0.0195496,
0.13052024448467917,
0.02714308723187045,
0.1679735413946671,
-0.012863626487280549,
0.02268899448272328,
-0.14099235044171585,
0.014264570252763781,
],
[
-0.008152177071025975,
-0.07754729393411319,
0.005162,
0.0928794824358952,
-0.041269,
0.029608999999999996,
-0.0877,
0.005510759312649533,
-0.007730898384978118,
0.0363897482397726,
-0.0993806,
0.08314543996917957,
0.054404851030733205,
0.15589192437473964,
0.01486477843658134,
0.07302201265313511,
-0.1257010807833399,
-0.10122244627127287,
],
[
0.10239617901776309,
-0.038550603118904375,
0.03233660000000001,
-0.04502687549176004,
-0.014094399999999997,
0.06859660000000001,
0.020656000000000008,
-0.1094135935651576,
-0.024422590112382715,
0.061123493255479604,
-0.06039300000000001,
0.057181824010599513,
0.09347455444436392,
0.14833125219039545,
-0.063588580404621,
-0.017351748022768995,
0.017707705396870905,
-0.040571195278214706,
],
[
-0.06362013089262641,
-0.011370684319205817,
0.07132419999999999,
0.043460907694446126,
-0.0939254,
-0.0112344,
0.0596436,
-0.0581945446954325,
0.053319521096667856,
0.09951311660709589,
-0.0332184,
-0.022747634991236415,
0.001546946559638301,
0.12844869153011715,
-0.03577004008028226,
0.0357817994755233,
-0.08765237344950855,
0.026696490393403063,
],
[
-0.04938755874797273,
-0.09119741097402793,
-0.0203198,
0.034983741241926325,
-0.0667508,
0.015940200000000005,
0.0986312,
-0.04030813795368034,
0.08743560198297219,
0.0068928479279595925,
0.08695040000000001,
-0.0729870388006496,
0.03991891251916933,
0.11460779777298417,
0.007209697978746304,
0.04870131386580613,
-0.09857900507403011,
0.08514803503114668,
],
[
0.06962495964201854,
-0.06402038552499435,
0.01866779999999999,
-0.11634958175963318,
-0.0277632,
0.05492779999999999,
0.00698700000000001,
0.06870951030987597,
0.06368574780655795,
0.04399704750577696,
-0.085875,
-0.07008473052175775,
0.0668390769204744,
0.11550364233452261,
-0.08769105295968485,
-0.011045879264448801,
0.06002392360042282,
-0.03410369022356277,
],
[
0.09631850655519339,
-0.025030014358963116,
0.04584239999999999,
-0.007483763819270891,
0.0924056,
-0.0367164,
0.0459746,
-0.09511230993556774,
0.05888892898072973,
0.07123921512553644,
-0.0468874,
-0.111362500729083,
-0.013042161882684068,
0.11215208942441114,
-0.048162292607904555,
0.020346185441649105,
0.10322847132023462,
0.013419958942966038,
],
[
0.010205278584550323,
0.09514148769012777,
-0.0339888,
0.009764803109952019,
-0.0804198,
0.0022712000000000066,
0.0731492,
-0.03619993792783411,
0.06755958910956417,
-0.008129065631949263,
0.061468600000000005,
-0.13835653832912898,
0.013947949561641226,
0.10673819211854738,
-0.020964787459996678,
0.0671838691753111,
0.07630377672820546,
0.11441961349577506,
],
],
biases: [
0.2355912334785171,
0.22736889201559937,
0.21588980961575163,
0.21096378736484228,
0.20396860853940665,
0.19069138959675552,
0.18523055797120225,
0.19658059466031874,
0.2015661365090517,
0.17496631071750068,
0.1943279744347395,
0.19440116065326574,
],
},
}
}