class LinearRegression:
    def __init__(self, learning_rate=0.00001, iterations=1000):
        self.learning_rate = learning_rate
        self.iterations = iterations
        self.weights, self.bias = None, None

    def predict(self, X):
        # Pure-Python equivalent of np.dot(X, self.weights) + self.bias
        # (numpy vs no numpy ;( )
        predictions = []
        for X_row in X:
            row = []
            for weights_col, b in zip(zip(*self.weights), self.bias):
                # Dot product of one input row with one weight column, plus that
                # output's bias term
                row.append(sum(a * w for a, w in zip(X_row, weights_col)) + b)
            predictions.append(row)
        return predictions
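# The parameters below are loaded as a pretrained model, so learning_rate and
# iterations go unused at inference time. A minimal sketch of how such weights
# could be fit offline with batch gradient descent follows; this is an
# assumption (the actual training code is not part of this script), and
# fit_linear_regression is a hypothetical helper, defined but never called here.
def fit_linear_regression(X, y, learning_rate=0.00001, iterations=1000):
    """Fit weights (n_features x n_outputs) and bias (n_outputs) by gradient descent."""
    n_samples, n_features, n_outputs = len(X), len(X[0]), len(y[0])
    W = [[0.0] * n_outputs for _ in range(n_features)]
    b = [0.0] * n_outputs
    for _ in range(iterations):
        # Forward pass: preds = X @ W + b
        preds = [[sum(X[i][f] * W[f][o] for f in range(n_features)) + b[o]
                  for o in range(n_outputs)] for i in range(n_samples)]
        # Gradient step on (half) mean squared error, one output column at a time
        for o in range(n_outputs):
            errors = [preds[i][o] - y[i][o] for i in range(n_samples)]
            for f in range(n_features):
                grad_w = sum(errors[i] * X[i][f] for i in range(n_samples)) / n_samples
                W[f][o] -= learning_rate * grad_w
            b[o] -= learning_rate * sum(errors) / n_samples
    return W, b

# Pretrained parameters: weights is 24x12 (one column of 24 coefficients per
# forecast hour), bias holds the 12 per-hour intercepts.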
weights = [[0.003920396765268544, -0.022535015082990828, -0.04120641862300648, -0.04257985027580123, -0.046219840767987366, -0.028053746939430715, -0.04745222814725147, -0.04972464205995247, -0.08078582378493955, -0.07345109288787816, -0.07318628519474647, -0.061288954419205094], [0.002876546120029993, -0.0008525109953127387, -0.03134003595403203, -0.06707427545738971, -0.08419136275873522, -0.09046987126442707, -0.07336164836893283, -0.045847717596078925, -0.02484228718062048, -0.04364682628575922, -0.03780560868833654, -0.04430268208610096], [-0.0006919963974965506, 0.03442356425656293, 0.03439789336436031, 0.016055587504305996, 0.0024166886655296255, -0.005799311044723527, 0.011012873740452244, 0.013791516136577848, 0.02330445976634587, 0.04438981328928883, 0.054819942749401766, 0.0506519485443254], [0.033525384881602616, 0.06134868619438754, 0.10002214208066357, 0.11391142865436686, 0.0698940421148916, 0.006116209170749081, -0.042697877833631756, -0.05889251957391652, -0.04807610794719913, -0.058569658761130454, -0.07278608959805345, -0.05870058781567922], [-0.023991483559490337, -0.042669466141270415, -0.03187146129259866, 0.005317975805864959, -0.002355755000376818, -0.04672359553817047, -0.09239984724508239, -0.11428007614241534, -0.12467774022073329, -0.12536575896466076, -0.1194631381500062, -0.13517048994966918], [0.010418992347651614, 0.011317560781836437, 0.027554855537180553, 0.019403879298042104, 0.08502326375983714, 0.1205130052878765, 0.09336931741538033, 0.05603503496077256, 0.04407588741428865, 0.05076211527639792, 0.027062723388858352, 0.014490019299185702], [-0.016873624273752663, -0.02935443792849062, -0.03231913838560684, -0.015498397619210635, 0.040042264377419784, 0.12047281170070508, 0.19417961315307822, 0.14171681064312652, 0.07942947338535662, 0.03961840341102912, 0.02863067577785166, 0.020981978844313298], [0.0345242382694672, 0.037433021264854614, 0.04213717377711563, 0.05368439530607617, 0.044133679937582035, 0.04494668649353758, 0.060179879564989304, 0.08625455972680901, 0.03931292965189667, 0.0115777115080697, 0.004942957292734967, 0.00886705989740343], [-0.0146285014984655, -0.0004581062156971206, -0.022261180173755748, -0.04482087427797104, -0.08433901709443235, -0.10469006969598375, -0.1198873272830275, -0.10012072896391964, -0.029107391578330158, -0.001263352045823045, 0.02176559088360807, 0.02373537815004118], [-0.00890612534915016, -0.0195011960122291, 0.0010856229683160817, 0.02051581030144889, 0.018676949111417002, 0.01348833908452495, 0.04585141955675172, 0.08062334991500086, 0.10480579500626151, 0.10748685654983425, 0.07451099490060302, 0.0708036426002283], [-0.0008318882552535596, 0.012239797274227823, 0.012422267858462658, -0.0032773413140830088, 0.008098707609678362, 0.01830878593078026, 0.01278855946434962, -0.00320641495224405, -0.014624834265100648, -0.007969635488727686, 0.005177625590604816, 0.012015651990524049], [0.007933962962102352, 0.013355734220856324, 0.007543771398664037, 0.03239380650726927, 0.026903766659483422, 0.011702771296762534, 0.0024377148249192957, 0.006918566856627539, 0.01775399434153654, 0.038039445280270805, 0.06299934860103616, 0.07585300857747718], [-0.01443404311912635, -0.03765166843673306, -0.03119157914033986, -0.053605306137966595, -0.030378188037273147, 0.0008618706736584041, 0.009861562333311617, 0.0350674243979341, 0.045084466352387415, 0.04116708695300051, 0.036191370087059145, 0.03956960028686019], [0.000885665071048465, -0.011902779690485268, -0.043325042161571625, -0.0494436734788297, 
-0.06020303102340769, -0.08695652814401073, -0.10406917099494772, -0.10047637860202527, -0.059896777590511985, -0.034706418107874504, -0.019595965724237465, -0.013890666533481772], [-0.0015991501239309347, 0.005734223885236397, 0.012951804838685277, 0.007600428815215499, 0.004854393301935031, -0.010887444560609087, -0.023925919353464623, -0.005570551066572652, 0.033126967435347716, 0.055849663040787714, 0.07390839288461756, 0.07944852864013431], [0.006860651341549422, 0.021202480402726164, 0.03136130820302205, 0.03899023294716015, 0.02317604504007212, 0.03730289960317955, 0.09443908051973877, 0.15904950209046537, 0.1820238120055803, 0.21027984505161312, 0.21732540340160897, 0.22747340520239026], [-0.0036441265774003234, -0.0014736547972812428, -0.019979348269077823, -0.03827843956319876, -0.03865266261138494, 0.01568058354463392, 0.10109618625351612, 0.1373677590026211, 0.12347464849981125, 0.09406036385276197, 0.08031782790841532, 0.06725718660813333], [-0.0015574406473250254, -0.02325783250295883, 0.0016875563937124845, 0.020500441554436352, 0.056034287280016394, 0.1181723498926911, 0.10081334886491937, 0.028788856898929946, -0.028646527038930943, -0.04294843061320349, -0.05498262258726348, -0.03487696807593312], [0.006189575344476673, 0.0355098151204112, 0.03107166515860985, 0.08441773351553965, 0.14827531287636267, 0.1391423915206577, 0.09401717682902791, 0.06542736800619481, 0.06275627604296517, 0.0673960272775137, 0.08675863201844915, 0.06742352490722568], [-0.020677783744923354, -0.021279676153749438, 0.005717495262432509, 0.01594658465113706, 0.030365143064965787, 0.006259710815848362, -0.010142203352408232, 0.0011963169089332887, 0.011107543399294402, 0.008276013493656427, 0.011462066121387166, 0.014576994880104393], [-0.04765721008145233, -0.07355817333719758, -0.0357327408007003, -0.03875601665760375, -0.06893633248177272, -0.06816287465417407, -0.04410107903546156, -0.04571956587496957, -0.051094562761171976, -0.05321426356194221, -0.06485991451677985, -0.0489882588824427], [-0.04528890362583943, -0.040069409362985864, -0.07903397081135903, -0.09265714780736997, -0.08639086624466527, -0.082940411646447, -0.06487407982499514, -0.03130688706937004, -0.010739232316552397, 0.004762845207601827, -0.0008730185550297931, -0.02210609960468349], [0.13946441272939403, 0.11361197515585476, 0.07057057097360009, 0.04587407646433199, 0.0052294969637222185, 0.023317384526175117, 0.02619296752809361, 0.01657944587973476, 0.03182020635873358, 0.025070383373360743, 0.035331781741557995, 0.03447199419794761], [0.9484372840369811, 0.9722762541580113, 0.9782534092955284, 0.9548242291569895, 0.9165115953094654, 0.8175087763953239, 0.7323559385946031, 0.6662559885404494, 0.5960477426304079, 0.549510641337417, 0.5202590250369356, 0.5029892547956609]]
bias = [0.08396650537896681, 0.04458857506515095, -0.021959238466897242, -0.12859212993365157, -0.2221876883759667, -0.27295725351115596, -0.31422051136813006, -0.4021641246309094, -0.45465141973726386, -0.49745803045588505, -0.575081426474532, -0.6366347867269699]
data = []
n = int(input())
for _ in range(n):
    row = list(map(float, input().split()))
    data.append(row)
#data = np.array(data)
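# Expected stdin (inferred from the parsing above): a line with n, then n lines
# of whitespace-separated floats whose first 24 values are the hourly
# temperatures used as features.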
X = [row[:24] for row in data] # Temperatures of the past 24 hours
#print(data[0])
k = 7 # number of forecast hours actually printed; the remaining 12-k are "?"
model = LinearRegression()
model.weights = weights # load pretrained model
model.bias = bias
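# With numpy (avoided here as a dependency), the same inference would reduce to
# one broadcasted expression, roughly:
#   import numpy as np
#   pred = np.array(X) @ np.array(weights) + np.array(bias)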
pred = model.predict(X) # use only the 24 temperature features, not the full row
for row_pred in pred:
    for value in row_pred[:k]:
        print(f"{value:.1f}", end=" ")
    for _ in range(12 - k):
        print("?", end=" ")
    print()
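# Output: one line per input row, with the first k=7 forecast hours rounded to
# one decimal place, then 12-k question marks as placeholders for the hours
# that are not printed.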