class LinearRegression:
    def __init__(self, learning_rate=0.00001, iterations=1000):
        self.learning_rate = learning_rate
        self.iterations = iterations
        self.weights, self.bias = None, None

    def predict(self, X):
        # Pure-Python equivalent of np.dot(X, self.weights) + self.bias:
        # dot each input row with every weight column, then add the bias.
        predictions = []
        for X_row in X:
            dots = [sum(a * b for a, b in zip(X_row, weights_col))
                    for weights_col in zip(*self.weights)]
            predictions.append([d + b for d, b in zip(dots, self.bias)])
        return predictions
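# Quick sanity check for predict() with hypothetical toy values (not the
# pretrained model below): identity weights should pass inputs through
# unchanged and only add the bias.
#   m = LinearRegression()
#   m.weights = [[1.0, 0.0], [0.0, 1.0]]
#   m.bias = [0.5, -0.5]
#   m.predict([[3.0, 4.0]])  # -> [[3.5, 3.5]]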
weights = [[0.0036911773854469573, -0.021135712517553612, -0.0422795821076981, -0.04800305398801997, -0.05245872004751538, -0.03476834964868706, -0.04977189468721867, -0.0495893422326931, -0.076781442966237, -0.07181040308419226, -0.07105755426551355, -0.06104777672970897], [0.004444520901196911, 0.0017030553844912851, -0.0255998820596783, -0.057635670343347316, -0.07474599406568506, -0.0799338819471059, -0.0667229616739718, -0.044492569061805286, -0.02927094883881163, -0.04343495415978437, -0.03840322275888711, -0.043065647345435464], [0.0011171869006609395, 0.032171807020134384, 0.03292324094973227, 0.01637739474010064, -0.0014054798055755876, -0.015371796859364906, -0.004510178372802627, -0.0006848151534599918, 0.009849030112424769, 0.025769094830881252, 0.03403443644346661, 0.031094919959930415], [0.024901674102961218, 0.05039662651210524, 0.0844494116752146, 0.09751324414480513, 0.05884930497405896, -0.00023945646288353842, -0.04381717545744606, -0.057227169111493996, -0.045726352468676915, -0.05203601324796512, -0.06240791630887635, -0.05220087541967055], [-0.01516295248633479, -0.027518329362705055, -0.01303751211280084, 0.019324606000804493, 0.01636610480399803, -0.022942782026548408, -0.06754886986579922, -0.09165915503484579, -0.1005043851295159, -0.10102618258935682, -0.09880922381684497, -0.1119437636886401], [0.006444267113201166, 0.004222300300804199, 0.019351922577185557, 0.01809154618464811, 0.07775224719449636, 0.11043466561238623, 0.08851372420553913, 0.0491210686185099, 0.03092687174986998, 0.03201394760173955, 0.011321563610578447, -0.0008645368615161713], [-0.010566404808022139, -0.021606198643010527, -0.02325082041286759, -0.008979997945198177, 0.042579727825042865, 0.11413015701308896, 0.17495681413323447, 0.13010166945523957, 0.07304980145742342, 0.03786150294263295, 0.026510053384796013, 0.019187153196534638], [0.026081524387361433, 0.028447605825689157, 0.030425653716829933, 0.039122280336944974, 0.03286445883886072, 0.039918876229755686, 0.059618872536352466, 0.07996255637260648, 0.041699560245854195, 0.017452247350068804, 0.011720625572938516, 0.014177907081139765], [-0.011199752746238899, 0.00041404487946504405, -0.01597603412657642, -0.03237830613264249, -0.06632497035351767, -0.08291596244606336, -0.08997970751069745, -0.06861223034492221, -0.012261753355203827, 0.008485971683144949, 0.024514326317773606, 0.02625769049060683], [-0.007483290657797055, -0.013308139958393726, 0.0021227968732481033, 0.015641827216924215, 0.01035942919015306, 0.004011706636746063, 0.02881381175008837, 0.058789288408388374, 0.08330483891276022, 0.08832509472671589, 0.06470533725837736, 0.06293359324135034], [-0.0007376042682160713, 0.009282588407850633, 0.010382620470144783, 0.0002304319539728732, 0.008331516165153662, 0.01491567766511654, 0.01150555014486164, 0.0022313626981437366, -0.0027674333395377726, 0.00496722890852732, 0.015242396398858953, 0.02160677412179914], [0.00465203872663568, 0.007570187087332369, 0.0032685041570491447, 0.02054701984141978, 0.019432158807250487, 0.01003061624390168, 0.002137741932310575, 0.0060150830301386226, 0.015765808229916224, 0.0329415874411091, 0.05351363410498791, 0.0648789136232227], [-0.010710519225239372, -0.030132501631242873, -0.02783357036442684, -0.04429532576745737, -0.024948351264598326, -0.0031593126549484527, 5.361342885611338e-05, 0.019636969580194407, 0.03164780401604585, 0.03263524676986897, 0.032665969199017215, 0.03704770518263252], [-0.0019301255205164598, -0.013755134567680636, -0.03833738679008933, -0.044766251083812485, 
-0.05195371563692278, -0.07393763179210724, -0.08969177646721527, -0.08269753663869055, -0.044125303364315395, -0.020451986049750098, -0.005855654051270653, -6.3312871216942e-05], [-0.0022522160394996887, 0.0036421702550848674, 0.007807987329299975, 0.002371331929197625, -0.0032597562942506897, -0.01788466297176147, -0.02405296542679174, -0.00039887030967721145, 0.038855606412628835, 0.06263998082433007, 0.07930886455464835, 0.08514775538143646], [0.005315626262857413, 0.01818996554245066, 0.025486625379871723, 0.029125509835407494, 0.01338651270336233, 0.027761453198956267, 0.0827600225680071, 0.14269820475008824, 0.16510377773460844, 0.18857268678459305, 0.19489985537971036, 0.2032840638537468], [0.0003815298575770277, 0.002258593361212477, -0.01105071386731949, -0.02435727730156019, -0.0225817594340407, 0.03184547465021539, 0.10926044007401144, 0.13908419678707803, 0.12284307655016893, 0.09821637730882872, 0.08598615216937162, 0.07720880578887221], [0.00411360995019797, -0.01146361827797461, 0.007718245245669605, 0.02683090387370078, 0.06390441096060542, 0.12134383998846368, 0.1098507601260107, 0.04805254895280074, -0.0039025824696090963, -0.019073809331405834, -0.029316188194385952, -0.01541658262432464], [0.0032612792917920803, 0.025452626290954235, 0.026835101089263434, 0.07464811167699913, 0.13335461215243774, 0.12866818310786185, 0.08641537233622827, 0.05551525552923992, 0.04822938821598445, 0.05003919942055119, 0.0654277735239637, 0.051192122753000384], [-0.033094793959092234, -0.03334311354212006, -0.007486842932858852, 0.006716561891540795, 0.023234107875668417, 0.0024860164748309057, -0.013924371728975172, -0.006347896866331103, 0.002790956807798123, 0.002085647627275863, 0.0063644386995147535, 0.009197769018776426], [-0.05963018130027368, -0.08043104941194257, -0.049555047405011717, -0.050839120012074884, -0.07380458709651655, -0.07446308383438942, -0.05319927877182586, -0.04813534819455566, -0.047006321187813824, -0.045851343497770786, -0.05403408200281053, -0.041556155053053906], [-0.026628854997991204, -0.02580118467606165, -0.05740285848512658, -0.07119185320053897, -0.07235514897914289, -0.06941443108781896, -0.05172467753418279, -0.022862845302322916, -0.004178536198097494, 0.008256975812412313, 0.003372938695091762, -0.012655384137463178], [0.19986608147366652, 0.1799519113547624, 0.1413746422547929, 0.11680071284974758, 0.07876862092647893, 0.08582968895034335, 0.08305492948957051, 0.07132048175537375, 0.07856963552767618, 0.06930781028668194, 0.07434682603177475, 0.07049605126131056], [0.8888809932196065, 0.9080229875092154, 0.9073403426762547, 0.8814291354352618, 0.8412872824329707, 0.7514035962039082, 0.6723127642992508, 0.6085124640120777, 0.5437838620633445, 0.4993743198409579, 0.4717797774224695, 0.45413955992483235]]
bias = [0.09543910445516911, 0.06060362402361733, -3.741176868008018e-05, -0.09676719755467743, -0.18145187054340634, -0.2285095140132972, -0.2674038446857276, -0.3476972513518918, -0.39613157313378466, -0.43542267874568086, -0.5053045984050138, -0.5609163058790877]
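# Shapes: weights is 24x12 (24 hourly temperatures in, 12 hourly predictions
# out), and bias holds one offset per predicted hour.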
data = []
n = int(input())
for _ in range(n):
    row = list(map(float, input().split()))
    data.append(row)
X = [row[:24] for row in data] # Temperatures of the past 24 hours
k = 8  # number of hours actually predicted; the remaining 12 - k are printed as "?"
model = LinearRegression()
model.weights = weights # load pretrained model
model.bias = bias
pred = model.predict(X)  # predict from the 24-hour window, not the raw rows
for row_pred in pred:
    # First k hours: model output, one decimal place.
    for value in row_pred[:k]:
        print(f"{value:.1f}", end=" ")
    # Remaining 12 - k hours: placeholders.
    for _ in range(12 - k):
        print("?", end=" ")
    print()
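# For reference: a minimal sketch of the batch gradient-descent training loop
# that the (otherwise unused) learning_rate and iterations hyperparameters
# hint at. This is an assumption about how weights/bias above could have been
# produced; the actual training code is not part of this script, and fit() is
# never called here.
def fit(model, X_train, y_train):
    n_samples = len(X_train)
    n_features = len(X_train[0])
    n_outputs = len(y_train[0])
    model.weights = [[0.0] * n_outputs for _ in range(n_features)]
    model.bias = [0.0] * n_outputs
    for _ in range(model.iterations):
        pred = model.predict(X_train)
        # Residual for every sample/output pair: predicted minus true value.
        error = [[p - t for p, t in zip(p_row, t_row)]
                 for p_row, t_row in zip(pred, y_train)]
        # Gradient step on each weight and bias term (mean-squared-error loss).
        for f in range(n_features):
            for o in range(n_outputs):
                grad = sum(X_train[i][f] * error[i][o]
                           for i in range(n_samples)) / n_samples
                model.weights[f][o] -= model.learning_rate * grad
        for o in range(n_outputs):
            grad = sum(error[i][o] for i in range(n_samples)) / n_samples
            model.bias[o] -= model.learning_rate * grad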