class LinearRegression:
    def __init__(self, learning_rate=0.00001, iterations=1000):
        # learning_rate and iterations only matter for training; this script
        # loads pretrained parameters, so they are unused at inference time.
        self.learning_rate = learning_rate
        self.iterations = iterations
        self.weights, self.bias = None, None

    def predict(self, X):
        # Pure-Python equivalent of np.dot(X, self.weights) + self.bias:
        # dot each row of X with every weight column, then add the bias.
        return [
            [x + y for x, y in zip(inner, self.bias)]
            for inner in [
                [sum(a * b for a, b in zip(X_row, w_col)) for w_col in zip(*self.weights)]
                for X_row in X
            ]
        ]
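
    # The constructor stores learning_rate and iterations, but the original
    # script ships no fit(). The sketch below is an assumption about how such
    # weights could be trained (plain batch gradient descent on mean squared
    # error), not necessarily how the pretrained values further down were made.
    def fit(self, X, y):
        n_samples, n_features, n_outputs = len(X), len(X[0]), len(y[0])
        self.weights = [[0.0] * n_outputs for _ in range(n_features)]
        self.bias = [0.0] * n_outputs
        for _ in range(self.iterations):
            pred = self.predict(X)
            # Residuals: one list of n_outputs errors per sample.
            err = [[p - t for p, t in zip(pr, yr)] for pr, yr in zip(pred, y)]
            for j in range(n_features):
                for o in range(n_outputs):
                    grad = sum(err[i][o] * X[i][j] for i in range(n_samples)) / n_samples
                    self.weights[j][o] -= self.learning_rate * grad
            for o in range(n_outputs):
                grad = sum(err[i][o] for i in range(n_samples)) / n_samples
                self.bias[o] -= self.learning_rate * grad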
weights = [[0.003582143958527445, -0.022156673140040817, -0.042244859228743645, -0.04595336320617114, -0.04984984490553436, -0.03178221416364742, -0.04878612578167108, -0.04967572795874577, -0.07864290205803136, -0.07262641685109207, -0.07206403791277863, -0.06118929585160192], [0.003666830974211947, 0.0005358611125588474, -0.02818003226870819, -0.06186689700493642, -0.07885882923737095, -0.08434916756303971, -0.06928631021484903, -0.044772624120783, -0.027056345978182565, -0.043195071379069305, -0.03777452752739495, -0.04323926802406588], [0.00047544414224846214, 0.0335024411251771, 0.033956675240595174, 0.01653734539486544, 0.00036791082619174863, -0.011239373613848231, 0.0023708602915255735, 0.005850617381915553, 0.016063483736374527, 0.03412041358219909, 0.04332629171779826, 0.03986899399545144], [0.028777176158687315, 0.05539647908113759, 0.09150239189334645, 0.10501420070941765, 0.06373733273766925, 0.0022944332147952235, -0.04362469801073538, -0.05813728660453639, -0.046827774126078595, -0.05488281338805089, -0.06686040886080218, -0.05498044502634537], [-0.01870226568725909, -0.03377990756524969, -0.020794700010394854, 0.01363107495878584, 0.008578972607602398, -0.03311200481002545, -0.07856961846450389, -0.10176426725171674, -0.11115320834653128, -0.11166131455470367, -0.10795514001212339, -0.12218893340901901], [0.007835854003722708, 0.00670599007655646, 0.022204459144755398, 0.0180221713493888, 0.08048533406349993, 0.11467135031846008, 0.0906435036229908, 0.05188900655380775, 0.03616010724487509, 0.039595343161088724, 0.017583040642158636, 0.005147109856743296], [-0.013318426867644133, -0.024996231887211808, -0.027232304239671032, -0.011960875272077868, 0.04159820978877914, 0.11729203419877239, 0.18382313128983074, 0.1357134764766429, 0.0761720154309958, 0.03891997034578843, 0.02767586288512826, 0.020231563484171235], [0.0295650921789293, 0.03221672746632555, 0.03525505010313065, 0.0451242803491439, 0.037317774847082354, 0.041696547303894035, 0.059607081967938375, 0.08243474057128262, 0.04052585116747709, 0.014751906325465514, 0.008778958526000899, 0.011901666940932072], [-0.012583395964536453, 0.00014869045339937337, -0.018483174016336198, -0.037426654452517916, -0.07401679328117898, -0.09261165440634889, -0.10332187858901436, -0.08236453673102102, -0.019523111336956063, 0.004311124554662658, 0.023275508665701487, 0.025116400295347622], [-0.008131681332393123, -0.015889435196107485, 0.0017615781519487123, 0.017697708295872475, 0.013767844264721022, 0.007744699913244254, 0.03557881992248374, 0.06755628663850191, 0.09221117716405865, 0.09639351625103414, 0.06884537757294978, 0.06624343949190417], [-0.0007590573213761215, 0.010564100442116045, 0.011390269259014249, -0.0009330701896889173, 0.008625979623415141, 0.016782764169264767, 0.012620116381424561, 0.0004900880396406719, -0.007334345357352015, -4.338413971473962e-05, 0.011364670251110947, 0.01791161189134958], [0.006067571550264876, 0.010030395892276419, 0.0051272044245194125, 0.02547664715955218, 0.022692509493391883, 0.011214503737439954, 0.002932589617729331, 0.006968026055018636, 0.01684636463041415, 0.03518145183924827, 0.05758165919479791, 0.06959434669714494], [-0.01205837816044482, -0.033150711891170685, -0.029279228800105202, -0.048203686192944004, -0.027193202752937864, -0.0013631179129129857, 0.004294881350131099, 0.026146194855504557, 0.037174930547531466, 0.03601181562946184, 0.03396525210805566, 0.03792471328224993], [-0.0006576180649708673, -0.01301041667486778, -0.040508002459832756, -0.046892141738643214, 
-0.05548795479104803, -0.07945725728355087, -0.09600993175247363, -0.09060103373194905, -0.051162947605447824, -0.026928669986686953, -0.012147225139360271, -0.00642205499231303], [-0.0018223628479682014, 0.004740671415790535, 0.010199957459045901, 0.004885789519147318, 0.00042253400640119323, -0.014942596972197861, -0.024209693602197515, -0.002792557786532652, 0.03640407600164499, 0.059924682805909894, 0.077236318273472, 0.08296252028812613], [0.005693786316549225, 0.019322387902909032, 0.02791361696872497, 0.03323251014576673, 0.01733895260709585, 0.03148543012540369, 0.08757498719502078, 0.14979106891660704, 0.17258874754673253, 0.19812179435830424, 0.2047868188825931, 0.21388750780419646], [-0.0021096184350977644, -0.0002228868337924403, -0.015666144472814433, -0.03116201043131753, -0.030396205809353302, 0.024304770312835733, 0.10551166001114735, 0.13822430426872095, 0.12288259882258774, 0.09619566779664088, 0.08323468114801041, 0.07272793944881917], [0.0017874635118797137, -0.016396826667486496, 0.0050799843520626525, 0.024064719197673924, 0.06061278451926024, 0.12013662356916027, 0.10603150260352152, 0.039663485175013995, -0.014685194330018856, -0.029547757093174636, -0.04057051916800909, -0.024109249647226653], [0.005677737096349039, 0.03094778745968356, 0.02986289114096615, 0.08009443850110774, 0.1410418090855711, 0.1341828630415969, 0.09017937024975342, 0.059953783700341594, 0.05458300804522503, 0.05759649702424118, 0.07472478549345385, 0.05831217444032052], [-0.026717851611815866, -0.027031287527456253, -0.0006090573676915599, 0.011904136798625313, 0.027395378059795517, 0.0048861086295391145, -0.011640408172985485, -0.002475043073877748, 0.006985364460680636, 0.005322006813004213, 0.009123094172105882, 0.01202035457330508], [-0.05529426677413853, -0.07832641470370938, -0.04447708421535712, -0.04667553714560848, -0.07283611717464727, -0.0728204706806376, -0.05006176145308186, -0.0476219703539553, -0.049269000270469196, -0.049511992795630295, -0.05930101604921234, -0.04539603452118582], [-0.03718062856089464, -0.03467654052320434, -0.06956075136204463, -0.08339684659969865, -0.08145238865099302, -0.0777966734407602, -0.059484668695386836, -0.028437188669428622, -0.008809977085659162, 0.005029286151159226, -0.0001886988511888164, -0.018391485606060735], [0.17316163301425597, 0.1507260918700634, 0.1100888356858889, 0.08556029933152595, 0.04658062103856118, 0.05855294620821166, 0.05829304738995316, 0.047589590424725055, 0.058386163048819446, 0.050284291543465746, 0.05767808498040951, 0.05510084873602295], [0.9170325141399684, 0.9383432024758472, 0.94077522157629, 0.9161013827001239, 0.8768202793847037, 0.7826554331229998, 0.700533587292334, 0.6355117994434498, 0.5682583223629294, 0.5228388101859699, 0.4945704655833359, 0.47714733221898126]]
bias = [0.09022463414025064, 0.05310089808707067, -0.010584777563736708, -0.11244323837563804, -0.20168911922597965, -0.25077721945008896, -0.2910446951042583, -0.3753722136318028, -0.4259289500502633, -0.4670443979630258, -0.540921453222484, -0.5996281161946859]
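# Quick shape check on the pretrained parameters, a minimal sanity-check
# sketch: the model maps 24 hourly temperatures to 12 predicted values, so
# weights should be 24 x 12 and bias should have 12 entries.
assert len(weights) == 24 and all(len(row) == 12 for row in weights)
assert len(bias) == 12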
data = []
n = int(input())  # number of input rows that follow
for i in range(n):
    row = list(map(float, input().split()))
    data.append(row)
# data = np.array(data)  # NumPy variant, unused here
X = [row[:24] for row in data]  # temperatures of the past 24 hours
k = 8  # only the first k of the 12 predicted values are printed
model = LinearRegression()
model.weights = weights  # load the pretrained parameters
model.bias = bias
pred = model.predict(X)  # predict 12 values per input row
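# Optional cross-check of the hand-rolled predict() against NumPy, a sketch;
# NumPy may be unavailable in the judge environment, hence the guard:
try:
    import numpy as np
    pred_np = np.array(X) @ np.array(weights) + np.array(bias)
    assert all(
        abs(p - q) < 1e-6
        for row, np_row in zip(pred, pred_np.tolist())
        for p, q in zip(row, np_row)
    )
except ImportError:
    pass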
for j in range(len(pred)):
    # First k predictions, one decimal place each.
    for value in pred[j][:k]:
        print(f"{value:.1f}", end=" ")
    # Pad the remaining 12 - k slots with "?".
    for i in range(12 - k):
        print("?", end=" ")
    print()