class LinearRegression:
    def __init__(self, learning_rate=0.00001, iterations=1000):
        self.learning_rate = learning_rate
        self.iterations = iterations
        self.weights, self.bias = None, None

    def predict(self, X):
        # Pure-Python equivalent of np.dot(X, self.weights) + self.bias:
        # pred[i][j] = sum_f X[i][f] * weights[f][j] + bias[j]
        return [
            [sum(a * b for a, b in zip(row, col)) + offset
             for col, offset in zip(zip(*self.weights), self.bias)]
            for row in X
        ]
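    # Hypothetical training sketch (not part of the original script, which
    # loads pretrained parameters below): batch gradient descent on mean
    # squared error, one plausible way weights of this shape could be fit.
    def fit(self, X, Y):
        n_samples, n_features, n_outputs = len(X), len(X[0]), len(Y[0])
        self.weights = [[0.0] * n_outputs for _ in range(n_features)]
        self.bias = [0.0] * n_outputs
        for _ in range(self.iterations):
            preds = self.predict(X)
            for j in range(n_outputs):
                # Residuals for forecast hour j; each output column of a
                # multi-output linear model is fit independently.
                errors = [preds[i][j] - Y[i][j] for i in range(n_samples)]
                for f in range(n_features):
                    grad = 2.0 * sum(errors[i] * X[i][f] for i in range(n_samples)) / n_samples
                    self.weights[f][j] -= self.learning_rate * grad
                self.bias[j] -= self.learning_rate * 2.0 * sum(errors) / n_samples


# Pretrained parameters: 24 rows (one per past hourly reading) by 12 columns
# (one per forecast hour) for weights, plus one bias per forecast hour.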
weights = [[0.024028504036373292, 0.015731034163068064, -0.0036443432181210225, -0.021377655245040976, -0.03950092438703785, -0.042074005911959186, -0.050960366230899924, -0.04693722427181879, -0.05541347039421262, -0.05501170219049457, -0.05417356063627014, -0.05140647460587822], [0.022150717549525974, 0.02180362261692074, 0.006213462115153843, -0.013713194508033903, -0.033872618977905083, -0.045121329350050546, -0.050288169000550685, -0.04434496745790147, -0.0427347552849529, -0.04574278400831911, -0.04447908303584095, -0.045346066224094345], [0.012174054027432626, 0.022178671160467907, 0.020181223039116587, 0.009737711686066165, -0.007617644843386485, -0.025028622778111277, -0.03329867507858944, -0.034012710668182594, -0.031131927029804392, -0.028144043906065876, -0.026843023511490233, -0.028013869972716227], [0.004432412247809334, 0.014462220214496361, 0.027452113303511834, 0.03226519331152729, 0.020127888972971006, -0.005035084031014314, -0.026424590114496074, -0.037018639455654055, -0.03618537027001528, -0.03814505607530782, -0.041755328968339445, -0.04163375878557722], [-0.010092322858434196, -0.008917745291324983, 0.005599933707452517, 0.021660516720793336, 0.02996715339950994, 0.017613304806955124, -0.003199611241314336, -0.024581104070450232, -0.035112663672300634, -0.039604590586719196, -0.043716550943564324, -0.049702762019671304], [-0.011915004582288816, -0.01175491063073802, 0.0026344303322351843, 0.01574858228310093, 0.046555975741294685, 0.06220649897313663, 0.05519129054075519, 0.026964709552531413, 0.005884855000941987, -0.003110262959382594, -0.013774350975507796, -0.020853963369140138], [-0.008468466902187475, -0.010610232039983129, -0.0034088182541126737, 0.009867065577225371, 0.038702848336594986, 0.06979870209795337, 0.0890253359089644, 0.0656658297967739, 0.035936400836243686, 0.01822831856324591, 0.00879328002537203, 0.003349857503034814], [0.004621926172828163, 0.005553348023782885, 0.009071535097616417, 0.016869724576553634, 0.025541167609232224, 0.0413115232504945, 0.05754166510055216, 0.05663514806519789, 0.03891307038165957, 0.026383682313934727, 0.020154263713784655, 0.01838646105896894], [-0.0003200860044431275, 0.0025181599662630776, -4.359730677466967e-05, -0.0007879183441437665, -0.006039267020165074, -0.003087811420903081, 0.005647833749301174, 0.014437697424327007, 0.025955910323195597, 0.028170729764869746, 0.028888783548346033, 0.029351865814922645], [0.002650694653621898, 0.0022392585677627995, 0.0041414531593619176, 0.006150735407037535, 0.0024120055408227274, 0.0013671227767218497, 0.011475098459930737, 0.025604444922743157, 0.03956220930710599, 0.04461388247497827, 0.04142750041499848, 0.043290369675010446], [0.004137253005919837, 0.0053454370089720754, 0.0035299456854100382, -1.722466475252099e-05, -0.0016435565572511953, -0.0032031784963711695, -0.0017503684826867371, 0.007107702262760005, 0.01976870164634129, 0.029212464056229792, 0.035000326644366385, 0.03996001343763169], [0.003089228710800573, 0.0017019938040580761, -0.002160126540198139, -0.002477689778670811, -0.005123515974899736, -0.010986910599879163, -0.013044508967798775, -0.00010398665749967579, 0.018944440606725097, 0.03324932947150465, 0.04386290609792696, 0.05098983672191057], [-0.00031293495457917304, -0.006640543080727833, -0.010722194428185453, -0.018032780207575247, -0.01744454461677413, -0.01862224349717793, -0.018907173803209346, -0.00032619994373886695, 0.02273083218447113, 0.03668762652303234, 0.04552977531704203, 0.05196008221630573], [-0.0006118246977072179, 
-0.005117738073307297, -0.014388387632392953, -0.021368459180146758, -0.027343660562416463, -0.03547108338803881, -0.03560834315156701, -0.014738855168261443, 0.016017011148482516, 0.03563644292994541, 0.047728359487641266, 0.05454929617769244], [0.0009616743964270985, 0.001895277360280671, -0.0016124746588910951, -0.007987332165306508, -0.014710843767950942, -0.015859868317140695, -0.004861763729700583, 0.02284206966005285, 0.05129064826750109, 0.06956111907769835, 0.08074748270218622, 0.08713743405463753], [-0.0007115751282466033, 0.003477224258619403, 0.0031237966192417515, 0.002192052189722919, -0.0006368906391679862, 0.016802379859511128, 0.04957849127809024, 0.08062677361593924, 0.09418592825442089, 0.10525530143305133, 0.11079956315444685, 0.1161777809648057], [-0.013741624110946574, -0.011545625318036996, -0.010856841952920675, -0.006043116024883094, 0.008750015778943594, 0.04872796558761019, 0.08788140815307846, 0.09929896266018819, 0.08803362107001136, 0.0809411096542573, 0.07877843371975093, 0.07936866212450051], [-0.03555379078751772, -0.03697573550805335, -0.02420886304583344, -0.003126354756225578, 0.031868241772369604, 0.07304815048995592, 0.08390700367184423, 0.06440807367027679, 0.03925465358422376, 0.030596066731795622, 0.028546412648194883, 0.032138685940850946], [-0.044740103362708034, -0.04000522483098755, -0.026182259711120204, 0.00417777213380951, 0.04421045311803189, 0.06189976573637388, 0.0538053631089471, 0.03651853325746779, 0.024015297498945334, 0.02096313625329696, 0.02557160370553545, 0.024140850984955355], [-0.029144130594187113, -0.0298720729237646, -0.012879025693120367, 0.006126156643248185, 0.02727026705185987, 0.027693619720624247, 0.019397164477422086, 0.014425795953593907, 0.013299627363379052, 0.012811486379987852, 0.015869663585804116, 0.017194018142512084], [0.03310880555092736, 0.02699951413665846, 0.03764281313704643, 0.04196795349425614, 0.03907573651851671, 0.03202584947041234, 0.029632904655316473, 0.028135996362472066, 0.02912567286728536, 0.02865698075572579, 0.026835827975199657, 0.028067350575749488], [0.14886512224586199, 0.14562941346100652, 0.13412179310516528, 0.12270017993156046, 0.10725653622547716, 0.09109526415757857, 0.08378725809772028, 0.0833936534881166, 0.08433042233640071, 0.0815108640592344, 0.07610902508853741, 0.06856113521070509], [0.3293236635770077, 0.3227748643891149, 0.30047150602763717, 0.27543144390075525, 0.24013495653302866, 0.21365972773128833, 0.19331324220223922, 0.17588890969711196, 0.16550045883706643, 0.1510832601796178, 0.14231485454788828, 0.13236973244400163], [0.5571725570037251, 0.5588528132400427, 0.5391542335695609, 0.5064548241716391, 0.46193246106599406, 0.4079882627957199, 0.3648920104649499, 0.32913429437850966, 0.29737746468340776, 0.27041360622479554, 0.2512829813471434, 0.2356684674567127]]
bias = [0.09144079320457557, 0.08138337012390025, 0.05826884766443891, 0.02330212668391184, -0.00959032857436627, -0.029624940755450684, -0.04500332208235005, -0.06988655738922422, -0.08618045727859389, -0.09989770600565935, -0.12151824996907695, -0.13890616467429365]
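# Optional sanity check (not in the original): the pretrained shapes must
# match the model, 24 input hours mapping to 12 forecast hours.
assert len(weights) == 24 and all(len(row) == 12 for row in weights)
assert len(bias) == 12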
data = []
n = int(input())
for i in range(n):
    row = list(map(float, input().split()))
    data.append(row)
#data = np.array(data)
X = [row[:24] for row in data] # Temperatures of the past 24 hours
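# (Any columns beyond the first 24, e.g. ground-truth targets if present in
# the input, are dropped; only the 24 past readings feed the model.)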
lr = 0.001
iters = 6000
k = 8  # print the first k forecast hours; the remaining 12-k are unknown
model = LinearRegression(learning_rate=lr, iterations=iters)
model.weights = weights  # load the pretrained parameters; no training needed
model.bias = bias
pred = model.predict(X)
for j in range(len(pred)):
    # First k forecast hours, printed to one decimal place.
    for value in pred[j][:k]:
        print(f"{value:.1f}", end=" ")
    # Remaining 12-k hours are marked as unknown.
    for i in range(12 - k):
        print("?", end=" ")
    print()
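# Example (hypothetical numbers): with n=1 and one line of hourly readings,
# the script prints k=8 one-decimal forecasts followed by 12-k = 4 question
# marks, e.g. "11.4 11.2 11.0 10.9 10.7 10.6 10.5 10.4 ? ? ? ?".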