Coverage for adhoc-cicd-odoo-odoo / odoo / tools / speedscope.py: 7%

139 statements  

« prev     ^ index     » next       coverage.py v7.13.4, created at 2026-03-09 18:15 +0000

# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import reprlib

# Shared Repr instance used to truncate long strings (e.g. SQL queries)
# to at most 150 characters when building frame labels.
shortener = reprlib.Repr()
shortener.maxstring = 150
shorten = shortener.repr

class Speedscope:
    """Assemble profiling data into the speedscope.app file format.

    Raw profiles are registered with :meth:`add`, turned into "evented"
    speedscope profiles with :meth:`add_output` (or :meth:`add_default`),
    and the final JSON-serializable document is produced by :meth:`make`.
    """

    def __init__(self, name='Speedscope', init_stack_trace=None):
        self.init_stack_trace = init_stack_trace or []
        self.init_stack_trace_level = len(self.init_stack_trace)
        self.caller_frame = None
        # Normalize the initial stack trace frames in place.
        self.convert_stack(self.init_stack_trace)

        # Deepest initial frame; used as the starting caller when
        # converting per-entry stacks in add().
        self.init_caller_frame = self.init_stack_trace[-1] if self.init_stack_trace else None
        self.profiles_raw = {}
        self.name = name
        self.frames_indexes = {}
        self.frame_count = 0
        self.profiles = []

    def add(self, key, profile):
        """Register a raw ``profile`` (a list of entry dicts) under ``key``."""
        for entry in profile:
            # Every entry's stack is relative to the initial stack trace.
            self.caller_frame = self.init_caller_frame
            self.convert_stack(entry['stack'] or [])
            if 'query' in entry:
                # Add a synthetic leaf frame so the query itself is visible.
                label = f"sql({shorten(entry['query'])})"
                entry['stack'].append((label, entry['full_query'], None))
        self.profiles_raw[key] = profile

    def convert_stack(self, stack):
        """Rewrite ``stack`` in place as (method, caller_info, caller_lineno) tuples.

        Each frame is labelled with the location of its caller (the previous
        frame), carried across successive calls through ``self.caller_frame``.
        """
        for position, frame in enumerate(stack):
            if self.caller_frame and len(self.caller_frame) == 4:
                caller = self.caller_frame
                info = f"called at {caller[0]} ({caller[3].strip()})"
                lineno = caller[1]
            else:
                info = ''
                lineno = ''
            stack[position] = (frame[2], info, lineno)
            self.caller_frame = frame

    def add_output(self, names, complete=True, display_name=None, use_context=True, constant_time=False, context_per_name=None, **params):
        """
        Add a profile output to the list of profiles.

        :param names: list of keys to combine in this output. Keys correspond to the ones used in add
        :param display_name: name of the tab for this output
        :param complete: display the complete stack. If False, don't display the stack below the profiler.
        :param use_context: use execution context (added by ExecutionContext context manager) to display the profile.
        :param constant_time: hide temporality. Useful to compare query counts
        :param context_per_name: a dictionary of additional context per name
        """
        selected = []
        for name in names:
            raw = self.profiles_raw.get(name)
            if raw:
                selected += raw
        selected.sort(key=lambda entry: entry['start'])
        events = self.process(selected, use_context=use_context, constant_time=constant_time, **params)
        if not events:
            return self
        start = events[0]['at']
        end = events[-1]['at']

        if complete:
            # Wrap the events with the initial stack trace so the flame
            # graph shows the full call chain down to the profiler itself.
            init_ids = self.stack_to_ids(self.init_stack_trace, use_context and selected[0].get('exec_context'))
            opening = [{"type": "O", "frame": frame_id, "at": start} for frame_id in init_ids]
            closing = [{"type": "C", "frame": frame_id, "at": end} for frame_id in reversed(init_ids)]
            events = opening + events + closing

        self.profiles.append({
            "name": display_name or ','.join(names),
            "type": "evented",
            "unit": "entries" if constant_time else "seconds",
            "startValue": 0,
            "endValue": end - start,
            "events": events,
        })
        return self

    def add_default(self, **params):
        """Populate outputs with the standard set of views for the collected profiles."""
        if len(self.profiles_raw) > 1:
            if params['combined_profile']:
                self.add_output(self.profiles_raw, display_name='Combined', **params)
        for key, profile in self.profiles_raw.items():
            # SQL profiles are recognized by a 'query' key on their entries.
            if profile and profile[0].get('query'):
                if params['sql_no_gap_profile']:
                    self.add_output([key], hide_gaps=True, display_name=f'{key} (no gap)', **params)
                if params['sql_density_profile']:
                    self.add_output([key], continuous=False, complete=False, display_name=f'{key} (density)', **params)
            elif params['frames_profile']:
                self.add_output([key], display_name=key, **params)
        return self

    def make(self, **params):
        """Build and return the complete speedscope document as a dict."""
        if not self.profiles:
            self.add_default(**params)
        # frames_indexes preserves insertion order, which matches the
        # sequential ids handed out by get_frame_id().
        shared_frames = [
            {"name": frame[0], "file": frame[1], "line": frame[2]}
            for frame in self.frames_indexes
        ]
        return {
            "name": self.name,
            "activeProfileIndex": 0,
            "$schema": "https://www.speedscope.app/file-format-schema.json",
            "shared": {"frames": shared_frames},
            "profiles": self.profiles,
        }

    def get_frame_id(self, frame):
        """Return the stable index of ``frame``, registering it on first use."""
        frame_id = self.frames_indexes.get(frame)
        if frame_id is None:
            frame_id = self.frame_count
            self.frames_indexes[frame] = frame_id
            self.frame_count += 1
        return frame_id

    def stack_to_ids(self, stack, context, aggregate_sql=False, stack_offset=0):
        """
        :param stack: A list of hashable frames
        :param context: an iterable of (level, value) pairs ordered by level
        :param stack_offset: offset level for stack

        Interleave ``stack`` frames with ``context`` frames and return the
        resulting list of frame ids, inserting each context value at its
        corresponding level.
        """
        ids = []
        contexts = iter(context or ())
        current = next(contexts, (None, None))
        # Drop context entries that sit below the stack offset.
        while current[0] is not None and current[0] < stack_offset:
            current = next(contexts, (None, None))
        for level, frame in enumerate(stack, start=stack_offset + 1):
            if aggregate_sql:
                # Blank the file information so identical queries merge.
                frame = (frame[0], '', frame[2])
            while current[0] == level:
                label = ", ".join(f"{k}={v}" for k, v in current[1].items())
                ids.append(self.get_frame_id((label, '', '')))
                current = next(contexts, (None, None))
            ids.append(self.get_frame_id(frame))
        return ids

    def process(self, entries, continuous=True, hide_gaps=False, use_context=True, constant_time=False, aggregate_sql=False, **params):
        """Convert sorted raw ``entries`` into a list of speedscope O/C events.

        ``constant_time`` is mainly useful to hide temporality when focusing
        on sql determinism.
        """
        if not entries:
            return []
        entry_end = previous_end = None
        events = []
        current_ids = []
        origin = entries[0]['start']

        # Append a sentinel closing entry if the last one leaves frames open.
        final = entries[-1]
        if final['stack']:
            entries.append({'stack': [], 'start': final['start'] + final.get('time', 0)})

        for index, entry in enumerate(entries):
            if constant_time:
                entry_start = close_time = index
            else:
                previous_end = entry_end
                entry_start = previous_end if (hide_gaps and previous_end) else entry['start'] - origin

            if previous_end and previous_end > entry_start:
                # Skip entries starting before the previous entry ended (overlap).
                continue

            close_time = min(entry_start, previous_end) if previous_end else entry_start

            entry_time = entry.get('time')
            entry_end = entry_start + entry_time if entry_time is not None else None

            entry_ids = self.stack_to_ids(
                entry['stack'] or [],
                use_context and entry.get('exec_context'),
                aggregate_sql,
                self.init_stack_trace_level,
            )

            # Length of the common prefix between the current and new stacks;
            # in non-continuous mode always restart from the root (level 0).
            level = 0
            if continuous:
                level = -1
                for known, new in zip(current_ids, entry_ids):
                    level += 1
                    if known != new:
                        break
                else:
                    level += 1

            # Close frames no longer on the stack (deepest first), then open
            # the new ones.
            for frame_id in reversed(current_ids[level:]):
                events.append({"type": "C", "frame": frame_id, "at": close_time})
            for frame_id in entry_ids[level:]:
                events.append({"type": "O", "frame": frame_id, "at": entry_start})
            current_ids = entry_ids

        return events