Chromium Code Reviews

Side by Side Diff: PRESUBMIT.py

Issue 3010153002: PRESUBMIT: Enforce tracker prefix for all BUG entries (Closed)
Patch Set: gclient sync Created 3 years, 3 months ago
1 # Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 1 # Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
2 # 2 #
3 # Use of this source code is governed by a BSD-style license 3 # Use of this source code is governed by a BSD-style license
4 # that can be found in the LICENSE file in the root of the source 4 # that can be found in the LICENSE file in the root of the source
5 # tree. An additional intellectual property rights grant can be found 5 # tree. An additional intellectual property rights grant can be found
6 # in the file PATENTS. All contributing project authors may 6 # in the file PATENTS. All contributing project authors may
7 # be found in the AUTHORS file in the root of the source tree. 7 # be found in the AUTHORS file in the root of the source tree.
8 8
9 import json 9 import json
10 import os 10 import os
(...skipping 88 matching lines...)
99 p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, 99 p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
100 cwd=cwd) 100 cwd=cwd)
101 stdout = p.stdout.read() 101 stdout = p.stdout.read()
102 stderr = p.stderr.read() 102 stderr = p.stderr.read()
103 p.wait() 103 p.wait()
104 p.stdout.close() 104 p.stdout.close()
105 p.stderr.close() 105 p.stderr.close()
106 return p.returncode, stdout, stderr 106 return p.returncode, stdout, stderr
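
Note: _RunCommand drains stdout to EOF before it starts reading stderr, which can stall if a child process fills the stderr pipe buffer in the meantime; subprocess's communicate() reads both pipes together. A minimal sketch of that variant with the same (returncode, stdout, stderr) contract, shown only as an illustration and not as part of this change:

import subprocess

def _RunCommandWithCommunicate(command, cwd):
  # Same contract as _RunCommand above; communicate() drains stdout and
  # stderr concurrently, avoiding a full-pipe deadlock on chatty children.
  p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                       cwd=cwd)
  stdout, stderr = p.communicate()
  return p.returncode, stdout, stderr
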
107 107
108 108
109 def _VerifyNativeApiHeadersListIsValid(input_api, output_api): 109 def VerifyNativeApiHeadersListIsValid(input_api, output_api):
110 """Ensures the list of native API header directories is up to date.""" 110 """Ensures the list of native API header directories is up to date."""
111 non_existing_paths = [] 111 non_existing_paths = []
112 native_api_full_paths = [ 112 native_api_full_paths = [
113 input_api.os_path.join(input_api.PresubmitLocalPath(), 113 input_api.os_path.join(input_api.PresubmitLocalPath(),
114 *path.split('/')) for path in API_DIRS] 114 *path.split('/')) for path in API_DIRS]
115 for path in native_api_full_paths: 115 for path in native_api_full_paths:
116 if not os.path.isdir(path): 116 if not os.path.isdir(path):
117 non_existing_paths.append(path) 117 non_existing_paths.append(path)
118 if non_existing_paths: 118 if non_existing_paths:
119 return [output_api.PresubmitError( 119 return [output_api.PresubmitError(
(...skipping 15 matching lines...)
135 simple, 1-2 weeks might be good; if they need to do serious work, 135 simple, 1-2 weeks might be good; if they need to do serious work,
136 up to 3 months may be called for.) 136 up to 3 months may be called for.)
137 4. Update/inform existing downstream code owners to stop using the 137 4. Update/inform existing downstream code owners to stop using the
138 deprecated stuff. (Send announcements to 138 deprecated stuff. (Send announcements to
139 discuss-webrtc@googlegroups.com and webrtc-users@google.com.) 139 discuss-webrtc@googlegroups.com and webrtc-users@google.com.)
140 5. Remove the deprecated stuff, once the agreed-upon amount of time 140 5. Remove the deprecated stuff, once the agreed-upon amount of time
141 has passed. 141 has passed.
142 Related files: 142 Related files:
143 """ 143 """
144 144
145 def _CheckNativeApiHeaderChanges(input_api, output_api): 145 def CheckNativeApiHeaderChanges(input_api, output_api):
146 """Checks to remind proper changing of native APIs.""" 146 """Checks to remind proper changing of native APIs."""
147 files = [] 147 files = []
148 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile): 148 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
149 if f.LocalPath().endswith('.h'): 149 if f.LocalPath().endswith('.h'):
150 for path in API_DIRS: 150 for path in API_DIRS:
151 if os.path.dirname(f.LocalPath()) == path: 151 if os.path.dirname(f.LocalPath()) == path:
152 files.append(f) 152 files.append(f)
153 153
154 if files: 154 if files:
155 return [output_api.PresubmitNotifyResult(API_CHANGE_MSG, files)] 155 return [output_api.PresubmitNotifyResult(API_CHANGE_MSG, files)]
156 return [] 156 return []
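
Note: the reminder only fires for headers whose directory is exactly one of the API_DIRS entries; a header in a subdirectory of an API directory does not match. A small self-contained sketch of that matching rule, using a one-entry stand-in for API_DIRS (the real list is defined earlier in this file, outside this excerpt):

import os

ILLUSTRATIVE_API_DIRS = ('webrtc/api',)  # Stand-in only; see API_DIRS above.

def _IsNativeApiHeader(local_path):
  # Exact dirname match, mirroring the loop in CheckNativeApiHeaderChanges.
  return (local_path.endswith('.h') and
          os.path.dirname(local_path) in ILLUSTRATIVE_API_DIRS)

assert _IsNativeApiHeader('webrtc/api/peerconnectioninterface.h')
assert not _IsNativeApiHeader('webrtc/api/audio/audio_mixer.h')  # Subdirectory.
assert not _IsNativeApiHeader('webrtc/api/jsep.cc')  # Not a header.
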
157 157
158 158
159 def _CheckNoIOStreamInHeaders(input_api, output_api): 159 def CheckNoIOStreamInHeaders(input_api, output_api):
160 """Checks to make sure no .h files include <iostream>.""" 160 """Checks to make sure no .h files include <iostream>."""
161 files = [] 161 files = []
162 pattern = input_api.re.compile(r'^#include\s*<iostream>', 162 pattern = input_api.re.compile(r'^#include\s*<iostream>',
163 input_api.re.MULTILINE) 163 input_api.re.MULTILINE)
164 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile): 164 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
165 if not f.LocalPath().endswith('.h'): 165 if not f.LocalPath().endswith('.h'):
166 continue 166 continue
167 contents = input_api.ReadFile(f) 167 contents = input_api.ReadFile(f)
168 if pattern.search(contents): 168 if pattern.search(contents):
169 files.append(f) 169 files.append(f)
170 170
171 if len(files): 171 if len(files):
172 return [output_api.PresubmitError( 172 return [output_api.PresubmitError(
173 'Do not #include <iostream> in header files, since it inserts static ' + 173 'Do not #include <iostream> in header files, since it inserts static ' +
174 'initialization into every file including the header. Instead, ' + 174 'initialization into every file including the header. Instead, ' +
175 '#include <ostream>. See http://crbug.com/94794', 175 '#include <ostream>. See http://crbug.com/94794',
176 files)] 176 files)]
177 return [] 177 return []
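
Note: the MULTILINE anchor catches the directive on any line of the header, with or without whitespace after #include, while a line that merely mentions the include mid-text is ignored. The same pattern in isolation, with made-up sample strings:

import re

pattern = re.compile(r'^#include\s*<iostream>', re.MULTILINE)
assert pattern.search('// logging helpers\n#include <iostream>\n')
assert pattern.search('#include<iostream>\n')  # \s* tolerates a missing space.
assert not pattern.search('// do not #include <iostream> in headers\n')
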
178 178
179 179
180 def _CheckNoPragmaOnce(input_api, output_api): 180 def CheckNoPragmaOnce(input_api, output_api):
181 """Make sure that #pragma once is not used in header files.""" 181 """Make sure that #pragma once is not used in header files."""
182 files = [] 182 files = []
183 pattern = input_api.re.compile(r'^#pragma\s+once', 183 pattern = input_api.re.compile(r'^#pragma\s+once',
184 input_api.re.MULTILINE) 184 input_api.re.MULTILINE)
185 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile): 185 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
186 if not f.LocalPath().endswith('.h'): 186 if not f.LocalPath().endswith('.h'):
187 continue 187 continue
188 contents = input_api.ReadFile(f) 188 contents = input_api.ReadFile(f)
189 if pattern.search(contents): 189 if pattern.search(contents):
190 files.append(f) 190 files.append(f)
191 191
192 if files: 192 if files:
193 return [output_api.PresubmitError( 193 return [output_api.PresubmitError(
194 'Do not use #pragma once in header files.\n' 194 'Do not use #pragma once in header files.\n'
195 'See http://www.chromium.org/developers/coding-style#TOC-File-headers', 195 'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
196 files)] 196 files)]
197 return [] 197 return []
198 198
199 199
200 def _CheckNoFRIEND_TEST(input_api, output_api): # pylint: disable=invalid-name 200 def CheckNoFRIEND_TEST(input_api, output_api): # pylint: disable=invalid-name
201 """Make sure that gtest's FRIEND_TEST() macro is not used, the 201 """Make sure that gtest's FRIEND_TEST() macro is not used, the
202 FRIEND_TEST_ALL_PREFIXES() macro from testsupport/gtest_prod_util.h should be 202 FRIEND_TEST_ALL_PREFIXES() macro from testsupport/gtest_prod_util.h should be
203 used instead since that allows for FLAKY_, FAILS_ and DISABLED_ prefixes.""" 203 used instead since that allows for FLAKY_, FAILS_ and DISABLED_ prefixes."""
204 problems = [] 204 problems = []
205 205
206 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.h')) 206 file_filter = lambda f: f.LocalPath().endswith(('.cc', '.h'))
207 for f in input_api.AffectedFiles(file_filter=file_filter): 207 for f in input_api.AffectedFiles(file_filter=file_filter):
208 for line_num, line in f.ChangedContents(): 208 for line_num, line in f.ChangedContents():
209 if 'FRIEND_TEST(' in line: 209 if 'FRIEND_TEST(' in line:
210 problems.append(' %s:%d' % (f.LocalPath(), line_num)) 210 problems.append(' %s:%d' % (f.LocalPath(), line_num))
211 211
212 if not problems: 212 if not problems:
213 return [] 213 return []
214 return [output_api.PresubmitPromptWarning('WebRTC\'s code should not use ' 214 return [output_api.PresubmitPromptWarning('WebRTC\'s code should not use '
215 'gtest\'s FRIEND_TEST() macro. Include testsupport/gtest_prod_util.h and ' 215 'gtest\'s FRIEND_TEST() macro. Include testsupport/gtest_prod_util.h and '
216 'use FRIEND_TEST_ALL_PREFIXES() instead.\n' + '\n'.join(problems))] 216 'use FRIEND_TEST_ALL_PREFIXES() instead.\n' + '\n'.join(problems))]
217 217
218 218
219 def _IsLintBlacklisted(blacklist_paths, file_path): 219 def IsLintBlacklisted(blacklist_paths, file_path):
220 """ Checks if a file is blacklisted for lint check.""" 220 """ Checks if a file is blacklisted for lint check."""
221 for path in blacklist_paths: 221 for path in blacklist_paths:
222 if file_path == path or os.path.dirname(file_path).startswith(path): 222 if file_path == path or os.path.dirname(file_path).startswith(path):
223 return True 223 return True
224 return False 224 return False
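
Note: blacklisting is a per-directory prefix test: an entry covers the directory itself and everything below it, and because the comparison is a plain startswith() on the file's dirname, a sibling directory that shares the prefix textually is covered as well. Illustration with a made-up blacklist entry and made-up paths:

import os

def _IsBlacklistedExample(blacklist_paths, file_path):
  # Same logic as IsLintBlacklisted above, repeated so the example runs
  # stand-alone.
  for path in blacklist_paths:
    if file_path == path or os.path.dirname(file_path).startswith(path):
      return True
  return False

blacklist = ['webrtc/examples']
assert _IsBlacklistedExample(blacklist, 'webrtc/examples/demo/main.cc')
assert not _IsBlacklistedExample(blacklist, 'webrtc/api/jsep.cc')
# The textual prefix match also covers this sibling directory:
assert _IsBlacklistedExample(blacklist, 'webrtc/examples_legacy/util.cc')
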
225 225
226 226
227 def _CheckApprovedFilesLintClean(input_api, output_api, 227 def CheckApprovedFilesLintClean(input_api, output_api,
228 source_file_filter=None): 228 source_file_filter=None):
229 """Checks that all new or non-blacklisted .cc and .h files pass cpplint.py. 229 """Checks that all new or non-blacklisted .cc and .h files pass cpplint.py.
230 This check is based on _CheckChangeLintsClean in 230 This check is based on CheckChangeLintsClean in
231 depot_tools/presubmit_canned_checks.py but has fewer filters and only checks 231 depot_tools/presubmit_canned_checks.py but has fewer filters and only checks
232 added files.""" 232 added files."""
233 result = [] 233 result = []
234 234
235 # Initialize cpplint. 235 # Initialize cpplint.
236 import cpplint 236 import cpplint
237 # Access to a protected member _XX of a client class 237 # Access to a protected member _XX of a client class
238 # pylint: disable=W0212 238 # pylint: disable=W0212
239 cpplint._cpplint_state.ResetErrorCounts() 239 cpplint._cpplint_state.ResetErrorCounts()
240 240
241 lint_filters = cpplint._Filters() 241 lint_filters = cpplint._Filters()
242 lint_filters.extend(BLACKLIST_LINT_FILTERS) 242 lint_filters.extend(BLACKLIST_LINT_FILTERS)
243 cpplint._SetFilters(','.join(lint_filters)) 243 cpplint._SetFilters(','.join(lint_filters))
244 244
245 # Create a platform independent blacklist for cpplint. 245 # Create a platform independent blacklist for cpplint.
246 blacklist_paths = [input_api.os_path.join(*path.split('/')) 246 blacklist_paths = [input_api.os_path.join(*path.split('/'))
247 for path in CPPLINT_BLACKLIST] 247 for path in CPPLINT_BLACKLIST]
248 248
249 # Use the strictest verbosity level for cpplint.py (level 1) which is the 249 # Use the strictest verbosity level for cpplint.py (level 1) which is the
250 # default when running cpplint.py from command line. To make it possible to 250 # default when running cpplint.py from command line. To make it possible to
251 # work with not-yet-converted code, we're only applying it to new (or 251 # work with not-yet-converted code, we're only applying it to new (or
252 # moved/renamed) files and files not listed in CPPLINT_BLACKLIST. 252 # moved/renamed) files and files not listed in CPPLINT_BLACKLIST.
253 verbosity_level = 1 253 verbosity_level = 1
254 files = [] 254 files = []
255 for f in input_api.AffectedSourceFiles(source_file_filter): 255 for f in input_api.AffectedSourceFiles(source_file_filter):
256 # Note that moved/renamed files also count as added. 256 # Note that moved/renamed files also count as added.
257 if f.Action() == 'A' or not _IsLintBlacklisted(blacklist_paths, 257 if f.Action() == 'A' or not IsLintBlacklisted(blacklist_paths,
258 f.LocalPath()): 258 f.LocalPath()):
259 files.append(f.AbsoluteLocalPath()) 259 files.append(f.AbsoluteLocalPath())
260 260
261 for file_name in files: 261 for file_name in files:
262 cpplint.ProcessFile(file_name, verbosity_level) 262 cpplint.ProcessFile(file_name, verbosity_level)
263 263
264 if cpplint._cpplint_state.error_count > 0: 264 if cpplint._cpplint_state.error_count > 0:
265 if input_api.is_committing: 265 if input_api.is_committing:
266 res_type = output_api.PresubmitError 266 res_type = output_api.PresubmitError
267 else: 267 else:
268 res_type = output_api.PresubmitPromptWarning 268 res_type = output_api.PresubmitPromptWarning
269 result = [res_type('Changelist failed cpplint.py check.')] 269 result = [res_type('Changelist failed cpplint.py check.')]
270 270
271 return result 271 return result
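
Note: cpplint takes its filter configuration as a single comma-separated string, so BLACKLIST_LINT_FILTERS (defined earlier, outside this excerpt) is appended to whatever defaults cpplint already carries before any file is processed. A sketch of the string that ends up in cpplint._SetFilters(), with made-up filter names standing in for the real lists:

default_filters = ['-build/include_what_you_use']
blacklist_lint_filters = ['-whitespace/operators', '-readability/todo']
lint_filters = default_filters + blacklist_lint_filters
print(','.join(lint_filters))
# -> -build/include_what_you_use,-whitespace/operators,-readability/todo
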
272 272
273 def _CheckNoSourcesAbove(input_api, gn_files, output_api): 273 def CheckNoSourcesAbove(input_api, gn_files, output_api):
274 # Disallow referencing source files with paths above the GN file location. 274 # Disallow referencing source files with paths above the GN file location.
275 source_pattern = input_api.re.compile(r' +sources \+?= \[(.*?)\]', 275 source_pattern = input_api.re.compile(r' +sources \+?= \[(.*?)\]',
276 re.MULTILINE | re.DOTALL) 276 re.MULTILINE | re.DOTALL)
277 file_pattern = input_api.re.compile(r'"((\.\./.*?)|(//.*?))"') 277 file_pattern = input_api.re.compile(r'"((\.\./.*?)|(//.*?))"')
278 violating_gn_files = set() 278 violating_gn_files = set()
279 violating_source_entries = [] 279 violating_source_entries = []
280 for gn_file in gn_files: 280 for gn_file in gn_files:
281 contents = input_api.ReadFile(gn_file) 281 contents = input_api.ReadFile(gn_file)
282 for source_block_match in source_pattern.finditer(contents): 282 for source_block_match in source_pattern.finditer(contents):
283 # Find all source list entries starting with ../ in the source block 283 # Find all source list entries starting with ../ in the source block
284 # (exclude overrides entries). 284 # (exclude overrides entries).
285 for file_list_match in file_pattern.finditer(source_block_match.group(1)): 285 for file_list_match in file_pattern.finditer(source_block_match.group(1)):
286 source_file = file_list_match.group(1) 286 source_file = file_list_match.group(1)
287 if 'overrides/' not in source_file: 287 if 'overrides/' not in source_file:
288 violating_source_entries.append(source_file) 288 violating_source_entries.append(source_file)
289 violating_gn_files.add(gn_file) 289 violating_gn_files.add(gn_file)
290 if violating_gn_files: 290 if violating_gn_files:
291 return [output_api.PresubmitError( 291 return [output_api.PresubmitError(
292 'Referencing source files above the directory of the GN file is not ' 292 'Referencing source files above the directory of the GN file is not '
293 'allowed. Please introduce new GN targets in the proper location ' 293 'allowed. Please introduce new GN targets in the proper location '
294 'instead.\n' 294 'instead.\n'
295 'Invalid source entries:\n' 295 'Invalid source entries:\n'
296 '%s\n' 296 '%s\n'
297 'Violating GN files:' % '\n'.join(violating_source_entries), 297 'Violating GN files:' % '\n'.join(violating_source_entries),
298 items=violating_gn_files)] 298 items=violating_gn_files)]
299 return [] 299 return []
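
Note: the two regexes cooperate: the first isolates each sources block in a GN file, the second extracts entries that start with "../" or "//" (paths reaching above the GN file or absolute in-tree paths); entries under overrides/ are then skipped. A self-contained sketch of the matching against a made-up GN snippet:

import re

gn_contents = '''
rtc_static_library("example_target") {
  sources = [
    "../common/helper.cc",
    "example.cc",
    "//webrtc/rtc_base/checks.cc",
  ]
}
'''

source_pattern = re.compile(r' +sources \+?= \[(.*?)\]', re.MULTILINE | re.DOTALL)
file_pattern = re.compile(r'"((\.\./.*?)|(//.*?))"')

violations = []
for source_block_match in source_pattern.finditer(gn_contents):
  for file_list_match in file_pattern.finditer(source_block_match.group(1)):
    violations.append(file_list_match.group(1))

assert violations == ['../common/helper.cc', '//webrtc/rtc_base/checks.cc']
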
300 300
301 def _CheckNoMixingCAndCCSources(input_api, gn_files, output_api): 301 def CheckNoMixingCAndCCSources(input_api, gn_files, output_api):
302 # Disallow mixing .c and .cc source files in the same target. 302 # Disallow mixing .c and .cc source files in the same target.
303 source_pattern = input_api.re.compile(r' +sources \+?= \[(.*?)\]', 303 source_pattern = input_api.re.compile(r' +sources \+?= \[(.*?)\]',
304 re.MULTILINE | re.DOTALL) 304 re.MULTILINE | re.DOTALL)
305 file_pattern = input_api.re.compile(r'"(.*)"') 305 file_pattern = input_api.re.compile(r'"(.*)"')
306 violating_gn_files = dict() 306 violating_gn_files = dict()
307 for gn_file in gn_files: 307 for gn_file in gn_files:
308 contents = input_api.ReadFile(gn_file) 308 contents = input_api.ReadFile(gn_file)
309 for source_block_match in source_pattern.finditer(contents): 309 for source_block_match in source_pattern.finditer(contents):
310 c_files = [] 310 c_files = []
311 cc_files = [] 311 cc_files = []
312 for file_list_match in file_pattern.finditer(source_block_match.group(1)): 312 for file_list_match in file_pattern.finditer(source_block_match.group(1)):
313 source_file = file_list_match.group(1) 313 source_file = file_list_match.group(1)
314 if source_file.endswith('.c'): 314 if source_file.endswith('.c'):
315 c_files.append(source_file) 315 c_files.append(source_file)
316 if source_file.endswith('.cc'): 316 if source_file.endswith('.cc'):
317 cc_files.append(source_file) 317 cc_files.append(source_file)
318 if c_files and cc_files: 318 if c_files and cc_files:
319 violating_gn_files[gn_file.LocalPath()] = sorted(c_files + cc_files) 319 violating_gn_files[gn_file.LocalPath()] = sorted(c_files + cc_files)
320 if violating_gn_files: 320 if violating_gn_files:
321 return [output_api.PresubmitError( 321 return [output_api.PresubmitError(
322 'GN targets cannot mix .cc and .c source files. Please create a ' 322 'GN targets cannot mix .cc and .c source files. Please create a '
323 'separate target for each collection of sources.\n' 323 'separate target for each collection of sources.\n'
324 'Mixed sources: \n' 324 'Mixed sources: \n'
325 '%s\n' 325 '%s\n'
326 'Violating GN files:' % json.dumps(violating_gn_files, indent=2), 326 'Violating GN files:' % json.dumps(violating_gn_files, indent=2),
327 items=violating_gn_files.keys())] 327 items=violating_gn_files.keys())]
328 return [] 328 return []
329 329
330 def _CheckNoPackageBoundaryViolations(input_api, gn_files, output_api): 330 def CheckNoPackageBoundaryViolations(input_api, gn_files, output_api):
331 cwd = input_api.PresubmitLocalPath() 331 cwd = input_api.PresubmitLocalPath()
332 script_path = os.path.join('tools_webrtc', 'presubmit_checks_lib', 332 script_path = os.path.join('tools_webrtc', 'presubmit_checks_lib',
333 'check_package_boundaries.py') 333 'check_package_boundaries.py')
334 webrtc_path = os.path.join('webrtc') 334 webrtc_path = os.path.join('webrtc')
335 command = [sys.executable, script_path, webrtc_path] 335 command = [sys.executable, script_path, webrtc_path]
336 command += [gn_file.LocalPath() for gn_file in gn_files] 336 command += [gn_file.LocalPath() for gn_file in gn_files]
337 returncode, _, stderr = _RunCommand(command, cwd) 337 returncode, _, stderr = _RunCommand(command, cwd)
338 if returncode: 338 if returncode:
339 return [output_api.PresubmitError( 339 return [output_api.PresubmitError(
340 'There are package boundary violations in the following GN files:\n\n' 340 'There are package boundary violations in the following GN files:\n\n'
341 '%s' % stderr)] 341 '%s' % stderr)]
342 return [] 342 return []
343 343
344 def _CheckGnChanges(input_api, output_api): 344 def CheckGnChanges(input_api, output_api):
345 source_file_filter = lambda x: input_api.FilterSourceFile( 345 source_file_filter = lambda x: input_api.FilterSourceFile(
346 x, white_list=(r'.+\.(gn|gni)$',)) 346 x, white_list=(r'.+\.(gn|gni)$',))
347 347
348 gn_files = [] 348 gn_files = []
349 for f in input_api.AffectedSourceFiles(source_file_filter): 349 for f in input_api.AffectedSourceFiles(source_file_filter):
350 if f.LocalPath().startswith('webrtc'): 350 if f.LocalPath().startswith('webrtc'):
351 gn_files.append(f) 351 gn_files.append(f)
352 352
353 result = [] 353 result = []
354 if gn_files: 354 if gn_files:
355 result.extend(_CheckNoSourcesAbove(input_api, gn_files, output_api)) 355 result.extend(CheckNoSourcesAbove(input_api, gn_files, output_api))
356 result.extend(_CheckNoMixingCAndCCSources(input_api, gn_files, output_api)) 356 result.extend(CheckNoMixingCAndCCSources(input_api, gn_files, output_api))
357 result.extend(_CheckNoPackageBoundaryViolations( 357 result.extend(CheckNoPackageBoundaryViolations(
358 input_api, gn_files, output_api)) 358 input_api, gn_files, output_api))
359 return result 359 return result
360 360
361 def _CheckUnwantedDependencies(input_api, output_api): 361 def CheckUnwantedDependencies(input_api, output_api):
362 """Runs checkdeps on #include statements added in this 362 """Runs checkdeps on #include statements added in this
363 change. Breaking - rules is an error, breaking ! rules is a 363 change. Breaking - rules is an error, breaking ! rules is a
364 warning. 364 warning.
365 """ 365 """
366 # Copied from Chromium's src/PRESUBMIT.py. 366 # Copied from Chromium's src/PRESUBMIT.py.
367 367
368 # We need to wait until we have an input_api object and use this 368 # We need to wait until we have an input_api object and use this
369 # roundabout construct to import checkdeps because this file is 369 # roundabout construct to import checkdeps because this file is
370 # eval-ed and thus doesn't have __file__. 370 # eval-ed and thus doesn't have __file__.
371 original_sys_path = sys.path 371 original_sys_path = sys.path
(...skipping 43 matching lines...)
415 if warning_descriptions: 415 if warning_descriptions:
416 results.append(output_api.PresubmitPromptOrNotify( 416 results.append(output_api.PresubmitPromptOrNotify(
417 'You added one or more #includes of files that are temporarily\n' 417 'You added one or more #includes of files that are temporarily\n'
418 'allowed but being removed. Can you avoid introducing the\n' 418 'allowed but being removed. Can you avoid introducing the\n'
419 '#include? See relevant DEPS file(s) for details and contacts.\n' 419 '#include? See relevant DEPS file(s) for details and contacts.\n'
420 'See https://cs.chromium.org/chromium/src/buildtools/checkdeps/ for ' 420 'See https://cs.chromium.org/chromium/src/buildtools/checkdeps/ for '
421 'more details about checkdeps.', 421 'more details about checkdeps.',
422 warning_descriptions)) 422 warning_descriptions))
423 return results 423 return results
424 424
425 def _CheckChangeHasBugField(input_api, output_api): 425 def CheckCommitMessageBugEntry(input_api, output_api):
426 """Check that bug entries are well-formed in commit message."""
427 bogus_bug_msg = (
428 'Bogus BUG entry: %s. Please specify the issue tracker prefix and the '
429 'issue number, separated by a colon, e.g. webrtc:123 or chromium:12345.')
430 results = []
431 for bug in (input_api.change.BUG or '').split(','):
432 bug = bug.strip()
433 if bug.lower() == 'none':
434 continue
435 if ':' not in bug:
436 try:
437 if int(bug) > 100000:
438 # Rough indicator for current chromium bugs.
439 prefix_guess = 'chromium'
440 else:
441 prefix_guess = 'webrtc'
442 results.append('BUG entry requires issue tracker prefix, e.g. %s:%s' %
443 (prefix_guess, bug))
444 except ValueError:
445 results.append(bogus_bug_msg % bug)
446 elif not re.match(r'\w+:\d+', bug):
447 results.append(bogus_bug_msg % bug)
448 return [output_api.PresubmitError(r) for r in results]
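
Note: this is the new check the CL title refers to: each comma-separated BUG entry must either be 'None' or carry an issue-tracker prefix such as webrtc:123 or chromium:12345; a bare number is rejected with a guessed prefix in the message (values above 100000 are assumed to be chromium bugs). A few illustrative inputs and the verdict the branches above produce (bug numbers are hypothetical):

import re

def _ClassifyBugEntry(bug):
  # Mirrors the branches of CheckCommitMessageBugEntry, returning a short
  # verdict for illustration instead of an output_api result.
  bug = bug.strip()
  if bug.lower() == 'none':
    return 'ok (explicitly no bug)'
  if ':' not in bug:
    try:
      int(bug)
      return 'error (missing tracker prefix)'
    except ValueError:
      return 'error (bogus entry)'
  if not re.match(r'\w+:\d+', bug):
    return 'error (bogus entry)'
  return 'ok'

assert _ClassifyBugEntry('webrtc:8170') == 'ok'
assert _ClassifyBugEntry('chromium:760395') == 'ok'
assert _ClassifyBugEntry('None') == 'ok (explicitly no bug)'
assert _ClassifyBugEntry('8170') == 'error (missing tracker prefix)'
assert _ClassifyBugEntry('b/12345') == 'error (bogus entry)'
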
449
450 def CheckChangeHasBugField(input_api, output_api):
426 """Requires that the changelist have a BUG= field. 451 """Requires that the changelist have a BUG= field.
427 452
428 This check is stricter than the one in depot_tools/presubmit_canned_checks.py 453 This check is stricter than the one in depot_tools/presubmit_canned_checks.py
429 since it fails the presubmit if the BUG= field is missing or doesn't contain 454 since it fails the presubmit if the BUG= field is missing or doesn't contain
430 a bug reference. 455 a bug reference.
431 """ 456 """
432 if input_api.change.BUG: 457 if input_api.change.BUG:
433 return [] 458 return []
434 else: 459 else:
435 return [output_api.PresubmitError( 460 return [output_api.PresubmitError(
436 'The BUG=[bug number] field is mandatory. Please create a bug and ' 461 'The BUG=[bug number] field is mandatory. Please create a bug and '
437 'reference it using either of:\n' 462 'reference it using either of:\n'
438 ' * https://bugs.webrtc.org - reference it using BUG=webrtc:XXXX\n' 463 ' * https://bugs.webrtc.org - reference it using BUG=webrtc:XXXX\n'
439 ' * https://crbug.com - reference it using BUG=chromium:XXXXXX')] 464 ' * https://crbug.com - reference it using BUG=chromium:XXXXXX')]
440 465
441 def _CheckJSONParseErrors(input_api, output_api): 466 def CheckJSONParseErrors(input_api, output_api):
442 """Check that JSON files do not contain syntax errors.""" 467 """Check that JSON files do not contain syntax errors."""
443 468
444 def FilterFile(affected_file): 469 def FilterFile(affected_file):
445 return input_api.os_path.splitext(affected_file.LocalPath())[1] == '.json' 470 return input_api.os_path.splitext(affected_file.LocalPath())[1] == '.json'
446 471
447 def GetJSONParseError(input_api, filename): 472 def GetJSONParseError(input_api, filename):
448 try: 473 try:
449 contents = input_api.ReadFile(filename) 474 contents = input_api.ReadFile(filename)
450 input_api.json.loads(contents) 475 input_api.json.loads(contents)
451 except ValueError as e: 476 except ValueError as e:
452 return e 477 return e
453 return None 478 return None
454 479
455 results = [] 480 results = []
456 for affected_file in input_api.AffectedFiles( 481 for affected_file in input_api.AffectedFiles(
457 file_filter=FilterFile, include_deletes=False): 482 file_filter=FilterFile, include_deletes=False):
458 parse_error = GetJSONParseError(input_api, 483 parse_error = GetJSONParseError(input_api,
459 affected_file.AbsoluteLocalPath()) 484 affected_file.AbsoluteLocalPath())
460 if parse_error: 485 if parse_error:
461 results.append(output_api.PresubmitError('%s could not be parsed: %s' % 486 results.append(output_api.PresubmitError('%s could not be parsed: %s' %
462 (affected_file.LocalPath(), parse_error))) 487 (affected_file.LocalPath(), parse_error)))
463 return results 488 return results
464 489
465 490
466 def _RunPythonTests(input_api, output_api): 491 def RunPythonTests(input_api, output_api):
467 def Join(*args): 492 def Join(*args):
468 return input_api.os_path.join(input_api.PresubmitLocalPath(), *args) 493 return input_api.os_path.join(input_api.PresubmitLocalPath(), *args)
469 494
470 test_directories = [ 495 test_directories = [
496 '/',
471 Join('webrtc', 'rtc_tools', 'py_event_log_analyzer'), 497 Join('webrtc', 'rtc_tools', 'py_event_log_analyzer'),
472 Join('webrtc', 'rtc_tools'), 498 Join('webrtc', 'rtc_tools'),
473 Join('webrtc', 'audio', 'test', 'unittests'), 499 Join('webrtc', 'audio', 'test', 'unittests'),
474 ] + [ 500 ] + [
475 root for root, _, files in os.walk(Join('tools_webrtc')) 501 root for root, _, files in os.walk(Join('tools_webrtc'))
476 if any(f.endswith('_test.py') for f in files) 502 if any(f.endswith('_test.py') for f in files)
477 ] 503 ]
478 504
479 tests = [] 505 tests = []
480 for directory in test_directories: 506 for directory in test_directories:
481 tests.extend( 507 tests.extend(
482 input_api.canned_checks.GetUnitTestsInDirectory( 508 input_api.canned_checks.GetUnitTestsInDirectory(
483 input_api, 509 input_api,
484 output_api, 510 output_api,
485 directory, 511 directory,
486 whitelist=[r'.+_test\.py$'])) 512 whitelist=[r'.+_test\.py$']))
487 return input_api.RunTests(tests, parallel=True) 513 return input_api.RunTests(tests, parallel=True)
488 514
489 515
490 def _CheckUsageOfGoogleProtobufNamespace(input_api, output_api): 516 def CheckUsageOfGoogleProtobufNamespace(input_api, output_api):
491 """Checks that the namespace google::protobuf has not been used.""" 517 """Checks that the namespace google::protobuf has not been used."""
492 files = [] 518 files = []
493 pattern = input_api.re.compile(r'google::protobuf') 519 pattern = input_api.re.compile(r'google::protobuf')
494 proto_utils_path = os.path.join('webrtc', 'rtc_base', 'protobuf_utils.h') 520 proto_utils_path = os.path.join('webrtc', 'rtc_base', 'protobuf_utils.h')
495 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile): 521 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
496 if f.LocalPath() in [proto_utils_path, 'PRESUBMIT.py']: 522 if f.LocalPath() in [proto_utils_path, 'PRESUBMIT.py']:
497 continue 523 continue
498 contents = input_api.ReadFile(f) 524 contents = input_api.ReadFile(f)
499 if pattern.search(contents): 525 if pattern.search(contents):
500 files.append(f) 526 files.append(f)
501 527
502 if files: 528 if files:
503 return [output_api.PresubmitError( 529 return [output_api.PresubmitError(
504 'Please avoid to use namespace `google::protobuf` directly.\n' 530 'Please avoid to use namespace `google::protobuf` directly.\n'
505 'Add a using directive in `%s` and include that header instead.' 531 'Add a using directive in `%s` and include that header instead.'
506 % proto_utils_path, files)] 532 % proto_utils_path, files)]
507 return [] 533 return []
508 534
509 535
510 def _CommonChecks(input_api, output_api): 536 def CommonChecks(input_api, output_api):
511 """Checks common to both upload and commit.""" 537 """Checks common to both upload and commit."""
512 results = [] 538 results = []
513 # Filter out files that are in objc or ios dirs from being cpplint-ed since 539 # Filter out files that are in objc or ios dirs from being cpplint-ed since
514 # they do not follow C++ lint rules. 540 # they do not follow C++ lint rules.
515 black_list = input_api.DEFAULT_BLACK_LIST + ( 541 black_list = input_api.DEFAULT_BLACK_LIST + (
516 r".*\bobjc[\\\/].*", 542 r".*\bobjc[\\\/].*",
517 r".*objc\.[hcm]+$", 543 r".*objc\.[hcm]+$",
518 r"webrtc\/build\/ios\/SDK\/.*", 544 r"webrtc\/build\/ios\/SDK\/.*",
519 ) 545 )
520 source_file_filter = lambda x: input_api.FilterSourceFile(x, None, black_list) 546 source_file_filter = lambda x: input_api.FilterSourceFile(x, None, black_list)
521 results.extend(_CheckApprovedFilesLintClean( 547 results.extend(CheckApprovedFilesLintClean(
522 input_api, output_api, source_file_filter)) 548 input_api, output_api, source_file_filter))
523 results.extend(input_api.canned_checks.RunPylint(input_api, output_api, 549 results.extend(input_api.canned_checks.RunPylint(input_api, output_api,
524 black_list=(r'^base[\\\/].*\.py$', 550 black_list=(r'^base[\\\/].*\.py$',
525 r'^build[\\\/].*\.py$', 551 r'^build[\\\/].*\.py$',
526 r'^buildtools[\\\/].*\.py$', 552 r'^buildtools[\\\/].*\.py$',
527 r'^infra[\\\/].*\.py$', 553 r'^infra[\\\/].*\.py$',
528 r'^ios[\\\/].*\.py$', 554 r'^ios[\\\/].*\.py$',
529 r'^out.*[\\\/].*\.py$', 555 r'^out.*[\\\/].*\.py$',
530 r'^testing[\\\/].*\.py$', 556 r'^testing[\\\/].*\.py$',
531 r'^third_party[\\\/].*\.py$', 557 r'^third_party[\\\/].*\.py$',
(...skipping 25 matching lines...)
557 source_file_filter=hundred_char_sources)) 583 source_file_filter=hundred_char_sources))
558 584
559 results.extend(input_api.canned_checks.CheckChangeHasNoTabs( 585 results.extend(input_api.canned_checks.CheckChangeHasNoTabs(
560 input_api, output_api)) 586 input_api, output_api))
561 results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace( 587 results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
562 input_api, output_api)) 588 input_api, output_api))
563 results.extend(input_api.canned_checks.CheckAuthorizedAuthor( 589 results.extend(input_api.canned_checks.CheckAuthorizedAuthor(
564 input_api, output_api)) 590 input_api, output_api))
565 results.extend(input_api.canned_checks.CheckChangeTodoHasOwner( 591 results.extend(input_api.canned_checks.CheckChangeTodoHasOwner(
566 input_api, output_api)) 592 input_api, output_api))
567 results.extend(_CheckNativeApiHeaderChanges(input_api, output_api)) 593 results.extend(CheckNativeApiHeaderChanges(input_api, output_api))
568 results.extend(_CheckNoIOStreamInHeaders(input_api, output_api)) 594 results.extend(CheckNoIOStreamInHeaders(input_api, output_api))
569 results.extend(_CheckNoPragmaOnce(input_api, output_api)) 595 results.extend(CheckNoPragmaOnce(input_api, output_api))
570 results.extend(_CheckNoFRIEND_TEST(input_api, output_api)) 596 results.extend(CheckNoFRIEND_TEST(input_api, output_api))
571 results.extend(_CheckGnChanges(input_api, output_api)) 597 results.extend(CheckGnChanges(input_api, output_api))
572 results.extend(_CheckUnwantedDependencies(input_api, output_api)) 598 results.extend(CheckUnwantedDependencies(input_api, output_api))
573 results.extend(_CheckJSONParseErrors(input_api, output_api)) 599 results.extend(CheckJSONParseErrors(input_api, output_api))
574 results.extend(_RunPythonTests(input_api, output_api)) 600 results.extend(RunPythonTests(input_api, output_api))
575 results.extend(_CheckUsageOfGoogleProtobufNamespace(input_api, output_api)) 601 results.extend(CheckUsageOfGoogleProtobufNamespace(input_api, output_api))
576 results.extend(_CheckOrphanHeaders(input_api, output_api)) 602 results.extend(CheckOrphanHeaders(input_api, output_api))
577 results.extend(_CheckNewLineAtTheEndOfProtoFiles(input_api, output_api)) 603 results.extend(CheckNewLineAtTheEndOfProtoFiles(input_api, output_api))
578 return results 604 return results
579 605
580 606
581 def CheckChangeOnUpload(input_api, output_api): 607 def CheckChangeOnUpload(input_api, output_api):
582 results = [] 608 results = []
583 results.extend(_CommonChecks(input_api, output_api)) 609 results.extend(CommonChecks(input_api, output_api))
584 results.extend( 610 results.extend(
585 input_api.canned_checks.CheckGNFormatted(input_api, output_api)) 611 input_api.canned_checks.CheckGNFormatted(input_api, output_api))
586 return results 612 return results
587 613
588 614
589 def CheckChangeOnCommit(input_api, output_api): 615 def CheckChangeOnCommit(input_api, output_api):
590 results = [] 616 results = []
591 results.extend(_CommonChecks(input_api, output_api)) 617 results.extend(CommonChecks(input_api, output_api))
592 results.extend(_VerifyNativeApiHeadersListIsValid(input_api, output_api)) 618 results.extend(VerifyNativeApiHeadersListIsValid(input_api, output_api))
593 results.extend(input_api.canned_checks.CheckOwners(input_api, output_api)) 619 results.extend(input_api.canned_checks.CheckOwners(input_api, output_api))
594 results.extend(input_api.canned_checks.CheckChangeWasUploaded( 620 results.extend(input_api.canned_checks.CheckChangeWasUploaded(
595 input_api, output_api)) 621 input_api, output_api))
596 results.extend(input_api.canned_checks.CheckChangeHasDescription( 622 results.extend(input_api.canned_checks.CheckChangeHasDescription(
597 input_api, output_api)) 623 input_api, output_api))
598 results.extend(_CheckChangeHasBugField(input_api, output_api)) 624 results.extend(CheckChangeHasBugField(input_api, output_api))
625 results.extend(CheckCommitMessageBugEntry(input_api, output_api))
599 results.extend(input_api.canned_checks.CheckTreeIsOpen( 626 results.extend(input_api.canned_checks.CheckTreeIsOpen(
600 input_api, output_api, 627 input_api, output_api,
601 json_url='http://webrtc-status.appspot.com/current?format=json')) 628 json_url='http://webrtc-status.appspot.com/current?format=json'))
602 return results 629 return results
603 630
604 631
605 def _CheckOrphanHeaders(input_api, output_api): 632 def CheckOrphanHeaders(input_api, output_api):
606 # We need to wait until we have an input_api object and use this 633 # We need to wait until we have an input_api object and use this
607 # roundabout construct to import presubmit_checks_lib because this file is 634 # roundabout construct to import presubmit_checks_lib because this file is
608 # eval-ed and thus doesn't have __file__. 635 # eval-ed and thus doesn't have __file__.
609 error_msg = """Header file {} is not listed in any GN target. 636 error_msg = """Header file {} is not listed in any GN target.
610 Please create a target or add it to an existing one in {}""" 637 Please create a target or add it to an existing one in {}"""
611 results = [] 638 results = []
612 original_sys_path = sys.path 639 original_sys_path = sys.path
613 try: 640 try:
614 sys.path = sys.path + [input_api.os_path.join( 641 sys.path = sys.path + [input_api.os_path.join(
615 input_api.PresubmitLocalPath(), 'tools_webrtc', 'presubmit_checks_lib')] 642 input_api.PresubmitLocalPath(), 'tools_webrtc', 'presubmit_checks_lib')]
616 from check_orphan_headers import GetBuildGnPathFromFilePath 643 from check_orphan_headers import GetBuildGnPathFromFilePath
617 from check_orphan_headers import IsHeaderInBuildGn 644 from check_orphan_headers import IsHeaderInBuildGn
618 finally: 645 finally:
619 # Restore sys.path to what it was before. 646 # Restore sys.path to what it was before.
620 sys.path = original_sys_path 647 sys.path = original_sys_path
621 648
622 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile): 649 for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
623 if f.LocalPath().endswith('.h') and f.Action() == 'A': 650 if f.LocalPath().endswith('.h') and f.Action() == 'A':
624 file_path = os.path.abspath(f.LocalPath()) 651 file_path = os.path.abspath(f.LocalPath())
625 root_dir = os.getcwd() 652 root_dir = os.getcwd()
626 gn_file_path = GetBuildGnPathFromFilePath(file_path, os.path.exists, 653 gn_file_path = GetBuildGnPathFromFilePath(file_path, os.path.exists,
627 root_dir) 654 root_dir)
628 in_build_gn = IsHeaderInBuildGn(file_path, gn_file_path) 655 in_build_gn = IsHeaderInBuildGn(file_path, gn_file_path)
629 if not in_build_gn: 656 if not in_build_gn:
630 results.append(output_api.PresubmitError(error_msg.format( 657 results.append(output_api.PresubmitError(error_msg.format(
631 file_path, gn_file_path))) 658 file_path, gn_file_path)))
632 return results 659 return results
633 660
634 661
635 def _CheckNewLineAtTheEndOfProtoFiles(input_api, output_api): 662 def CheckNewLineAtTheEndOfProtoFiles(input_api, output_api):
636 """Checks that all .proto files are terminated with a newline.""" 663 """Checks that all .proto files are terminated with a newline."""
637 error_msg = 'File {} must end with exactly one newline.' 664 error_msg = 'File {} must end with exactly one newline.'
638 results = [] 665 results = []
639 source_file_filter = lambda x: input_api.FilterSourceFile( 666 source_file_filter = lambda x: input_api.FilterSourceFile(
640 x, white_list=(r'.+\.proto$',)) 667 x, white_list=(r'.+\.proto$',))
641 for f in input_api.AffectedSourceFiles(source_file_filter): 668 for f in input_api.AffectedSourceFiles(source_file_filter):
642 file_path = f.LocalPath() 669 file_path = f.LocalPath()
643 with open(file_path) as f: 670 with open(file_path) as f:
644 lines = f.readlines() 671 lines = f.readlines()
645 if lines[-1] != '\n' or lines[-2] == '\n': 672 if lines[-1] != '\n' or lines[-2] == '\n':
646 results.append(output_api.PresubmitError(error_msg.format(file_path))) 673 results.append(output_api.PresubmitError(error_msg.format(file_path)))
647 return results 674 return results