Merge branch 'blender-v4.0-release'

Campbell Barton 2023-10-06 21:48:34 +11:00
commit c5d01df691
8 changed files with 8 additions and 8 deletions

@@ -492,7 +492,7 @@ def run_checks_on_project(
     import multiprocessing
     if jobs <= 0:
-        jobs = multiprocessing.cpu_count() * 2
+        jobs = multiprocessing.cpu_count()
     if jobs > 1:
         with multiprocessing.Pool(processes=jobs) as pool:

@@ -557,7 +557,7 @@ def main() -> None:
         import multiprocessing
         job_total = multiprocessing.cpu_count()
-        pool = multiprocessing.Pool(processes=job_total * 2)
+        pool = multiprocessing.Pool(processes=job_total)
         pool.map(operation_wrap, filepath_args)
     else:
         for filepath in [

@@ -205,7 +205,7 @@ def main() -> None:
     jobs = args.jobs
     if jobs <= 0:
-        jobs = multiprocessing.cpu_count() * 2
+        jobs = multiprocessing.cpu_count()
     base_path = args.base
     if not base_path:

@@ -367,7 +367,7 @@ def main() -> None:
     if jobs <= 0:
         # Clamp the value, higher values give errors with too many open files.
         # Allow users to manually pass very high values in as they might want to tweak system limits themselves.
-        jobs = min(multiprocessing.cpu_count() * 2, 400)
+        jobs = min(multiprocessing.cpu_count(), 400)
     credits.process(GitCommitIter(args.source_dir, commit_range), jobs=jobs)
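
Every hunk in this commit touches the same job-count pattern: a jobs argument of zero or less means "pick automatically", which now resolves to multiprocessing.cpu_count() (clamped to 400 where too many open files would otherwise be a problem, per the comment above). A minimal standalone sketch of that pattern follows; resolve_job_count and process_file are illustrative names, not functions from the Blender scripts.

import multiprocessing


def resolve_job_count(jobs: int, clamp: int = 400) -> int:
    # Zero or a negative value means "pick automatically".
    if jobs <= 0:
        # Clamp the value, higher values give errors with too many open files.
        jobs = min(multiprocessing.cpu_count(), clamp)
    return jobs


def process_file(filepath: str) -> str:
    # Illustrative worker; the real scripts run checks or formatters here.
    return filepath.upper()


if __name__ == "__main__":
    paths = ["a.py", "b.py", "c.py"]
    jobs = resolve_job_count(0)
    if jobs > 1:
        with multiprocessing.Pool(processes=jobs) as pool:
            results = pool.map(process_file, paths)
    else:
        results = [process_file(p) for p in paths]
    print(results)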

@@ -106,7 +106,7 @@ def main() -> None:
     if USE_MULTIPROCESS:
         import multiprocessing
         job_total = multiprocessing.cpu_count()
-        pool = multiprocessing.Pool(processes=job_total * 2)
+        pool = multiprocessing.Pool(processes=job_total)
         pool.map(autopep8_format_file, paths)
     else:
         for f in paths:

@@ -358,7 +358,7 @@ def main() -> None:
    if jobs <= 0:
        # Clamp the value, higher values give errors with too many open files.
        # Allow users to manually pass very high values in as they might want to tweak system limits themselves.
-       jobs = min(multiprocessing.cpu_count() * 2, 400)
+       jobs = min(multiprocessing.cpu_count(), 400)
    credits.process(GitCommitIter(args.source_dir, commit_range), jobs=jobs)

@@ -1882,7 +1882,7 @@ def run_edits_on_directory(
         return 1
     if jobs <= 0:
-        jobs = multiprocessing.cpu_count() * 2
+        jobs = multiprocessing.cpu_count()
     if args is None:
         # Error will have been reported.

@ -57,7 +57,7 @@ def run(
] ]
import multiprocessing import multiprocessing
job_total = multiprocessing.cpu_count() job_total = multiprocessing.cpu_count()
pool = multiprocessing.Pool(processes=job_total * 2) pool = multiprocessing.Pool(processes=job_total)
pool.starmap(operation_wrap, args) pool.starmap(operation_wrap, args)
else: else:
for directory in directories: for directory in directories:
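
The last hunk dispatches work with Pool.starmap rather than Pool.map: starmap unpacks each tuple in args into positional arguments for the worker. A small sketch of that call shape, assuming (directory, flag) style tuples; this operation_wrap body is a stand-in, not the one from the Blender script.

import multiprocessing


def operation_wrap(directory: str, verbose: bool) -> str:
    # Stand-in worker: the real script runs an operation per directory.
    return f"{directory} (verbose={verbose})"


if __name__ == "__main__":
    directories = ["source", "intern", "tests"]
    args = [(d, True) for d in directories]
    job_total = multiprocessing.cpu_count()
    with multiprocessing.Pool(processes=job_total) as pool:
        # starmap unpacks each tuple into the worker's arguments,
        # unlike map, which passes a single argument per call.
        results = pool.starmap(operation_wrap, args)
    print(results)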