[py312][2/3] fixing SyntaxWarning warnings
smuzaffar committed Oct 2, 2024
1 parent 3b3c2a3 commit 1d962c6
Showing 15 changed files with 61 additions and 57 deletions.
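
The changes below all follow the same recipe. Starting with Python 3.12, an unrecognized escape sequence such as `\d` or `\s` inside an ordinary (non-raw) string literal emits `SyntaxWarning: invalid escape sequence` at compile time (earlier interpreters only raised a hidden `DeprecationWarning`), and it is expected to become a hard error in a future release. The commit silences the warning either by doubling the backslash or by switching the literal to a raw string; both spellings produce exactly the same pattern text, so the compiled regular expressions and the shell command strings are unchanged. A minimal sketch of the idea (the sample workflow string is invented, not taken from the repository):

```python
import re

# Pre-commit spelling (kept in a comment here, because writing it out as code
# would itself trigger the warning under Python 3.12):
#     re.search("/\d+\.\d+_", f)
#     SyntaxWarning: invalid escape sequence '\d'

# The two equivalent fixes used throughout this commit:
doubled = "/\\d+\\.\\d+_"   # escape the backslashes inside the string literal
raw = r"/\d+\.\d+_"         # or switch to a raw-string literal

assert doubled == raw  # identical pattern text, identical runtime behaviour
assert re.search(raw, "/140.53_TTbar_14TeV").group() == "/140.53_"
```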
14 changes: 8 additions & 6 deletions logRootQA.py
@@ -51,7 +51,7 @@ def getCommonFiles(d1, d2, pattern):


def getWorkflow(f):
m = re.search("/\d+\.\d+_", f)
m = re.search("/\\d+\\.\\d+_", f)
if not m:
return "(none)"
return m.group().replace("/", "").replace("_", "")
@@ -73,10 +73,12 @@ def filteredLines(f):
retval = {}
for l in openfile(f):
# look for and remove timestamps
l = re.sub("20\d\d-\d\d-\d\d \d\d:\d\d:\d\d(\.\d+|)", "DATETIME", l)
l = re.sub("\d\d-(\d\d|[A-ZA-z]{3})-20\d\d \d\d:\d\d:\d\d(\.\d+|)", "DATETIME", l)
l = re.sub("20\\d\\d-\\d\\d-\\d\\d \\d\\d:\\d\\d:\\d\\d(\\.\\d+|)", "DATETIME", l)
l = re.sub(
"\\d\\d-(\\d\\d|[A-ZA-z]{3})-20\\d\\d \\d\\d:\\d\\d:\\d\\d(\\.\\d+|)", "DATETIME", l
)
if "Begin processing the" in l:
l = re.sub(" on stream \d", " on stream N", l)
l = re.sub(" on stream \\d", " on stream N", l)
sl = l.strip()
skip = False
for data in Log_Lines_Filter:
@@ -217,7 +219,7 @@ def checkDQMSize(r1, r2, diff, wfs):
]
)
lines = output.splitlines()
total = re.search("-?\d+\.\d+", lines[-1])
total = re.search("-?\\d+\\.\\d+", lines[-1])
if not total:
print("Weird output", r1)
print(output)
@@ -227,7 +229,7 @@ def checkDQMSize(r1, r2, diff, wfs):
print(lines, diff)
maxdiff = 10
for line in lines:
if re.match("\s*-?\d+.*", line): # normal output line
if re.match("\\s*-?\\d+.*", line): # normal output line
if line not in diff:
if len(diff) == maxdiff:
diff.append(" ... <truncated>")
4 changes: 2 additions & 2 deletions modify_comment.py
@@ -71,8 +71,8 @@
from process_pr import modify_comment, find_last_comment
from process_pr import TRIGERING_TESTS_MSG, TRIGERING_STYLE_TEST_MSG

valid_types["JENKINS_TEST_URL"] = ["^\s*" + TRIGERING_TESTS_MSG + ".*$", None]
valid_types["JENKINS_STYLE_URL"] = ["^\s*" + TRIGERING_STYLE_TEST_MSG + ".*$", None]
valid_types["JENKINS_TEST_URL"] = ["^\\s*" + TRIGERING_TESTS_MSG + ".*$", None]
valid_types["JENKINS_STYLE_URL"] = ["^\\s*" + TRIGERING_STYLE_TEST_MSG + ".*$", None]
gh = Github(login_or_token=open(expanduser(repo_config.GH_TOKEN)).read().strip())
issue = gh.get_repo(opts.repository).get_issue(int(args[0]))
last_comment = find_last_comment(
2 changes: 1 addition & 1 deletion parse_jenkins_builds.py
@@ -225,7 +225,7 @@ def grep(filename, pattern, verbose=False):
all_local = []
path = "/build/builds"
document = "builds-data"
rematch = re.compile(".*/\d+$")
rematch = re.compile(r".*/\d+$")
for root, dirs, files in os.walk(path):
if rematch.match(root):
logFile = root + "/build.xml"
2 changes: 1 addition & 1 deletion pr-checks/check-pr-files.py
@@ -35,7 +35,7 @@ def check_commits_files(repo, pr, detail=False):
if e:
print(o)
return all_ok
for l in [re.sub("\s+", " ", x.strip()) for x in o.split("\n") if x.strip()]:
for l in [re.sub("\\s+", " ", x.strip()) for x in o.split("\n") if x.strip()]:
(t, f) = l.split(" ")
if not f in data:
data[f] = []
4 changes: 3 additions & 1 deletion pr_testing/run-das-query.py
@@ -30,7 +30,9 @@
if e:
err = 1
if os.getenv("MATRIX_EXTRAS", ""):
e, o = run_cmd("grep -E '^[1-9][0-9]*(\.[0-9]*|)_' runall-report-step123-.log | sed 's|_.*||'")
e, o = run_cmd(
"grep -E '^[1-9][0-9]*(\\.[0-9]*|)_' runall-report-step123-.log | sed 's|_.*||'"
)
all_wfs = [wf for wf in o.split("\n") if wf]
print("All WFS:", all_wfs)
new_wfs = []
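
One nuance in this file (the check below is illustrative, not part of the commit): the backslash in `(\.[0-9]*|)` is meant for `grep -E`, not for Python's `re` module. Python already passed the unrecognized `\.` escape through unchanged, so grep received the same command either way; doubling the backslash only makes that explicit and removes the Python 3.12 warning.

```python
# Both spellings produce the same shell command string at runtime.
cmd_doubled = "grep -E '^[1-9][0-9]*(\\.[0-9]*|)_' runall-report-step123-.log | sed 's|_.*||'"
cmd_raw = r"grep -E '^[1-9][0-9]*(\.[0-9]*|)_' runall-report-step123-.log | sed 's|_.*||'"
assert cmd_doubled == cmd_raw
```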
4 changes: 2 additions & 2 deletions process-build-release-request.py
@@ -944,15 +944,15 @@ def guess_prev_rel_name(release_name, issue):
rel_match = (
rel_name_match.group(1)
+ prev_num_str
+ "\(_[a-zA-Z]*patch[0-9][0-9]*\|\)"
+ "\\(_[a-zA-Z]*patch[0-9][0-9]*\\|\\)"
+ rel_name_match.group(5)
+ ";"
)
if number == 0:
rel_match = (
rel_name_match.group(1)
+ rel_name_match.group(2)
+ "_pre\([0-9][0-9]*\)"
+ "_pre\\([0-9][0-9]*\\)"
+ rel_name_match.group(5)
+ ";"
)
4 changes: 2 additions & 2 deletions process-error-reports.py
@@ -28,8 +28,8 @@
# - Replace regexp special characters with their escaped counterparts
# - Replace @@@ back with "(.*)" for the matching
def reEscape(s):
s = re.sub("%\([a-z_A-Z]+\)s", "@@@", s)
s = re.sub("([\[\]\(\)\*\+\.])", "\\\\\\1", s)
s = re.sub("%\\([a-z_A-Z]+\\)s", "@@@", s)
s = re.sub("([\\[\\]\\(\\)\\*\\+\\.])", "\\\\\\1", s)
s = s.replace("\n", "\\n")
s = re.sub("@@@", "(.*)", s)
return s
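
`reEscape` is the subtlest change in the commit because two escaping layers stack here: the Python string literal and `re.sub`'s replacement syntax. Only the pattern literals were double-escaped; the replacement string was already fully escaped and is left untouched. A short sketch of the post-commit behaviour, using an invented template string (not from the repository):

```python
import re

def reEscape(s):
    # reEscape as in process-error-reports.py after this commit (comments added here)
    s = re.sub("%\\([a-z_A-Z]+\\)s", "@@@", s)             # %(name)s placeholders -> @@@
    s = re.sub("([\\[\\]\\(\\)\\*\\+\\.])", "\\\\\\1", s)  # prefix regex metacharacters with a backslash
    s = s.replace("\n", "\\n")
    s = re.sub("@@@", "(.*)", s)
    return s

# The replacement "\\\\\\1" evaluates to the four characters \ \ \ 1, which
# re.sub reads as "a literal backslash, then whatever group 1 matched";
# a raw string r"\\\1" would be an equivalent spelling.

# Invented error-report template:
template = "Error processing workflow %(workflow)s (step %(step)s)."
pattern = reEscape(template)   # -> "Error processing workflow (.*) \\(step (.*)\\)\\."
m = re.match(pattern, "Error processing workflow 140.53 (step 2).")
assert m.groups() == ("140.53", "2")
```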
46 changes: 23 additions & 23 deletions process_pr.py
@@ -96,30 +96,30 @@ def format(s, **kwds):
TRIGERING_TESTS_MSG1 = "Jenkins tests started for "
TRIGERING_STYLE_TEST_MSG = "The project style tests are being triggered in jenkins."
IGNORING_TESTS_MSG = "Ignoring test request."
TESTS_RESULTS_MSG = "^\s*([-|+]1|I had the issue.*)\s*$"
TESTS_RESULTS_MSG = r"^\s*([-|+]1|I had the issue.*)\s*$"
FAILED_TESTS_MSG = "The jenkins tests job failed, please try again."
PUSH_TEST_ISSUE_MSG = "^\[Jenkins CI\] Testing commit: [0-9a-f]+$"
PUSH_TEST_ISSUE_MSG = r"^\[Jenkins CI\] Testing commit: [0-9a-f]+$"
HOLD_MSG = "Pull request has been put on hold by "
# Regexp to match the test requests
CODE_CHECKS_REGEXP = re.compile(
"code-checks(\s+with\s+cms.week[0-9].PR_[0-9a-f]{8}/[^\s]+|)(\s+and\s+apply\s+patch|)$"
r"code-checks(\s+with\s+cms.week[0-9].PR_[0-9a-f]{8}/[^\s]+|)(\s+and\s+apply\s+patch|)$"
)
WF_PATTERN = "[1-9][0-9]*(\.[0-9]+|)"
WF_PATTERN = r"[1-9][0-9]*(\.[0-9]+|)"
CMSSW_QUEUE_PATTERN = "CMSSW_[0-9]+_[0-9]+_(X|[A-Z][A-Z0-9]+_X|[0-9]+(_[a-zA-Z0-9_]+|))"
CMSSW_PACKAGE_PATTERN = "[A-Z][a-zA-Z0-9]+(/[a-zA-Z0-9]+|)"
ARCH_PATTERN = "[a-z0-9]+_[a-z0-9]+_[a-z0-9]+"
CMSSW_RELEASE_QUEUE_PATTERN = format(
"(%(cmssw)s|%(arch)s|%(cmssw)s/%(arch)s)", cmssw=CMSSW_QUEUE_PATTERN, arch=ARCH_PATTERN
)
RELVAL_OPTS = "[-][a-zA-Z0-9_.,\s/'-]+"
CLOSE_REQUEST = re.compile("^\s*((@|)cmsbuild\s*[,]*\s+|)(please\s*[,]*\s+|)close\s*$", re.I)
REOPEN_REQUEST = re.compile("^\s*((@|)cmsbuild\s*[,]*\s+|)(please\s*[,]*\s+|)(re|)open\s*$", re.I)
RELVAL_OPTS = r"[-][a-zA-Z0-9_.,\s/'-]+"
CLOSE_REQUEST = re.compile(r"^\s*((@|)cmsbuild\s*[,]*\s+|)(please\s*[,]*\s+|)close\s*$", re.I)
REOPEN_REQUEST = re.compile(r"^\s*((@|)cmsbuild\s*[,]*\s+|)(please\s*[,]*\s+|)(re|)open\s*$", re.I)
CMS_PR_PATTERN = format(
"(#[1-9][0-9]*|(%(cmsorgs)s)/+[a-zA-Z0-9_-]+#[1-9][0-9]*|https://+github.com/+(%(cmsorgs)s)/+[a-zA-Z0-9_-]+/+pull/+[1-9][0-9]*)",
cmsorgs="|".join(EXTERNAL_REPOS),
)
TEST_REGEXP = format(
"^\s*((@|)cmsbuild\s*[,]*\s+|)(please\s*[,]*\s+|)test(\s+workflow(s|)\s+(%(workflow)s(\s*,\s*%(workflow)s|)*)|)(\s+with\s+(%(cms_pr)s(\s*,\s*%(cms_pr)s)*)|)(\s+for\s+%(release_queue)s|)(\s+using\s+full\s+cmssw|\s+using\s+(cms-|)addpkg\s+(%(pkg)s(,%(pkg)s)*)|)\s*$",
r"^\s*((@|)cmsbuild\s*[,]*\s+|)(please\s*[,]*\s+|)test(\s+workflow(s|)\s+(%(workflow)s(\s*,\s*%(workflow)s|)*)|)(\s+with\s+(%(cms_pr)s(\s*,\s*%(cms_pr)s)*)|)(\s+for\s+%(release_queue)s|)(\s+using\s+full\s+cmssw|\s+using\s+(cms-|)addpkg\s+(%(pkg)s(,%(pkg)s)*)|)\s*$",
workflow=WF_PATTERN,
cms_pr=CMS_PR_PATTERN,
pkg=CMSSW_PACKAGE_PATTERN,
@@ -129,29 +129,29 @@ def format(s, **kwds):
AUTO_TEST_REPOS = ["cms-sw/cmssw"]
REGEX_TEST_REG = re.compile(TEST_REGEXP, re.I)
REGEX_TEST_ABORT = re.compile(
"^\s*((@|)cmsbuild\s*[,]*\s+|)(please\s*[,]*\s+|)abort(\s+test|)$", re.I
r"^\s*((@|)cmsbuild\s*[,]*\s+|)(please\s*[,]*\s+|)abort(\s+test|)$", re.I
)
REGEX_TEST_IGNORE = re.compile(
r"^\s*(?:(?:@|)cmsbuild\s*[,]*\s+|)(?:please\s*[,]*\s+|)ignore\s+tests-rejected\s+(?:with|)([a-z -]+)$",
re.I,
)
REGEX_COMMITS_CACHE = re.compile(r"<!-- (?:commits|bot) cache: (.*) -->", re.DOTALL)
REGEX_IGNORE_COMMIT_COUNT = "\+commit-count"
REGEX_IGNORE_FILE_COUNT = "\+file-count"
REGEX_IGNORE_COMMIT_COUNT = r"\+commit-count"
REGEX_IGNORE_FILE_COUNT = r"\+file-count"
TEST_WAIT_GAP = 720
ALL_CHECK_FUNCTIONS = None
EXTRA_RELVALS_TESTS = ["threading", "gpu", "high-stats", "nano"]
EXTRA_RELVALS_TESTS_OPTS = "_" + "|_".join(EXTRA_RELVALS_TESTS)
EXTRA_TESTS = "|".join(EXTRA_RELVALS_TESTS) + "|hlt_p2_timing|profiling|none"
SKIP_TESTS = "|".join(["static", "header"])
ENABLE_TEST_PTRN = "enable(_test(s|)|)"
JENKINS_NODES = "[a-zA-Z0-9_|&\s()-]+"
JENKINS_NODES = r"[a-zA-Z0-9_|&\s()-]+"
MULTILINE_COMMENTS_MAP = {
"(workflow|relval)(s|)("
+ EXTRA_RELVALS_TESTS_OPTS
+ "|)": [format("%(workflow)s(\s*,\s*%(workflow)s|)*", workflow=WF_PATTERN), "MATRIX_EXTRAS"],
+ "|)": [format(r"%(workflow)s(\s*,\s*%(workflow)s|)*", workflow=WF_PATTERN), "MATRIX_EXTRAS"],
"(workflow|relval)(s|)_profiling": [
format("%(workflow)s(\s*,\s*%(workflow)s|)*", workflow=WF_PATTERN),
format(r"%(workflow)s(\s*,\s*%(workflow)s|)*", workflow=WF_PATTERN),
"PROFILING_WORKFLOWS",
],
"pull_request(s|)": [
@@ -162,12 +162,12 @@ def format(s, **kwds):
"disable_poison": ["true|false", "DISABLE_POISON"],
"use_ib_tag": ["true|false", "USE_IB_TAG"],
"baseline": ["self|default", "USE_BASELINE"],
"skip_test(s|)": [format("(%(tests)s)(\s*,\s*(%(tests)s))*", tests=SKIP_TESTS), "SKIP_TESTS"],
"skip_test(s|)": [format(r"(%(tests)s)(\s*,\s*(%(tests)s))*", tests=SKIP_TESTS), "SKIP_TESTS"],
"dry_run": ["true|false", "DRY_RUN"],
"jenkins_(slave|node)": [JENKINS_NODES, "RUN_ON_SLAVE"],
"(arch(itecture(s|))|release|release/arch)": [CMSSW_RELEASE_QUEUE_PATTERN, "RELEASE_FORMAT"],
ENABLE_TEST_PTRN: [
format("(%(tests)s)(\s*,\s*(%(tests)s))*", tests=EXTRA_TESTS),
format(r"(%(tests)s)(\s*,\s*(%(tests)s))*", tests=EXTRA_TESTS),
"ENABLE_BOT_TESTS",
],
"ignore_test(s|)": ["build-warnings|clang-warnings", "IGNORE_BOT_TESTS"],
@@ -491,7 +491,7 @@ def has_user_emoji(bot_cache, comment, repository, emoji, user):

def get_assign_categories(line, extra_labels):
m = re.match(
"^\s*(New categories assigned:\s*|(unassign|assign)\s+(from\s+|package\s+|))([a-zA-Z0-9/,\s-]+)\s*$",
r"^\s*(New categories assigned:\s*|(unassign|assign)\s+(from\s+|package\s+|))([a-zA-Z0-9/,\s-]+)\s*$",
line,
re.I,
)
@@ -921,17 +921,17 @@ def process_pr(repo_config, gh, repo, issue, dryRun, cmsbuild_user=None, force=F
# Process Pull Request
pkg_categories = set([])
REGEX_TYPE_CMDS = (
"^(type|(build-|)state)\s+(([-+]|)[a-z][a-z0-9_-]+)(\s*,\s*([-+]|)[a-z][a-z0-9_-]+)*$"
r"^(type|(build-|)state)\s+(([-+]|)[a-z][a-z0-9_-]+)(\s*,\s*([-+]|)[a-z][a-z0-9_-]+)*$"
)
REGEX_EX_CMDS = "^urgent$|^backport\s+(of\s+|)(#|http(s|):/+github\.com/+%s/+pull/+)\d+$" % (
REGEX_EX_CMDS = r"^urgent$|^backport\s+(of\s+|)(#|http(s|):/+github\.com/+%s/+pull/+)\d+$" % (
repo.full_name
)
known_ignore_tests = "%s" % MULTILINE_COMMENTS_MAP["ignore_test(s|)"][0]
REGEX_EX_IGNORE_CHKS = "^ignore\s+((%s)(\s*,\s*(%s))*|none)$" % (
REGEX_EX_IGNORE_CHKS = r"^ignore\s+((%s)(\s*,\s*(%s))*|none)$" % (
known_ignore_tests,
known_ignore_tests,
)
REGEX_EX_ENABLE_TESTS = "^enable\s+(%s)$" % MULTILINE_COMMENTS_MAP[ENABLE_TEST_PTRN][0]
REGEX_EX_ENABLE_TESTS = r"^enable\s+(%s)$" % MULTILINE_COMMENTS_MAP[ENABLE_TEST_PTRN][0]
L2_DATA = init_l2_data(repo_config, cms_repo)
last_commit_date = None
last_commit_obj = None
@@ -1301,7 +1301,7 @@ def process_pr(repo_config, gh, repo, issue, dryRun, cmsbuild_user=None, force=F
if valid_commenter:
enable_tests, ignore = check_enable_bot_tests(first_line.split(" ", 1)[-1])
comment_emoji = "+1"
elif re.match("^allow\s+@([^ ]+)\s+test\s+rights$", first_line, re.I):
elif re.match(r"^allow\s+@([^ ]+)\s+test\s+rights$", first_line, re.I):
comment_emoji = "-1"
if commenter_categories or (commenter in releaseManagers):
tester = first_line.split("@", 1)[-1].split(" ", 1)[0]
@@ -1522,7 +1522,7 @@ def process_pr(repo_config, gh, repo, issue, dryRun, cmsbuild_user=None, force=F

if issue.pull_request or push_test_issue:
# Check if the release manager asked for merging this.
if re.match("^\s*(merge)\s*$", first_line, re.I):
if re.match(r"^\s*(merge)\s*$", first_line, re.I):
emoji = "-1"
if (commenter in releaseManagers) or ("orp" in commenter_categories):
mustMerge = True
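
process_pr.py mostly takes the raw-string route rather than doubling backslashes. A raw string only changes how the literal is written in the source, so every compiled pattern stays byte-for-byte identical and the bot's command matching is unaffected; a quick illustrative check against one of the constants changed above (not part of the commit):

```python
import re

TESTS_RESULTS_MSG = r"^\s*([-|+]1|I had the issue.*)\s*$"  # new spelling from this commit

# The old non-raw spelling produced the very same pattern text; it merely
# triggered "SyntaxWarning: invalid escape sequence '\s'" under Python 3.12.
assert re.match(TESTS_RESULTS_MSG, "  +1  ")
assert re.match(TESTS_RESULTS_MSG, "I had the issue and restarted the tests")
assert not re.match(TESTS_RESULTS_MSG, "please test")
```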
2 changes: 1 addition & 1 deletion reco_profiling/profileRunner.py
@@ -423,7 +423,7 @@ def writeProfilingScript(wfdir, runscript, cmdlist):

# ensure that compiler include paths are added to ROOT_INCLUDE_PATH
fi.write(
"for path in $(LC_ALL=C g++ -xc++ -E -v /dev/null 2>&1 | sed -n -e '/^.include/,${' -e '/^ \/.*++/p' -e '}');do ROOT_INCLUDE_PATH=$path:$ROOT_INCLUDE_PATH; done"
r"for path in $(LC_ALL=C g++ -xc++ -E -v /dev/null 2>&1 | sed -n -e '/^.include/,${' -e '/^ \/.*++/p' -e '}');do ROOT_INCLUDE_PATH=$path:$ROOT_INCLUDE_PATH; done"
)
fi.write("\n")

8 changes: 4 additions & 4 deletions release_notes_collection.py
@@ -8,10 +8,10 @@
from hashlib import md5
import time

RX_RELEASE = re.compile("CMSSW_(\d+)_(\d+)_(\d+)(_pre[0-9]+)*(_cand[0-9]+)*(_patch[0-9]+)*")
RX_AUTHOR = re.compile("(.*)(@[a-zA-Z-_0-9]+)")
RX_COMPARE = re.compile("(https://github.*compare.*\.\.\..*)")
RX_COMMIT = re.compile("^-\s+(:arrow_right:\s*|)([^/]+\/[^/]+|)\#(\d{0,5})( from.*)")
RX_RELEASE = re.compile(r"CMSSW_(\d+)_(\d+)_(\d+)(_pre[0-9]+)*(_cand[0-9]+)*(_patch[0-9]+)*")
RX_AUTHOR = re.compile(r"(.*)(@[a-zA-Z-_0-9]+)")
RX_COMPARE = re.compile(r"(https://github.*compare.*\.\.\..*)")
RX_COMMIT = re.compile(r"^-\s+(:arrow_right:\s*|)([^/]+\/[^/]+|)\#(\d{0,5})( from.*)")

Release = namedtuple(
"Release", ["major", "minor", "subminor", "pre", "cand", "patch", "published_at"]
2 changes: 1 addition & 1 deletion report-build-release-status.py
@@ -200,7 +200,7 @@ def get_test_log(logfile):
try:
logfile = join(getenv("WORKSPACE"), logfile)
try:
logmsg = "\n\nTests results:\n" + getoutput("grep 'ERROR\| tests passed' " + logfile)
logmsg = "\n\nTests results:\n" + getoutput("grep 'ERROR\\| tests passed' " + logfile)
except:
logmsg = "\n\nUnable to read tests log: No such file " + logfile
except:
4 changes: 2 additions & 2 deletions report-pull-request-results.py
@@ -177,7 +177,7 @@ def read_matrix_log_file(matrix_log):
line = line.strip()
if "ERROR executing" in line:
print("processing: %s" % line)
parts = re.sub("\s+", " ", line).split(" ")
parts = re.sub("\\s+", " ", line).split(" ")
workflow_info = parse_workflow_info(parts, relval_dir)
if "number" in workflow_info:
workflows_with_error.append(workflow_info)
@@ -231,7 +231,7 @@ def read_matrix_log_file(matrix_log):
#
def cmd_to_addon_test(command, addon_dir):
try:
cmdMatch = re.match("^\[(.+):(\d+)\] +(.*)", command)
cmdMatch = re.match("^\\[(.+):(\\d+)\\] +(.*)", command)
addon_subdir = cmdMatch.group(1)
logfile = "step%s.log" % cmdMatch.group(2)
e, o = run_cmd("ls -d %s/%s/%s 2>/dev/null | tail -1" % (addon_dir, addon_subdir, logfile))
12 changes: 6 additions & 6 deletions runPyRelValThread.py
@@ -77,7 +77,7 @@ def runThreadMatrix(basedir, workflow, args="", logger=None, wf_err=None):
False,
wfdir,
)
logRE = re.compile("^(.*/[0-9]+(\.[0-9]+|)_([^/]+))/step1_dasquery.log$")
logRE = re.compile("^(.*/[0-9]+(\\.[0-9]+|)_([^/]+))/step1_dasquery.log$")
for logFile in glob.glob(outfolder + "/step1_dasquery.log"):
m = logRE.match(logFile)
if not m:
@@ -103,7 +103,7 @@ def runThreadMatrix(basedir, workflow, args="", logger=None, wf_err=None):
def find_argv(args, arg):
val = ""
fullval = ""
reX = re.compile("\s*((" + arg + ")(\s+|=)([^ ]+))")
reX = re.compile("\\s*((" + arg + ")(\\s+|=)([^ ]+))")
m = reX.search(args)
if m:
glen = len(m.groups())
@@ -174,7 +174,7 @@ def getWorkFlows(self, args):
+ self.args["s"]
+ " "
+ self.args["l"]
+ " | grep -v ' workflows with ' | grep -E '^[0-9][0-9]*(\.[0-9][0-9]*|)\s\s*' | sort -nr | awk '{print $1}'"
+ " | grep -v ' workflows with ' | grep -E '^[0-9][0-9]*(\\.[0-9][0-9]*|)\\s\\s*' | sort -nr | awk '{print $1}'"
)
print("RunTheMatrix>>", workflowsCmd)
cmsstat, workflows = doCmd(workflowsCmd)
@@ -267,7 +267,7 @@ def update_runall(self):
for logFile in glob.glob(self.basedir + "/*/workflow.log"):
inFile = open(logFile)
for line in inFile:
if re.match("^\s*(\d+\s+)+tests passed,\s+(\d+\s+)+failed\s*$", line):
if re.match("^\\s*(\\d+\\s+)+tests passed,\\s+(\\d+\\s+)+failed\\s*$", line):
res = line.strip().split(" tests passed, ")
res[0] = res[0].split()
res[1] = res[1].replace(" failed", "").split()
@@ -324,7 +324,7 @@ def update_wftime(self):
inFile = open(logFile)
line = inFile.readline().strip()
inFile.close()
m = re.match("^(\d+)(\.\d+|)$", line)
m = re.match("^(\\d+)(\\.\\d+|)$", line)
if m:
time_info[wf] = int(m.group(1))
except Exception as e:
@@ -335,7 +335,7 @@ def parseLog(self):

def parseLog(self):
logData = {}
logRE = re.compile("^.*/([1-9][0-9]*(\.[0-9]+|))_[^/]+/step([1-9])_.*\.log$")
logRE = re.compile("^.*/([1-9][0-9]*(\\.[0-9]+|))_[^/]+/step([1-9])_.*\\.log$")
max_steps = 0
for logFile in glob.glob(self.basedir + "/[1-9]*/step[0-9]*.log"):
m = logRE.match(logFile)
2 changes: 1 addition & 1 deletion tests/test_logreaderUtils.py
@@ -123,7 +123,7 @@ def test_unittestlogs(self):
"control_type": ResultTypeEnum.ISSUE,
},
{
"str_to_match": '===== Test "([^\s]+)" ====',
"str_to_match": '===== Test "([^\\s]+)" ====',
"name": "{0}",
"control_type": ResultTypeEnum.TEST,
},
