Mirror of https://github.com/jerryscript-project/jerryscript.git, synced 2025-12-15 16:29:21 +00:00
Modernize python and update pylint (#5096)
Update the code to conform to the newer version of pylint available in
ubuntu-22.04, with a few exceptions:
- disabled `import-outside-toplevel` for `main()` in
`jerry_client.py`
- disabled `consider-using-with` for the logfile of `TestSuite` in
`test262-harness.py`, as using `with` is not practical in that case
Update test262-harness.py to use argparse instead of the now-deprecated
optparse
Rename variables in jerry_client_main.py that redefined Python builtins
or shadowed variables from an outer scope
Update the Python files to use f-strings
Add minimum Python versions (3.6 and 3.8) to the CI jobs: without them, the
default Python version did not support the `with` statement for
`subprocess.Popen` used in `build.py` on macOS, or, in some cases, f-strings
Remove `from __future__` imports that are no-ops in Python 3
Remove the shebang from non-executable files
Re-enable most pylint checkers, except `missing-docstring` (an illustrative
sketch of these rewrite patterns follows the sign-off)
JerryScript-DCO-1.0-Signed-off-by: Máté Tokodi mate.tokodi@szteszoftver.hu
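The items above describe mechanical rewrites rather than behavioural changes. As a rough illustration only (the module, function, and option names below are invented and are not taken from this patch), the f-string, `subprocess.Popen`, argparse, and pylint-pragma items translate to code shaped like this:

```python
# Illustrative sketch only: names are invented; it just shows the kinds of
# rewrites listed in the commit message.
import argparse
import subprocess


def parse_args():
    # argparse replaces the deprecated optparse (as done in test262-harness.py).
    parser = argparse.ArgumentParser(description='run a command under a timeout')
    parser.add_argument('--engine', default='./jerry', help='path to the binary to run')
    parser.add_argument('--timeout', type=int, default=300, help='timeout in seconds')
    return parser.parse_args()


def run(cmd, timeout):
    # Popen has supported the 'with' statement since Python 3.2; the newer
    # pylint's consider-using-with check expects it to be used this way.
    with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
        out, _ = proc.communicate(timeout=timeout)
    return proc.returncode, out


def main():
    # pylint: disable=import-outside-toplevel
    # A deliberately local import, silenced the same way the commit silences
    # the checker for main() in jerry_client.py.
    import json

    args = parse_args()
    code, out = run([args.engine, '--version'], args.timeout)
    # f-strings replace '%'-formatting and str.format().
    print(f'{args.engine} exited with code {code}')
    print(json.dumps({'stdout': out.decode('utf8', 'replace')}, indent=2))


if __name__ == '__main__':
    main()
```

The same shape recurs throughout the diff below; for example, `make_jerry()` in `build.py` now wraps `subprocess.Popen` in a `with` block and builds its `MAKEFLAGS` value with an f-string.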
This commit is contained in:
parent a588e49661
commit bc408b159b

.github/workflows/gh-actions.yml (vendored, 70 changed lines)
@@ -17,7 +17,8 @@ jobs:
python-version: '3.10'
- run: sudo apt update
# TODO: update checkers to current versions available in ubuntu 22.04
# - run: sudo apt install doxygen clang-format-10 cppcheck pylint python-serial
# - run: sudo apt install doxygen clang-format-10 cppcheck python-serial
- run: sudo apt install pylint
- run: $RUNNER --check-signed-off=gh-actions
if: ${{ always() }}
# - run: $RUNNER --check-doxygen
@@ -28,8 +29,8 @@ jobs:
if: ${{ always() }}
# - run: $RUNNER --check-strings
# if: ${{ always() }}
# - run: $RUNNER --check-pylint
# if: ${{ always() }}
- run: $RUNNER --check-pylint
if: ${{ always() }}
# - run: $RUNNER --check-cppcheck
# if: ${{ always() }}
@@ -37,7 +38,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: $RUNNER -q --jerry-tests
- run: $RUNNER -q --jerry-tests --build-debug
- run: $RUNNER -q --jerry-debugger
@@ -47,6 +50,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: sudo apt update
- run: sudo apt install gcc-multilib
- run: $RUNNER -q --jerry-tests --buildoptions=--compile-flag=-m32,--cpointer-32bit=on
@@ -56,6 +62,9 @@ jobs:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: $RUNNER -q --jerry-tests
- run: $RUNNER -q --unittests

@@ -63,6 +72,9 @@ jobs:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: $RUNNER -q --jerry-tests --build-debug
- run: $RUNNER -q --unittests --build-debug

@@ -70,6 +82,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: sudo apt update
- run: sudo apt install gcc-multilib
- run: $RUNNER --buildoption-test
@@ -78,6 +93,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: $RUNNER --test262 update
- uses: actions/upload-artifact@v2
if: success() || failure()
@@ -90,6 +108,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: $RUNNER --test262 update --build-debug --test262-test-list=built-ins,annexB,harness,intl402
- uses: actions/upload-artifact@v2
if: success() || failure()
@@ -102,6 +123,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: $RUNNER --test262 update --build-debug --test262-test-list=language
- uses: actions/upload-artifact@v2
if: success() || failure()
@@ -114,6 +138,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: $RUNNER -q --unittests
- run: $RUNNER -q --unittests --build-debug

@@ -123,6 +150,9 @@ jobs:
CC: clang
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: sudo apt update
- run: sudo apt install gcc-multilib
- run: $RUNNER -q --unittests
@@ -135,6 +165,9 @@ jobs:
ASAN_OPTIONS: detect_stack_use_after_return=1:check_initialization_order=true:strict_init_order=true
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: sudo apt update
- run: sudo apt install gcc-multilib
- run: >-
@@ -148,6 +181,9 @@ jobs:
ASAN_OPTIONS: detect_stack_use_after_return=1:check_initialization_order=true:strict_init_order=true
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: sudo apt update
- run: sudo apt install gcc-multilib
- run: >-
@@ -161,6 +197,9 @@ jobs:
UBSAN_OPTIONS: print_stacktrace=1
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: sudo apt update
- run: sudo apt install gcc-multilib
- run: >-
@@ -179,6 +218,9 @@ jobs:
TIMEOUT: 300
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: sudo apt update
- run: sudo apt install gcc-arm-linux-gnueabihf libc6-dev-armhf-cross qemu-user-static
- run: >-
@@ -192,6 +234,9 @@ jobs:
TIMEOUT: 300
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: sudo apt update
- run: sudo apt install gcc-arm-linux-gnueabihf libc6-dev-armhf-cross qemu-user-static
- run: >-
@@ -205,6 +250,9 @@ jobs:
TIMEOUT: 300
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: sudo apt update
- run: sudo apt install gcc-aarch64-linux-gnu libc6-dev-armhf-cross qemu-user-static
- run: >-
@@ -218,6 +266,9 @@ jobs:
TIMEOUT: 300
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: sudo apt update
- run: sudo apt install gcc-aarch64-linux-gnu libc6-dev-armhf-cross qemu-user-static
- run: >-
@@ -242,9 +293,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- uses: actions/setup-python@v4
with:
python-version: '3.x'
python-version: '>=3.6'
- run: sudo apt update
- run: sudo apt install gperf dfu-util device-tree-compiler
- run: make -f ./targets/os/zephyr/Makefile.travis install
@@ -254,6 +305,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v4
with:
python-version: '>=3.6'
- run: sudo apt update
- run: sudo apt install gcc-arm-none-eabi libnewlib-arm-none-eabi gperf
- run: make -f ./targets/os/nuttx/Makefile.travis install-noapt
@@ -276,9 +330,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- uses: actions/setup-python@v4
with:
python-version: '3.8'
python-version: '>=3.8'
- run: make -f ./targets/baremetal-sdk/espressif/esp8266-rtos-sdk/Makefile.travis install-noapt
- run: make -f ./targets/baremetal-sdk/espressif/esp8266-rtos-sdk/Makefile.travis script
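The workflow hunks above mostly pin actions/setup-python to '>=3.6' (and '>=3.8' for the esp8266 target), which is what makes f-strings and `with subprocess.Popen(...)` available on every runner. For illustration only, an equivalent runtime guard in a Python script could look like the sketch below; this guard is not part of the patch, which relies on the CI pin instead.

```python
import sys

# Hypothetical guard, not taken from the patch: the CI achieves the same
# thing declaratively via actions/setup-python's python-version input.
MIN_VERSION = (3, 6)

if sys.version_info < MIN_VERSION:
    # %-formatting on purpose: an f-string here would already be a syntax
    # error on the interpreters this guard is meant to reject.
    sys.exit("Python %d.%d or newer is required" % MIN_VERSION)
```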
@ -14,7 +14,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
from cmd import Cmd
|
||||
from pprint import pprint
|
||||
import math
|
||||
@ -42,7 +41,7 @@ class DebuggerPrompt(Cmd):
|
||||
def precmd(self, line):
|
||||
self.stop = False
|
||||
if self.debugger.non_interactive:
|
||||
print("%s" % line)
|
||||
print(f"{line}")
|
||||
return line
|
||||
|
||||
def postcmd(self, stop, line):
|
||||
@ -104,14 +103,14 @@ class DebuggerPrompt(Cmd):
|
||||
if res_type == result.END:
|
||||
self.quit = True
|
||||
return
|
||||
elif res_type == result.TEXT:
|
||||
if res_type == result.TEXT:
|
||||
write(result.get_text())
|
||||
elif res_type == result.PROMPT:
|
||||
break
|
||||
|
||||
args -= 1
|
||||
except ValueError as val_errno:
|
||||
print("Error: expected a positive integer: %s" % val_errno)
|
||||
print(f"Error: expected a positive integer: {val_errno}")
|
||||
do_n = do_next
|
||||
|
||||
def do_step(self, _):
|
||||
@ -185,10 +184,10 @@ class DebuggerPrompt(Cmd):
|
||||
code = args[1]
|
||||
|
||||
if index < 0 or index > 65535:
|
||||
raise ValueError("Invalid scope chain index: %d (must be between 0 and 65535)" % index)
|
||||
raise ValueError(f"Invalid scope chain index: {index} (must be between 0 and 65535)")
|
||||
|
||||
except ValueError as val_errno:
|
||||
print("Error: %s" % (val_errno))
|
||||
print(f"Error: {val_errno}")
|
||||
return
|
||||
|
||||
self.debugger.eval_at(code, index)
|
||||
@ -259,10 +258,10 @@ def src_check_args(args):
|
||||
|
||||
return line_num
|
||||
except ValueError as val_errno:
|
||||
print("Error: Non-negative integer number expected: %s" % (val_errno))
|
||||
print(f"Error: Non-negative integer number expected: {val_errno}")
|
||||
return -1
|
||||
|
||||
# pylint: disable=too-many-branches,too-many-locals,too-many-statements
|
||||
# pylint: disable=too-many-branches,too-many-locals,too-many-statements,import-outside-toplevel
|
||||
def main():
|
||||
args = jerry_client_main.arguments_parse()
|
||||
|
||||
@ -325,7 +324,7 @@ def main():
|
||||
|
||||
if res_type == result.END:
|
||||
break
|
||||
elif res_type == result.PROMPT:
|
||||
if res_type == result.PROMPT:
|
||||
prompt.cmdloop()
|
||||
elif res_type == result.TEXT:
|
||||
write(result.get_text())
|
||||
@ -339,7 +338,7 @@ if __name__ == "__main__":
|
||||
MSG = str(error_msg)
|
||||
if ERRNO == 111:
|
||||
sys.exit("Failed to connect to the JerryScript debugger.")
|
||||
elif ERRNO == 32 or ERRNO == 104:
|
||||
elif ERRNO in (32, 104):
|
||||
sys.exit("Connection closed.")
|
||||
else:
|
||||
sys.exit("Failed to connect to the JerryScript debugger.\nError: %s" % (MSG))
|
||||
sys.exit(f"Failed to connect to the JerryScript debugger.\nError: {MSG}")
|
||||
|
||||
@ -1,5 +1,3 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright JS Foundation and other contributors, http://js.foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -14,7 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
import argparse
|
||||
import logging
|
||||
import re
|
||||
@ -146,7 +143,7 @@ def arguments_parse():
|
||||
return args
|
||||
|
||||
|
||||
class JerryBreakpoint(object):
|
||||
class JerryBreakpoint:
|
||||
|
||||
def __init__(self, line, offset, function):
|
||||
self.line = line
|
||||
@ -156,19 +153,18 @@ class JerryBreakpoint(object):
|
||||
|
||||
def __str__(self):
|
||||
result = self.function.source_name or "<unknown>"
|
||||
result += ":%d" % (self.line)
|
||||
result += f":{self.line}"
|
||||
|
||||
if self.function.is_func:
|
||||
result += " (in "
|
||||
result += self.function.name or "function"
|
||||
result += "() at line:%d, col:%d)" % (self.function.line, self.function.column)
|
||||
result += f"() at line:{self.function.line}, col:{self.function.column})"
|
||||
return result
|
||||
|
||||
def __repr__(self):
|
||||
return ("Breakpoint(line:%d, offset:%d, active_index:%d)"
|
||||
% (self.line, self.offset, self.active_index))
|
||||
return f"Breakpoint(line:{self.line}, offset:{self.offset}, active_index:{self.active_index})"
|
||||
|
||||
class JerryPendingBreakpoint(object):
|
||||
class JerryPendingBreakpoint:
|
||||
def __init__(self, line=None, source_name=None, function=None):
|
||||
self.function = function
|
||||
self.line = line
|
||||
@ -179,13 +175,13 @@ class JerryPendingBreakpoint(object):
|
||||
def __str__(self):
|
||||
result = self.source_name
|
||||
if self.line:
|
||||
result += ":%d" % (self.line)
|
||||
result += f":{self.line}"
|
||||
else:
|
||||
result += "%s()" % (self.function)
|
||||
result += f"{self.function}()"
|
||||
return result
|
||||
|
||||
|
||||
class JerryFunction(object):
|
||||
class JerryFunction:
|
||||
# pylint: disable=too-many-instance-attributes,too-many-arguments
|
||||
def __init__(self, is_func, byte_code_cp, source, source_name, line, column, name, lines, offsets):
|
||||
self.is_func = bool(is_func)
|
||||
@ -205,20 +201,20 @@ class JerryFunction(object):
|
||||
|
||||
for i, _line in enumerate(lines):
|
||||
offset = offsets[i]
|
||||
breakpoint = JerryBreakpoint(_line, offset, self)
|
||||
self.lines[_line] = breakpoint
|
||||
self.offsets[offset] = breakpoint
|
||||
breakpt = JerryBreakpoint(_line, offset, self)
|
||||
self.lines[_line] = breakpt
|
||||
self.offsets[offset] = breakpt
|
||||
|
||||
def __repr__(self):
|
||||
result = ("Function(byte_code_cp:0x%x, source_name:%r, name:%r, line:%d, column:%d { "
|
||||
% (self.byte_code_cp, self.source_name, self.name, self.line, self.column))
|
||||
result = f"Function(byte_code_cp:0x{self.byte_code_cp:x}, source_name:{self.source_name!r}, "\
|
||||
f"name:{self.name!r}, line:{self.line}, column:{self.column} {{ "
|
||||
|
||||
result += ','.join([str(breakpoint) for breakpoint in self.lines.values()])
|
||||
|
||||
return result + " })"
|
||||
|
||||
|
||||
class Multimap(object):
|
||||
class Multimap:
|
||||
|
||||
def __init__(self):
|
||||
self.map = {}
|
||||
@ -243,10 +239,10 @@ class Multimap(object):
|
||||
del items[items.index(value)]
|
||||
|
||||
def __repr__(self):
|
||||
return "Multimap(%r)" % (self.map)
|
||||
return f"Multimap({self.map!r})"
|
||||
|
||||
|
||||
class DebuggerAction(object):
|
||||
class DebuggerAction:
|
||||
END = 0
|
||||
WAIT = 1
|
||||
TEXT = 2
|
||||
@ -263,7 +259,7 @@ class DebuggerAction(object):
|
||||
return self.action_text
|
||||
|
||||
|
||||
class JerryDebugger(object):
|
||||
class JerryDebugger:
|
||||
# pylint: disable=too-many-instance-attributes,too-many-statements,too-many-public-methods,no-self-use
|
||||
def __init__(self, channel):
|
||||
self.prompt = False
|
||||
@ -327,8 +323,8 @@ class JerryDebugger(object):
|
||||
|
||||
self.version = struct.unpack(self.byte_order + self.idx_format, result[2:6])[0]
|
||||
if self.version != JERRY_DEBUGGER_VERSION:
|
||||
raise Exception("Incorrect debugger version from target: %d expected: %d" %
|
||||
(self.version, JERRY_DEBUGGER_VERSION))
|
||||
raise Exception(f"Incorrect debugger version from target: {self.version} "
|
||||
f"expected: {JERRY_DEBUGGER_VERSION}")
|
||||
|
||||
logging.debug("Compressed pointer size: %d", self.cp_size)
|
||||
|
||||
@ -370,20 +366,20 @@ class JerryDebugger(object):
|
||||
return self._set_breakpoint(args, False)
|
||||
|
||||
except ValueError as val_errno:
|
||||
return "Error: Positive breakpoint index expected: %s" % (val_errno)
|
||||
return f"Error: Positive breakpoint index expected: {val_errno}"
|
||||
|
||||
return self._set_breakpoint(args, False)
|
||||
|
||||
def breakpoint_list(self):
|
||||
result = ''
|
||||
if self.active_breakpoint_list:
|
||||
result += "=== %sActive breakpoints %s ===\n" % (self.green_bg, self.nocolor)
|
||||
for breakpoint in self.active_breakpoint_list.values():
|
||||
result += " %d: %s\n" % (breakpoint.active_index, breakpoint)
|
||||
result += f"=== {self.green_bg}Active breakpoints {self.nocolor} ===\n"
|
||||
for breakpt in self.active_breakpoint_list.values():
|
||||
result += f" {breakpt.active_index}: {breakpt}\n"
|
||||
if self.pending_breakpoint_list:
|
||||
result += "=== %sPending breakpoints%s ===\n" % (self.yellow_bg, self.nocolor)
|
||||
for breakpoint in self.pending_breakpoint_list.values():
|
||||
result += " %d: %s (pending)\n" % (breakpoint.index, breakpoint)
|
||||
result += f"=== {self.yellow_bg}Pending breakpoints{self.nocolor} ===\n"
|
||||
for breakpt in self.pending_breakpoint_list.values():
|
||||
result += f" {breakpt.index}: {breakpt} (pending)\n"
|
||||
|
||||
if not self.active_breakpoint_list and not self.pending_breakpoint_list:
|
||||
result += "No breakpoints\n"
|
||||
@ -395,13 +391,13 @@ class JerryDebugger(object):
|
||||
return "Error: Breakpoint index expected\n" \
|
||||
"Delete the given breakpoint, use 'delete all|active|pending' " \
|
||||
"to clear all the given breakpoints\n "
|
||||
elif args in ['all', 'pending', 'active']:
|
||||
if args in ['all', 'pending', 'active']:
|
||||
if args != "pending":
|
||||
for i in list(self.active_breakpoint_list.values()):
|
||||
breakpoint = self.active_breakpoint_list[i.active_index]
|
||||
breakpt = self.active_breakpoint_list[i.active_index]
|
||||
del self.active_breakpoint_list[i.active_index]
|
||||
breakpoint.active_index = -1
|
||||
self._send_breakpoint(breakpoint)
|
||||
breakpt.active_index = -1
|
||||
self._send_breakpoint(breakpt)
|
||||
|
||||
if args != "active":
|
||||
if self.pending_breakpoint_list:
|
||||
@ -412,21 +408,20 @@ class JerryDebugger(object):
|
||||
try:
|
||||
breakpoint_index = int(args)
|
||||
except ValueError as val_errno:
|
||||
return "Error: Integer number expected, %s\n" % (val_errno)
|
||||
return f"Error: Integer number expected, {val_errno}\n"
|
||||
|
||||
if breakpoint_index in self.active_breakpoint_list:
|
||||
breakpoint = self.active_breakpoint_list[breakpoint_index]
|
||||
breakpt = self.active_breakpoint_list[breakpoint_index]
|
||||
del self.active_breakpoint_list[breakpoint_index]
|
||||
breakpoint.active_index = -1
|
||||
self._send_breakpoint(breakpoint)
|
||||
return "Breakpoint %d deleted\n" % (breakpoint_index)
|
||||
elif breakpoint_index in self.pending_breakpoint_list:
|
||||
breakpt.active_index = -1
|
||||
self._send_breakpoint(breakpt)
|
||||
return f"Breakpoint {breakpoint_index} deleted\n"
|
||||
if breakpoint_index in self.pending_breakpoint_list:
|
||||
del self.pending_breakpoint_list[breakpoint_index]
|
||||
if not self.pending_breakpoint_list:
|
||||
self._send_parser_config(0)
|
||||
return "Pending breakpoint %d deleted\n" % (breakpoint_index)
|
||||
else:
|
||||
return "Error: Breakpoint %d not found\n" % (breakpoint_index)
|
||||
return f"Pending breakpoint {breakpoint_index} deleted\n"
|
||||
return f"Error: Breakpoint {breakpoint_index} not found\n"
|
||||
|
||||
def next(self):
|
||||
self.prompt = False
|
||||
@ -469,7 +464,7 @@ class JerryDebugger(object):
|
||||
return "Error: Positive integer number expected\n"
|
||||
|
||||
except ValueError as val_errno:
|
||||
return "Error: Positive integer number expected, %s\n" % (val_errno)
|
||||
return f"Error: Positive integer number expected, {val_errno}\n"
|
||||
|
||||
self.frame_index = min_depth
|
||||
|
||||
@ -541,7 +536,7 @@ class JerryDebugger(object):
|
||||
return ""
|
||||
|
||||
except ValueError as val_errno:
|
||||
return "Error: Non negative integer number expected, %s\n" % (val_errno)
|
||||
return f"Error: Non negative integer number expected, {val_errno}\n"
|
||||
|
||||
message = struct.pack(self.byte_order + "BB" + self.idx_format,
|
||||
1 + 4,
|
||||
@ -605,22 +600,22 @@ class JerryDebugger(object):
|
||||
|
||||
self.channel.send_message(self.byte_order, message + args[prev_offset:offset])
|
||||
|
||||
def _breakpoint_pending_exists(self, breakpoint):
|
||||
def _breakpoint_pending_exists(self, breakpt):
|
||||
for existing_bp in self.pending_breakpoint_list.values():
|
||||
if (breakpoint.line and existing_bp.source_name == breakpoint.source_name and \
|
||||
existing_bp.line == breakpoint.line) \
|
||||
or (not breakpoint.line and existing_bp.function == breakpoint.function):
|
||||
if (breakpt.line and existing_bp.source_name == breakpt.source_name and \
|
||||
existing_bp.line == breakpt.line) \
|
||||
or (not breakpt.line and existing_bp.function == breakpt.function):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def _send_breakpoint(self, breakpoint):
|
||||
def _send_breakpoint(self, breakpt):
|
||||
message = struct.pack(self.byte_order + "BBB" + self.cp_format + self.idx_format,
|
||||
1 + 1 + self.cp_size + 4,
|
||||
JERRY_DEBUGGER_UPDATE_BREAKPOINT,
|
||||
int(breakpoint.active_index >= 0),
|
||||
breakpoint.function.byte_code_cp,
|
||||
breakpoint.offset)
|
||||
int(breakpt.active_index >= 0),
|
||||
breakpt.function.byte_code_cp,
|
||||
breakpt.offset)
|
||||
self.channel.send_message(self.byte_order, message)
|
||||
|
||||
def _send_bytecode_cp(self, byte_code_cp):
|
||||
@ -658,7 +653,7 @@ class JerryDebugger(object):
|
||||
sys.exit("Error: Javascript file expected!")
|
||||
return
|
||||
|
||||
with open(path, 'r') as src_file:
|
||||
with open(path, 'r', encoding='utf8') as src_file:
|
||||
content = path + "\0" + src_file.read()
|
||||
self._send_string(content, JERRY_DEBUGGER_CLIENT_SOURCE)
|
||||
|
||||
@ -710,24 +705,24 @@ class JerryDebugger(object):
|
||||
elif buffer_type in [JERRY_DEBUGGER_BREAKPOINT_HIT, JERRY_DEBUGGER_EXCEPTION_HIT]:
|
||||
breakpoint_data = struct.unpack(self.byte_order + self.cp_format + self.idx_format, data[1:])
|
||||
|
||||
breakpoint = self._get_breakpoint(breakpoint_data)
|
||||
self.last_breakpoint_hit = breakpoint[0]
|
||||
breakpt = self._get_breakpoint(breakpoint_data)
|
||||
self.last_breakpoint_hit = breakpt[0]
|
||||
|
||||
if buffer_type == JERRY_DEBUGGER_EXCEPTION_HIT:
|
||||
result += "Exception throw detected (to disable automatic stop type exception 0)\n"
|
||||
if self.exception_string:
|
||||
result += "Exception hint: %s\n" % (self.exception_string)
|
||||
result += f"Exception hint: {self.exception_string}\n"
|
||||
self.exception_string = ""
|
||||
|
||||
if breakpoint[1]:
|
||||
if breakpt[1]:
|
||||
breakpoint_info = "at"
|
||||
else:
|
||||
breakpoint_info = "around"
|
||||
|
||||
if breakpoint[0].active_index >= 0:
|
||||
breakpoint_info += " breakpoint:%s%d%s" % (self.red, breakpoint[0].active_index, self.nocolor)
|
||||
if breakpt[0].active_index >= 0:
|
||||
breakpoint_info += f" breakpoint:{self.red}{breakpt[0].active_index}{self.nocolor}"
|
||||
|
||||
result += "Stopped %s %s\n" % (breakpoint_info, breakpoint[0])
|
||||
result += f"Stopped {breakpoint_info} {breakpt[0]}\n"
|
||||
|
||||
if self.display > 0:
|
||||
result += self.print_source(self.display, self.src_offset)
|
||||
@ -740,7 +735,7 @@ class JerryDebugger(object):
|
||||
|
||||
elif buffer_type == JERRY_DEBUGGER_BACKTRACE_TOTAL:
|
||||
total = struct.unpack(self.byte_order + self.idx_format, data[1:])[0]
|
||||
result += "Total number of frames: %d\n" % (total)
|
||||
result += f"Total number of frames: {total}\n"
|
||||
return DebuggerAction(DebuggerAction.TEXT, result)
|
||||
|
||||
elif buffer_type in [JERRY_DEBUGGER_BACKTRACE, JERRY_DEBUGGER_BACKTRACE_END]:
|
||||
@ -751,9 +746,9 @@ class JerryDebugger(object):
|
||||
breakpoint_data = struct.unpack(self.byte_order + self.cp_format + self.idx_format,
|
||||
data[buffer_pos: buffer_pos + self.cp_size + 4])
|
||||
|
||||
breakpoint = self._get_breakpoint(breakpoint_data)
|
||||
breakpt = self._get_breakpoint(breakpoint_data)
|
||||
|
||||
result += "Frame %d: %s\n" % (frame_index, breakpoint[0])
|
||||
result += f"Frame {frame_index}: {breakpt[0]}\n"
|
||||
|
||||
frame_index += 1
|
||||
buffer_pos += self.cp_size + 4
|
||||
@ -779,11 +774,11 @@ class JerryDebugger(object):
|
||||
memory_stats = struct.unpack(self.byte_order + self.idx_format * 5,
|
||||
data[1: 1 + 4 * 5])
|
||||
|
||||
result += "Allocated bytes: %s\n" % memory_stats[0]
|
||||
result += "Byte code bytes: %s\n" % memory_stats[1]
|
||||
result += "String bytes: %s\n" % memory_stats[2]
|
||||
result += "Object bytes: %s\n" % memory_stats[3]
|
||||
result += "Property bytes: %s\n" % memory_stats[4]
|
||||
result += f"Allocated bytes: {memory_stats[0]}\n"
|
||||
result += f"Byte code bytes: {memory_stats[1]}\n"
|
||||
result += f"String bytes: {memory_stats[2]}\n"
|
||||
result += f"Object bytes: {memory_stats[3]}\n"
|
||||
result += f"Property bytes: {memory_stats[4]}\n"
|
||||
|
||||
self.prompt = True
|
||||
return DebuggerAction(DebuggerAction.TEXT, result)
|
||||
@ -828,7 +823,7 @@ class JerryDebugger(object):
|
||||
|
||||
lines = last_bp.function.source
|
||||
if last_bp.function.source_name:
|
||||
msg += "Source: %s\n" % (last_bp.function.source_name)
|
||||
msg += f"Source: {last_bp.function.source_name}\n"
|
||||
|
||||
if line_num == 0:
|
||||
start = 0
|
||||
@ -849,10 +844,9 @@ class JerryDebugger(object):
|
||||
|
||||
for i in range(start, end):
|
||||
if i == last_bp.line - 1:
|
||||
msg += "%s%4d%s %s>%s %s\n" % (self.green, i + 1, self.nocolor, self.red, \
|
||||
self.nocolor, lines[i])
|
||||
msg += f"{self.green}{i + 1:>4}{self.nocolor} {self.red}>{self.nocolor} {lines[i]}\n"
|
||||
else:
|
||||
msg += "%s%4d%s %s\n" % (self.green, i + 1, self.nocolor, lines[i])
|
||||
msg += f"{self.green}{i + 1:>4}{self.nocolor} {lines[i]}\n"
|
||||
|
||||
return msg
|
||||
|
||||
@ -883,7 +877,7 @@ class JerryDebugger(object):
|
||||
logging.error("Syntax error found")
|
||||
return ""
|
||||
|
||||
elif buffer_type in [JERRY_DEBUGGER_SOURCE_CODE, JERRY_DEBUGGER_SOURCE_CODE_END]:
|
||||
if buffer_type in [JERRY_DEBUGGER_SOURCE_CODE, JERRY_DEBUGGER_SOURCE_CODE_END]:
|
||||
source_code += data[1:]
|
||||
|
||||
elif buffer_type in [JERRY_DEBUGGER_SOURCE_CODE_NAME, JERRY_DEBUGGER_SOURCE_CODE_NAME_END]:
|
||||
@ -977,33 +971,33 @@ class JerryDebugger(object):
|
||||
self.function_list.update(new_function_list)
|
||||
|
||||
for function in new_function_list.values():
|
||||
for line, breakpoint in function.lines.items():
|
||||
self.line_list.insert(line, breakpoint)
|
||||
for line, breakpt in function.lines.items():
|
||||
self.line_list.insert(line, breakpt)
|
||||
|
||||
# Try to set the pending breakpoints
|
||||
if self.pending_breakpoint_list:
|
||||
logging.debug("Pending breakpoints available")
|
||||
bp_list = self.pending_breakpoint_list
|
||||
|
||||
for breakpoint_index, breakpoint in list(bp_list.items()):
|
||||
for breakpoint_index, breakpt in list(bp_list.items()):
|
||||
source_lines = 0
|
||||
for src in new_function_list.values():
|
||||
if (src.source_name == breakpoint.source_name or
|
||||
src.source_name.endswith("/" + breakpoint.source_name) or
|
||||
src.source_name.endswith("\\" + breakpoint.source_name)):
|
||||
if (src.source_name == breakpt.source_name or
|
||||
src.source_name.endswith("/" + breakpt.source_name) or
|
||||
src.source_name.endswith("\\" + breakpt.source_name)):
|
||||
source_lines = len(src.source)
|
||||
break
|
||||
|
||||
if breakpoint.line:
|
||||
if breakpoint.line <= source_lines:
|
||||
command = breakpoint.source_name + ":" + str(breakpoint.line)
|
||||
if breakpt.line:
|
||||
if breakpt.line <= source_lines:
|
||||
command = breakpt.source_name + ":" + str(breakpt.line)
|
||||
set_result = self._set_breakpoint(command, True)
|
||||
|
||||
if set_result:
|
||||
result += set_result
|
||||
del bp_list[breakpoint_index]
|
||||
elif breakpoint.function:
|
||||
command = breakpoint.function
|
||||
elif breakpt.function:
|
||||
command = breakpt.function
|
||||
set_result = self._set_breakpoint(command, True)
|
||||
|
||||
if set_result:
|
||||
@ -1024,39 +1018,33 @@ class JerryDebugger(object):
|
||||
|
||||
function = self.function_list[byte_code_cp]
|
||||
|
||||
for line, breakpoint in function.lines.items():
|
||||
self.line_list.delete(line, breakpoint)
|
||||
if breakpoint.active_index >= 0:
|
||||
del self.active_breakpoint_list[breakpoint.active_index]
|
||||
for line, breakpt in function.lines.items():
|
||||
self.line_list.delete(line, breakpt)
|
||||
if breakpt.active_index >= 0:
|
||||
del self.active_breakpoint_list[breakpt.active_index]
|
||||
|
||||
del self.function_list[byte_code_cp]
|
||||
self._send_bytecode_cp(byte_code_cp)
|
||||
logging.debug("Function {0x%x} byte-code released", byte_code_cp)
|
||||
|
||||
|
||||
def _enable_breakpoint(self, breakpoint):
|
||||
if isinstance(breakpoint, JerryPendingBreakpoint):
|
||||
if self._breakpoint_pending_exists(breakpoint):
|
||||
return "%sPending breakpoint%s already exists\n" % (self.yellow, self.nocolor)
|
||||
def _enable_breakpoint(self, breakpt):
|
||||
if isinstance(breakpt, JerryPendingBreakpoint):
|
||||
if self._breakpoint_pending_exists(breakpt):
|
||||
return f"{self.yellow}Pending breakpoint{self.nocolor} already exists\n"
|
||||
|
||||
self.next_breakpoint_index += 1
|
||||
breakpoint.index = self.next_breakpoint_index
|
||||
self.pending_breakpoint_list[self.next_breakpoint_index] = breakpoint
|
||||
return ("%sPending breakpoint %d%s at %s\n" % (self.yellow,
|
||||
breakpoint.index,
|
||||
self.nocolor,
|
||||
breakpoint))
|
||||
breakpt.index = self.next_breakpoint_index
|
||||
self.pending_breakpoint_list[self.next_breakpoint_index] = breakpt
|
||||
return f"{self.yellow}Pending breakpoint {breakpt.index}{self.nocolor} at {breakpt}\n"
|
||||
|
||||
if breakpoint.active_index < 0:
|
||||
if breakpt.active_index < 0:
|
||||
self.next_breakpoint_index += 1
|
||||
self.active_breakpoint_list[self.next_breakpoint_index] = breakpoint
|
||||
breakpoint.active_index = self.next_breakpoint_index
|
||||
self._send_breakpoint(breakpoint)
|
||||
self.active_breakpoint_list[self.next_breakpoint_index] = breakpt
|
||||
breakpt.active_index = self.next_breakpoint_index
|
||||
self._send_breakpoint(breakpt)
|
||||
|
||||
return "%sBreakpoint %d%s at %s\n" % (self.green,
|
||||
breakpoint.active_index,
|
||||
self.nocolor,
|
||||
breakpoint)
|
||||
return f"{self.green}Breakpoint {breakpt.active_index}{self.nocolor} at {breakpt}\n"
|
||||
|
||||
|
||||
def _set_breakpoint(self, string, pending):
|
||||
@ -1067,13 +1055,13 @@ class JerryDebugger(object):
|
||||
source_name = line.group(1)
|
||||
new_line = int(line.group(2))
|
||||
|
||||
for breakpoint in self.line_list.get(new_line):
|
||||
func_source = breakpoint.function.source_name
|
||||
for breakpt in self.line_list.get(new_line):
|
||||
func_source = breakpt.function.source_name
|
||||
if (source_name == func_source or
|
||||
func_source.endswith("/" + source_name) or
|
||||
func_source.endswith("\\" + source_name)):
|
||||
|
||||
result += self._enable_breakpoint(breakpoint)
|
||||
result += self._enable_breakpoint(breakpt)
|
||||
|
||||
else:
|
||||
functions_to_enable = []
|
||||
@ -1087,8 +1075,8 @@ class JerryDebugger(object):
|
||||
result += self._enable_breakpoint(function.lines[function.first_breakpoint_line])
|
||||
|
||||
if not result and not pending:
|
||||
print("No breakpoint found, do you want to add a %spending breakpoint%s? (y or [n]) " % \
|
||||
(self.yellow, self.nocolor), end='')
|
||||
print(f"No breakpoint found, do you want to add a "
|
||||
f"{self.yellow}pending breakpoint{self.nocolor}? (y or [n]) ", end='')
|
||||
|
||||
ans = sys.stdin.readline()
|
||||
if ans in ['yes\n', 'y\n']:
|
||||
@ -1096,10 +1084,10 @@ class JerryDebugger(object):
|
||||
self._send_parser_config(1)
|
||||
|
||||
if line:
|
||||
breakpoint = JerryPendingBreakpoint(int(line.group(2)), line.group(1))
|
||||
breakpt = JerryPendingBreakpoint(int(line.group(2)), line.group(1))
|
||||
else:
|
||||
breakpoint = JerryPendingBreakpoint(function=string)
|
||||
result += self._enable_breakpoint(breakpoint)
|
||||
breakpt = JerryPendingBreakpoint(function=string)
|
||||
result += self._enable_breakpoint(breakpt)
|
||||
|
||||
return result
|
||||
|
||||
@ -1117,7 +1105,7 @@ class JerryDebugger(object):
|
||||
nearest_offset = -1
|
||||
|
||||
for current_offset in function.offsets:
|
||||
if current_offset <= offset and current_offset > nearest_offset:
|
||||
if offset >= current_offset > nearest_offset:
|
||||
nearest_offset = current_offset
|
||||
|
||||
return (function.offsets[nearest_offset], False)
|
||||
@ -1131,8 +1119,7 @@ class JerryDebugger(object):
|
||||
subtype = data[-1]
|
||||
message += data[1:-1].decode('utf8')
|
||||
break
|
||||
else:
|
||||
message += data[1:].decode('utf8')
|
||||
message += data[1:].decode('utf8')
|
||||
|
||||
data = self.channel.get_message(True)
|
||||
buffer_type = data[0]
|
||||
@ -1149,14 +1136,14 @@ class JerryDebugger(object):
|
||||
lines = message.split("\n")
|
||||
self.current_out = lines.pop()
|
||||
|
||||
return "".join(["%s\n" % line for line in lines])
|
||||
return "".join([f"{line}\n" for line in lines])
|
||||
|
||||
if subtype == JERRY_DEBUGGER_OUTPUT_LOG:
|
||||
message = self.current_log + message
|
||||
lines = message.split("\n")
|
||||
self.current_log = lines.pop()
|
||||
|
||||
return "".join(["%s\n" % line for line in lines])
|
||||
return "".join([f"{line}\n" for line in lines])
|
||||
|
||||
# Subtypes of eval
|
||||
self.prompt = True
|
||||
@ -1165,7 +1152,7 @@ class JerryDebugger(object):
|
||||
message += "\n"
|
||||
|
||||
if subtype == JERRY_DEBUGGER_EVAL_ERROR:
|
||||
return "Uncaught exception: %s" % (message)
|
||||
return f"Uncaught exception: {message}"
|
||||
return message
|
||||
|
||||
def _process_scope_variables(self):
|
||||
@ -1239,7 +1226,6 @@ class JerryDebugger(object):
|
||||
result = ""
|
||||
col_width = [max(len(x) for x in col) for col in zip(*table)]
|
||||
for line in table:
|
||||
result += " | ".join("{:{}}".format(x, col_width[i])
|
||||
for i, x in enumerate(line)) + " \n"
|
||||
result += " | ".join(f"{x:<{col_width[i]}}" for i, x in enumerate(line)) + " \n"
|
||||
|
||||
return result
|
||||
|
||||
@ -1,5 +1,3 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright JS Foundation and other contributors, http://js.foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -18,7 +16,7 @@ import struct
|
||||
|
||||
MAX_BUFFER_SIZE = 256
|
||||
|
||||
class RawPacket(object):
|
||||
class RawPacket:
|
||||
""" Simplified transmission layer. """
|
||||
def __init__(self, protocol):
|
||||
self.protocol = protocol
|
||||
|
||||
@ -1,5 +1,3 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright JS Foundation and other contributors, http://js.foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -17,7 +15,7 @@
|
||||
import select
|
||||
import serial
|
||||
|
||||
class Serial(object):
|
||||
class Serial:
|
||||
""" Create a new socket using the given address family, socket type and protocol number. """
|
||||
def __init__(self, serial_config):
|
||||
config = serial_config.split(',')
|
||||
|
||||
@ -1,5 +1,3 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright JS Foundation and other contributors, http://js.foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -18,7 +16,7 @@ import socket
|
||||
import select
|
||||
|
||||
# pylint: disable=too-many-arguments,superfluous-parens
|
||||
class Socket(object):
|
||||
class Socket:
|
||||
""" Create a new socket using the given address family, socket type and protocol number. """
|
||||
def __init__(self, address, socket_family=socket.AF_INET, socket_type=socket.SOCK_STREAM, proto=0, fileno=None):
|
||||
self.address = address
|
||||
@ -29,7 +27,7 @@ class Socket(object):
|
||||
Connect to a remote socket at address (host, port).
|
||||
The format of address depends on the address family.
|
||||
"""
|
||||
print("Connecting to: %s:%s" % (self.address[0], self.address[1]))
|
||||
print(f"Connecting to: {self.address[0]}:{self.address[1]}")
|
||||
self.socket.connect(self.address)
|
||||
|
||||
def close(self):
|
||||
|
||||
@ -1,5 +1,3 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright JS Foundation and other contributors, http://js.foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -20,7 +18,7 @@ MAX_BUFFER_SIZE = 128
|
||||
WEBSOCKET_BINARY_FRAME = 2
|
||||
WEBSOCKET_FIN_BIT = 0x80
|
||||
|
||||
class WebSocket(object):
|
||||
class WebSocket:
|
||||
def __init__(self, protocol):
|
||||
|
||||
self.data_buffer = b""
|
||||
|
||||
@ -14,8 +14,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import fnmatch
|
||||
import json
|
||||
@ -30,7 +28,7 @@ JERRY_PORT = os.path.join(ROOT_DIR, 'jerry-port')
|
||||
JERRY_MATH = os.path.join(ROOT_DIR, 'jerry-math')
|
||||
|
||||
|
||||
class Amalgamator(object):
|
||||
class Amalgamator:
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
|
||||
_RE_INCLUDE = re.compile(r'\s*#include ("|<)(.*?)("|>)\n$')
|
||||
@ -70,7 +68,7 @@ class Amalgamator(object):
|
||||
return
|
||||
|
||||
normalized_path = repr(os.path.normpath(filename))[1:-1]
|
||||
line_info = '#line %d "%s"\n' % (line_number, normalized_path)
|
||||
line_info = f'#line {line_number} "{normalized_path}"\n'
|
||||
|
||||
if self._output and self._output[-1].startswith('#line'):
|
||||
# Avoid emitting multiple line infos in sequence, just overwrite the last one
|
||||
@ -92,7 +90,7 @@ class Amalgamator(object):
|
||||
self._emit_lineinfo(1, filename)
|
||||
|
||||
line_idx = 0
|
||||
with open(filename, 'r') as input_file:
|
||||
with open(filename, 'r', encoding='utf8') as input_file:
|
||||
in_copyright = False
|
||||
for line in input_file:
|
||||
line_idx += 1
|
||||
@ -166,7 +164,7 @@ class Amalgamator(object):
|
||||
out_fp.write(line)
|
||||
|
||||
for include in self._extra_includes:
|
||||
out_fp.write('#include "%s"\n' % include)
|
||||
out_fp.write(f'#include "{include}"\n')
|
||||
|
||||
for line in self._output:
|
||||
out_fp.write(line)
|
||||
@ -242,7 +240,7 @@ def amalgamate(base_dir, input_files=(), output_file=None,
|
||||
for fname in sorted(c_files.values(), reverse=True):
|
||||
amalgam.add_file(fname)
|
||||
|
||||
with open(output_file, 'w') as output:
|
||||
with open(output_file, 'w', encoding='utf8') as output:
|
||||
amalgam.write_output(output)
|
||||
|
||||
|
||||
|
||||
@ -14,8 +14,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import multiprocessing
|
||||
import os
|
||||
@ -32,7 +30,7 @@ def default_toolchain():
|
||||
(sysname, _, _, _, machine) = os.uname()
|
||||
toolchain = os.path.join(settings.PROJECT_DIR,
|
||||
'cmake',
|
||||
'toolchain_%s_%s.cmake' % (sysname.lower(), machine.lower()))
|
||||
f'toolchain_{sysname.lower()}_{machine.lower()}.cmake')
|
||||
return toolchain if os.path.isfile(toolchain) else None
|
||||
|
||||
def get_arguments():
|
||||
@ -179,7 +177,7 @@ def generate_build_options(arguments):
|
||||
|
||||
def build_options_append(cmakeopt, cliarg):
|
||||
if cliarg:
|
||||
build_options.append('-D%s=%s' % (cmakeopt, cliarg))
|
||||
build_options.append(f'-D{cmakeopt}={cliarg}')
|
||||
|
||||
# general build options
|
||||
build_options_append('CMAKE_BUILD_TYPE', arguments.build_type)
|
||||
@ -231,7 +229,7 @@ def generate_build_options(arguments):
|
||||
build_options_append('JERRY_VM_THROW', arguments.vm_throw)
|
||||
|
||||
if arguments.gc_mark_limit is not None:
|
||||
build_options.append('-D%s=%s' % ('JERRY_GC_MARK_LIMIT', arguments.gc_mark_limit))
|
||||
build_options.append(f'-DJERRY_GC_MARK_LIMIT={arguments.gc_mark_limit}')
|
||||
|
||||
# jerry-main options
|
||||
build_options_append('ENABLE_LINK_MAP', arguments.link_map)
|
||||
@ -261,7 +259,7 @@ def configure_jerry(arguments):
|
||||
cmake_cmd = ['cmake', '-B' + arguments.builddir, '-H' + settings.PROJECT_DIR]
|
||||
|
||||
if arguments.install:
|
||||
cmake_cmd.append('-DCMAKE_INSTALL_PREFIX=%s' % arguments.install)
|
||||
cmake_cmd.append(f'-DCMAKE_INSTALL_PREFIX={arguments.install}')
|
||||
|
||||
cmake_cmd.extend(build_options)
|
||||
|
||||
@ -271,11 +269,10 @@ def make_jerry(arguments):
|
||||
make_cmd = ['cmake', '--build', arguments.builddir, '--config', arguments.build_type]
|
||||
env = dict(os.environ)
|
||||
env['CMAKE_BUILD_PARALLEL_LEVEL'] = str(arguments.jobs)
|
||||
env['MAKEFLAGS'] = '-j%d' % (arguments.jobs) # Workaround for CMake < 3.12
|
||||
proc = subprocess.Popen(make_cmd, env=env)
|
||||
proc.wait()
|
||||
|
||||
return proc.returncode
|
||||
env['MAKEFLAGS'] = f'-j{arguments.jobs}' # Workaround for CMake < 3.12
|
||||
with subprocess.Popen(make_cmd, env=env) as proc:
|
||||
proc.wait()
|
||||
return proc.returncode
|
||||
|
||||
def install_jerry(arguments):
|
||||
install_target = 'INSTALL' if os.path.exists(os.path.join(arguments.builddir, 'Jerry.sln')) else 'install'
|
||||
@ -285,7 +282,7 @@ def install_jerry(arguments):
|
||||
def print_result(ret):
|
||||
print('=' * 30)
|
||||
if ret:
|
||||
print('Build failed with exit code: %s' % (ret))
|
||||
print(f'Build failed with exit code: {ret}')
|
||||
else:
|
||||
print('Build succeeded!')
|
||||
print('=' * 30)
|
||||
|
||||
@ -14,8 +14,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import multiprocessing
|
||||
import subprocess
|
||||
@ -48,7 +46,7 @@ def get_arguments():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--fix', action='store_true', dest='fix',
|
||||
help='fix source code stlye')
|
||||
parser.add_argument('--clang-format', dest='clang_format', default='clang-format-%d' % CLANG_FORMAT_MIN_VERSION,
|
||||
parser.add_argument('--clang-format', dest='clang_format', default=f'clang-format-{CLANG_FORMAT_MIN_VERSION}',
|
||||
help='path to clang-format executable')
|
||||
|
||||
script_args = parser.parse_args()
|
||||
@ -65,14 +63,13 @@ def check_clang_format(args, source_file_name):
|
||||
|
||||
cmd.append(source_file_name)
|
||||
|
||||
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
_, error = proc.communicate()
|
||||
with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
|
||||
_, error = proc.communicate()
|
||||
|
||||
if proc.returncode == 0:
|
||||
return 0
|
||||
if proc.returncode == 0:
|
||||
return 0
|
||||
|
||||
print(error.decode('utf8'))
|
||||
print(error.decode('utf8'))
|
||||
|
||||
return 1
|
||||
|
||||
@ -111,26 +108,24 @@ def check_clang_format_version(args):
|
||||
|
||||
def main(args):
|
||||
if check_clang_format_version(args) != 0:
|
||||
print("clang-format >= %d is not installed." %
|
||||
CLANG_FORMAT_MIN_VERSION)
|
||||
print(f"clang-format >= {CLANG_FORMAT_MIN_VERSION} is not installed.")
|
||||
return 1
|
||||
|
||||
pool = multiprocessing.Pool()
|
||||
failed = 0
|
||||
with multiprocessing.Pool() as pool:
|
||||
failed = 0
|
||||
|
||||
for folder in FOLDERS:
|
||||
# pylint: disable=unexpected-keyword-arg
|
||||
files = sum(([glob(path.join(PROJECT_DIR, folder, "**/*.%s" % e), recursive=True)
|
||||
for e in ['c', 'h']]), [])
|
||||
for folder in FOLDERS:
|
||||
# pylint: disable=unexpected-keyword-arg
|
||||
files = sum(([glob(path.join(PROJECT_DIR, folder, f"**/*.{e}"), recursive=True)
|
||||
for e in ['c', 'h']]), [])
|
||||
|
||||
failed += run_pass(pool, check_clang_format,
|
||||
[(args, sourece_file) for sourece_file in files])
|
||||
failed += run_pass(pool, check_comments,
|
||||
[([RE_DIRECTIVE_COMMENT, RE_FUNCTION_NAME_COMMENT], sourece_file) for sourece_file in files])
|
||||
failed += run_pass(pool, check_clang_format,
|
||||
[(args, sourece_file) for sourece_file in files])
|
||||
failed += run_pass(pool, check_comments,
|
||||
[([RE_DIRECTIVE_COMMENT, RE_FUNCTION_NAME_COMMENT], sourece_file) for sourece_file in
|
||||
files])
|
||||
|
||||
pool.close()
|
||||
|
||||
return 1 if failed else 0
|
||||
return 1 if failed else 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@ -14,8 +14,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
@ -78,9 +76,9 @@ def main():
|
||||
for fname in files:
|
||||
if any(fname.endswith(ext) for ext in EXTENSIONS):
|
||||
fpath = os.path.join(root, fname)
|
||||
with io.open(fpath, 'r', errors='ignore') as curr_file:
|
||||
with io.open(fpath, 'r', errors='ignore', encoding='utf8') as curr_file:
|
||||
if not LICENSE.search(curr_file.read()):
|
||||
print('%s: incorrect license' % fpath)
|
||||
print(f'{fpath}: incorrect license')
|
||||
is_ok = False
|
||||
|
||||
if not is_ok:
|
||||
|
||||
@ -14,8 +14,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import fileinput
|
||||
import os
|
||||
@ -24,7 +22,7 @@ import shlex
|
||||
import sys
|
||||
|
||||
|
||||
class DoctestExtractor(object):
|
||||
class DoctestExtractor:
|
||||
"""
|
||||
An extractor to process Markdown files and find doctests inside.
|
||||
"""
|
||||
@ -50,7 +48,7 @@ class DoctestExtractor(object):
|
||||
:param message: a description of the problem.
|
||||
:param lineno: the location that triggered the warning.
|
||||
"""
|
||||
print('%s:%d: %s' % (self._infile, lineno, message), file=sys.stderr)
|
||||
print(f'{self._infile}:{lineno}: {message}', file=sys.stderr)
|
||||
|
||||
def _process_decl(self, params):
|
||||
"""
|
||||
@ -72,7 +70,7 @@ class DoctestExtractor(object):
|
||||
decl[tokens[i]] = tokens[i + 2].strip('\'"')
|
||||
|
||||
if 'name' not in decl:
|
||||
decl['name'] = '%s%d.c' % (self._outname_base, self._outname_cnt)
|
||||
decl['name'] = f'{self._outname_base}{self._outname_cnt}.c'
|
||||
self._outname_cnt += 1
|
||||
|
||||
if 'test' not in decl:
|
||||
@ -87,7 +85,7 @@ class DoctestExtractor(object):
|
||||
:return: a tuple of a list (of the first line(s) of the doctest) and the
|
||||
line number of the start of the code block.
|
||||
"""
|
||||
return ['#line %d "%s"\n' % (fileinput.filelineno() + 1, self._infile)], fileinput.filelineno()
|
||||
return [f'#line {fileinput.filelineno() + 1} "{self._infile}"\n'], fileinput.filelineno()
|
||||
|
||||
def _process_code_end(self, decl, code):
|
||||
"""
|
||||
@ -99,9 +97,9 @@ class DoctestExtractor(object):
|
||||
outname = os.path.join(self._outdir, decl['name']).replace('\\', '/')
|
||||
action = decl['test']
|
||||
if self._dry:
|
||||
print('%s %s' % (action, outname))
|
||||
print(f'{action} {outname}')
|
||||
else:
|
||||
with open(outname, 'w') as outfile:
|
||||
with open(outname, 'w', encoding='utf8') as outfile:
|
||||
outfile.writelines(code)
|
||||
|
||||
def process(self, infile):
|
||||
|
||||
@ -14,8 +14,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
try:
|
||||
from configparser import ConfigParser
|
||||
except ImportError:
|
||||
@ -23,13 +21,14 @@ except ImportError:
|
||||
|
||||
import argparse
|
||||
import fileinput
|
||||
import subprocess
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from settings import FORMAT_SCRIPT, PROJECT_DIR
|
||||
|
||||
from gen_c_source import LICENSE
|
||||
|
||||
MAGIC_STRINGS_INI = os.path.join(PROJECT_DIR, 'jerry-core', 'lit', 'lit-magic-strings.ini')
|
||||
MAGIC_STRINGS_INC_H = os.path.join(PROJECT_DIR, 'jerry-core', 'lit', 'lit-magic-strings.inc.h')
|
||||
@ -42,6 +41,7 @@ PARSER_ERRORS_INC_H = os.path.join(PROJECT_DIR, 'jerry-core', 'parser', 'js', 'p
|
||||
|
||||
LIMIT_MAGIC_STR_LENGTH = 255
|
||||
|
||||
|
||||
def debug_dump(obj):
|
||||
def deepcopy(obj):
|
||||
if isinstance(obj, (list, tuple)):
|
||||
@ -51,6 +51,7 @@ def debug_dump(obj):
|
||||
if isinstance(obj, dict):
|
||||
return {repr(k): deepcopy(e) for k, e in obj.items()}
|
||||
return obj
|
||||
|
||||
return json.dumps(deepcopy(obj), indent=4)
|
||||
|
||||
|
||||
@ -64,7 +65,7 @@ def read_magic_string_defs(debug, ini_path, item_name):
|
||||
# [('LIT_MAGIC_STRING_xxx', 'vvv'), ...]
|
||||
# sorted by length and alpha.
|
||||
ini_parser = ConfigParser()
|
||||
ini_parser.optionxform = str # case sensitive options (magic string IDs)
|
||||
ini_parser.optionxform = str # case sensitive options (magic string IDs)
|
||||
ini_parser.read(ini_path)
|
||||
|
||||
defs = [(str_ref, json.loads(str_value) if str_value != '' else '')
|
||||
@ -73,13 +74,12 @@ def read_magic_string_defs(debug, ini_path, item_name):
|
||||
|
||||
if len(defs[-1][1]) > LIMIT_MAGIC_STR_LENGTH:
|
||||
for str_ref, str_value in [x for x in defs if len(x[1]) > LIMIT_MAGIC_STR_LENGTH]:
|
||||
print("error: The maximum allowed magic string size is {limit} but {str_ref} is {str_len} long.".format(
|
||||
limit=LIMIT_MAGIC_STR_LENGTH, str_ref=str_ref, str_len=len(str_value)))
|
||||
exit(1)
|
||||
print(f"error: The maximum allowed magic string size is "
|
||||
f"{LIMIT_MAGIC_STR_LENGTH} but {str_ref} is {len(str_value)} long.")
|
||||
sys.exit(1)
|
||||
|
||||
if debug:
|
||||
print('debug: magic string definitions: {dump}'
|
||||
.format(dump=debug_dump(defs)))
|
||||
print(f'debug: magic string definitions: {debug_dump(defs)}')
|
||||
|
||||
return defs
|
||||
|
||||
@ -93,12 +93,12 @@ def extract_magic_string_refs(debug, pattern, inc_h_filename):
|
||||
# = [('zzz.c', 123), ...]
|
||||
# meaning that the given literal is referenced under the given guards at
|
||||
# the listed (file, line number) locations.
|
||||
exception_list = ['%s_DEF' % pattern,
|
||||
'%s_FIRST_STRING_WITH_SIZE' % pattern,
|
||||
'%s_LENGTH_LIMIT' % pattern,
|
||||
'%s__COUNT' % pattern]
|
||||
exception_list = [f'{pattern}_DEF',
|
||||
f'{pattern}_FIRST_STRING_WITH_SIZE',
|
||||
f'{pattern}_LENGTH_LIMIT',
|
||||
f'{pattern}__COUNT']
|
||||
|
||||
for str_ref in re.findall('%s_[a-zA-Z0-9_]+' % pattern, line):
|
||||
for str_ref in re.findall(f'{pattern}_[a-zA-Z0-9_]+', line):
|
||||
if str_ref in exception_list:
|
||||
continue
|
||||
|
||||
@ -144,11 +144,11 @@ def extract_magic_string_refs(debug, pattern, inc_h_filename):
|
||||
guard_stack.append([process_guard(if_match.group(1))])
|
||||
elif elif_match is not None:
|
||||
guards = guard_stack[-1]
|
||||
guards[-1] = '!(%s)' % guards[-1].strip()
|
||||
guards[-1] = f'!({guards[-1].strip()})'
|
||||
guards.append(process_guard(elif_match.group(1)))
|
||||
elif else_match is not None:
|
||||
guards = guard_stack[-1]
|
||||
guards[-1] = '!(%s)' % guards[-1].strip()
|
||||
guards[-1] = f'!({guards[-1].strip()})'
|
||||
elif endif_match is not None:
|
||||
guard_stack.pop()
|
||||
|
||||
@ -156,20 +156,18 @@ def extract_magic_string_refs(debug, pattern, inc_h_filename):
|
||||
process_line(fname, lnum, line, guard_stack, pattern)
|
||||
|
||||
if guard_stack:
|
||||
print('warning: {fname}: unbalanced preprocessor conditional '
|
||||
'directives (analysis finished with no closing `#endif` '
|
||||
'for {guard_stack})'
|
||||
.format(fname=fname, guard_stack=guard_stack))
|
||||
print(f'warning: {fname}: unbalanced preprocessor conditional '
|
||||
f'directives (analysis finished with no closing `#endif` '
|
||||
f'for {guard_stack})')
|
||||
|
||||
for root, _, files in os.walk(os.path.join(PROJECT_DIR, 'jerry-core')):
|
||||
for fname in files:
|
||||
if (fname.endswith('.c') or fname.endswith('.h')) \
|
||||
and fname != inc_h_filename:
|
||||
and fname != inc_h_filename:
|
||||
process_file(os.path.join(root, fname), pattern)
|
||||
|
||||
if debug:
|
||||
print('debug: magic string references: {dump}'
|
||||
.format(dump=debug_dump(results)))
|
||||
print(f'debug: magic string references: {debug_dump(results)}')
|
||||
|
||||
return results
|
||||
|
@ -179,8 +177,7 @@ def calculate_magic_string_guards(defs, uses, debug=False):

for str_ref, str_value in defs:
if str_ref not in uses:
print('warning: unused magic string {str_ref}'
.format(str_ref=str_ref))
print(f'warning: unused magic string {str_ref}')
continue

# Calculate the most compact guard, i.e., if a magic string is
@ -208,8 +205,7 @@ def calculate_magic_string_guards(defs, uses, debug=False):
extended_defs.append((str_ref, str_value, guards))

if debug:
print('debug: magic string definitions (with guards): {dump}'
.format(dump=debug_dump(extended_defs)))
print(f'debug: magic string definitions (with guards): {debug_dump(extended_defs)}')

return extended_defs

@ -220,25 +216,11 @@ def guards_to_str(guards):


def generate_header(gen_file, ini_path):
header = \
"""/* Copyright JS Foundation and other contributors, http://js.foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
header = f"""{LICENSE}

/* This file is automatically generated by the %s script
* from %s. Do not edit! */
""" % (os.path.basename(__file__), os.path.basename(ini_path))
/* This file is automatically generated by the {os.path.basename(__file__)} script
* from {os.path.basename(ini_path)}. Do not edit! */
"""
print(header, file=gen_file)


@ -247,21 +229,20 @@ def generate_magic_string_defs(gen_file, defs, def_macro):
for str_ref, str_value, guards in defs:
if last_guards != guards:
if () not in last_guards:
print('#endif /* {guards} */'.format(guards=guards_to_str(last_guards)), file=gen_file)
print(f'#endif /* {guards_to_str(last_guards)} */', file=gen_file)
if () not in guards:
print('#if {guards}'.format(guards=guards_to_str(guards)), file=gen_file)
print(f'#if {guards_to_str(guards)}', file=gen_file)

print('{macro} ({str_ref}, {str_value})'
.format(macro=def_macro, str_ref=str_ref, str_value=json.dumps(str_value)), file=gen_file)
print(f'{def_macro} ({str_ref}, {json.dumps(str_value)})', file=gen_file)

last_guards = guards

if () not in last_guards:
print('#endif /* {guards} */'.format(guards=guards_to_str(last_guards)), file=gen_file)
print(f'#endif /* {guards_to_str(last_guards)} */', file=gen_file)


def generate_first_magic_strings(gen_file, defs, with_size_macro):
print(file=gen_file) # empty line separator
print(file=gen_file) # empty line separator

max_size = len(defs[-1][1])
for size in range(max_size + 1):
@ -269,16 +250,15 @@ def generate_first_magic_strings(gen_file, defs, with_size_macro):
for str_ref, str_value, guards in defs:
if len(str_value) >= size:
if () not in guards and () in last_guards:
print('#if {guards}'.format(guards=guards_to_str(guards)), file=gen_file)
print(f'#if {guards_to_str(guards)}', file=gen_file)
elif () not in guards and () not in last_guards:
if guards == last_guards:
continue
print('#elif {guards}'.format(guards=guards_to_str(guards)), file=gen_file)
print(f'#elif {guards_to_str(guards)}', file=gen_file)
elif () in guards and () not in last_guards:
print('#else /* !({guards}) */'.format(guards=guards_to_str(last_guards)), file=gen_file)
print(f'#else /* !({guards_to_str(last_guards)}) */', file=gen_file)

print('{macro} ({size}, {str_ref})'
.format(macro=with_size_macro, size=size, str_ref=str_ref), file=gen_file)
print(f'{with_size_macro} ({size}, {str_ref})', file=gen_file)

if () in guards:
break
@ -286,7 +266,8 @@ def generate_first_magic_strings(gen_file, defs, with_size_macro):
last_guards = guards

if () not in last_guards:
print('#endif /* {guards} */'.format(guards=guards_to_str(last_guards)), file=gen_file)
print(f'#endif /* {guards_to_str(last_guards)} */', file=gen_file)


def generate_magic_strings(args, ini_path, item_name, pattern, inc_h_path, def_macro, with_size_macro=None):
defs = read_magic_string_defs(args.debug, ini_path, item_name)
@ -294,12 +275,13 @@ def generate_magic_strings(args, ini_path, item_name, pattern, inc_h_path, def_m

extended_defs = calculate_magic_string_guards(defs, uses, debug=args.debug)

with open(inc_h_path, 'w') as gen_file:
with open(inc_h_path, 'w', encoding='utf8') as gen_file:
generate_header(gen_file, ini_path)
generate_magic_string_defs(gen_file, extended_defs, def_macro)
if with_size_macro:
generate_first_magic_strings(gen_file, extended_defs, with_size_macro)


def main():
parser = argparse.ArgumentParser(description='lit-magic-strings.inc.h generator')
parser.add_argument('--debug', action='store_true', help='enable debug output')
@ -329,5 +311,6 @@ def main():

subprocess.call([FORMAT_SCRIPT, '--fix'])


if __name__ == '__main__':
main()

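Note: the recurring change in the hunks above is mechanical: every `%`-interpolation and `.format()` call becomes an f-string with the same expression inlined. A minimal sketch of the equivalence, with illustrative values rather than the script's real data:

# Illustrative values only; the real script iterates over magic string definitions.
str_ref = 'LIT_MAGIC_STRING_XYZ'
old_style = 'warning: unused magic string {str_ref}'.format(str_ref=str_ref)
new_style = f'warning: unused magic string {str_ref}'
assert old_style == new_style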
@ -14,8 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import argparse
import csv
import itertools
@ -45,7 +43,7 @@ UNICODE_PLANE_TYPE_SUPPLEMENTARY = 1

# common code generation

class UnicodeBasicSource(object):
class UnicodeBasicSource:
# pylint: disable=too-many-instance-attributes
def __init__(self, filepath, character_type="uint16_t", length_type="uint8_t"):
self._filepath = filepath
@ -81,7 +79,7 @@ class UnicodeBasicSource(object):
idx = 0
for table in tables:
self.add_table(table,
"/**\n * %s %s.\n */" % (self._range_table_descriptions[idx], category),
f"/**\n * {self._range_table_descriptions[idx]} {category}.\n */",
self._range_table_types[idx],
category,
self._range_table_names[idx])
@ -103,18 +101,16 @@ class UnicodeBasicSource(object):
def add_table(self, table, description, table_type, category, table_name):
if table and sum(table) != 0:
self._data.append(description)
self._data.append("static const %s lit_unicode_%s%s%s[] JERRY_ATTR_CONST_DATA ="
% (table_type,
category.lower(),
"_" + table_name if table_name else "",
self._table_name_suffix))
self._data.append(f"static const {table_type} lit_unicode_{category.lower()}"
f"{'_' + table_name if table_name else ''}{self._table_name_suffix}"
f"[] JERRY_ATTR_CONST_DATA =")
self._data.append("{")
self._data.append(format_code(table, 1, 6 if self._table_name_suffix else 4))
self._data.append("};")
self._data.append("") # for an extra empty line

def generate(self):
with open(self._filepath, 'w') as generated_source:
with open(self._filepath, 'w', encoding='utf8') as generated_source:
generated_source.write("\n".join(self._header))
generated_source.write("\n".join(self._data))

@ -127,14 +123,14 @@ class UnicodeSupplementarySource(UnicodeBasicSource):
def add_whitepace_range(self, category, categorizer, units):
self.add_range(category, categorizer.create_tables(units))

class UnicodeBasicCategorizer(object):
class UnicodeBasicCategorizer:
def __init__(self):
self._length_limit = 0xff
self.extra_id_continue_units = set([0x200C, 0x200D])

#pylint: disable=no-self-use
def in_range(self, i):
return i >= 0x80 and i < 0x10000
return 0x80 <= i < 0x10000

def _group_ranges(self, units):
"""
@ -194,7 +190,7 @@ class UnicodeBasicCategorizer(object):
# <HEX>..<HEX> ; <category> # <subcategory>
matcher = r"(?P<start>[\dA-F]+)(?:\.\.(?P<end>[\dA-F]+))?\s+; (?P<category>[\w]+) # (?P<subcategory>[\w&]{2})"

with open(file_path, "r") as src_file:
with open(file_path, "r", encoding='utf8') as src_file:
for line in src_file:
match = re.match(matcher, line)

@ -227,7 +223,7 @@ class UnicodeBasicCategorizer(object):
upper_case_mapping = {}

# Add one-to-one mappings
with open(unicode_data_file) as unicode_data:
with open(unicode_data_file, encoding='utf8') as unicode_data:
reader = csv.reader(unicode_data, delimiter=';')

for line in reader:
@ -246,7 +242,7 @@ class UnicodeBasicCategorizer(object):
lower_case_mapping[letter_id] = parse_unicode_sequence(small_letter)

# Update the conversion tables with the special cases
with open(special_casing_file) as special_casing:
with open(special_casing_file, encoding='utf8') as special_casing:
reader = csv.reader(special_casing, delimiter=';')

for line in reader:
@ -293,8 +289,8 @@ def generate_ranges(script_args, plane_type):
c_source = UnicodeBasicSource(RANGES_C_SOURCE)
categorizer = UnicodeBasicCategorizer()

header_completion = ["/* This file is automatically generated by the %s script" % os.path.basename(__file__),
" * from %s. Do not edit! */" % (DERIVED_PROPS_FILE),
header_completion = [f"/* This file is automatically generated by the {os.path.basename(__file__)} script",
f" * from {DERIVED_PROPS_FILE}. Do not edit! */",
""]

c_source.complete_header("\n".join(header_completion))
@ -652,8 +648,8 @@ def generate_conversions(script_args, plane_type):
c_source = UnicodeBasicSource(CONVERSIONS_C_SOURCE)
categorizer = UnicodeBasicCategorizer()

header_completion = ["/* This file is automatically generated by the %s script" % os.path.basename(__file__),
" * from %s and %s files. Do not edit! */" % (UNICODE_DATA_FILE, SPECIAL_CASING_FILE),
header_completion = [f"/* This file is automatically generated by the {os.path.basename(__file__)} script",
f" * from {UNICODE_DATA_FILE} and {SPECIAL_CASING_FILE} files. Do not edit! */",
""]

c_source.complete_header("\n".join(header_completion))
@ -725,8 +721,8 @@ def generate_folding(script_args, plane_type):
c_source = UnicodeBasicSource(FOLDING_C_SOURCE)
categorizer = UnicodeBasicCategorizer()

header_completion = ["/* This file is automatically generated by the %s script" % os.path.basename(__file__),
" * from the %s file. Do not edit! */" % (CASE_FOLDING_FILE),
header_completion = [f"/* This file is automatically generated by the {os.path.basename(__file__)} script",
f" * from the {CASE_FOLDING_FILE} file. Do not edit! */",
""]

c_source.complete_header("\n".join(header_completion))
@ -740,7 +736,7 @@ def generate_folding(script_args, plane_type):

folding = {}

with open(case_folding_path, 'r') as case_folding:
with open(case_folding_path, 'r', encoding='utf8') as case_folding:
case_folding_re = re.compile(r'(?P<code_point>[^;]*);\s*(?P<type>[^;]*);\s*(?P<folding>[^;]*);')
for line in case_folding:
match = case_folding_re.match(line)
@ -782,7 +778,7 @@ def main():
''')
def check_dir(path):
if not os.path.isdir(path) or not os.access(path, os.R_OK):
raise argparse.ArgumentTypeError('The %s directory does not exist or is not readable!' % path)
raise argparse.ArgumentTypeError(f'The {path} directory does not exist or is not readable!')
return path

parser.add_argument('--unicode-dir', metavar='DIR', action='store', required=True,

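Note: `return 0x80 <= i < 0x10000` relies on Python's chained comparisons and is equivalent to the old two-clause test. A quick self-contained sketch:

def in_range(i):
    # Chained comparison: true only when 0x80 <= i and i < 0x10000.
    return 0x80 <= i < 0x10000

assert in_range(0x80) and in_range(0xFFFF)
assert not in_range(0x7F) and not in_range(0x10000)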
@ -33,11 +33,11 @@ def format_code(code, indent, digit_number=4):
def regroup(list_to_group, num):
return [list_to_group[i:i+num] for i in range(0, len(list_to_group), num)]

def hex_format(char, digit_number):
def hex_format(char, padding):
if isinstance(char, str):
char = ord(char)

return ("0x{:0%sx}" % digit_number).format(char)
return f"{char:#0{padding + 2}x}"

lines = []

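Note: in the new `hex_format`, the `#` flag adds the `0x` prefix and the field width counts that prefix, which is why the width becomes `padding + 2`. A sketch of the equivalence, with illustrative values:

char, padding = 0x1F, 4
old_style = ("0x{:0%sx}" % padding).format(char)  # '0x001f'
new_style = f"{char:#0{padding + 2}x}"            # '0x001f'
assert old_style == new_style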
@ -13,9 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
# force // operator to be integer division in Python 2
from __future__ import division

import argparse
import json
@ -53,7 +50,7 @@ def check_files(opts):
files = [JERRY_BUILDER, opts.testfile]
for _file in files:
if not os.path.isfile(_file):
sys.exit("File not found: %s" % _file)
sys.exit(f"File not found: {_file}")


def build_bin(heapsize, opts):
@ -68,7 +65,7 @@ def build_bin(heapsize, opts):
if opts.buildtype == 'debug':
command.append(FLAG_DEBUG)

print('Building JerryScript with: %s' % (' '.join(command)))
print(f"Building JerryScript with: {' '.join(command)}")
subprocess.check_output(command)


@ -57,7 +57,7 @@ def reduce_code(code):


def js_to_native_code(path, name, build_type):
with open(path, 'r') as js_source:
with open(path, 'r', encoding='utf8') as js_source:
code = js_source.read()

if build_type != 'debug':
@ -65,13 +65,13 @@ def js_to_native_code(path, name, build_type):

data = format_code(code, 1, 2)

native_code = """const static char {0}_n[] = "{0}";
const static char {0}_s[] =
native_code = f"""const static char {name}_n[] = "{name}";
const static char {name}_s[] =
{{
{1}
{data}
}};
const static int {0}_l = {2};
""".format(name, data, len(code))
const static int {name}_l = {len(code)};
"""

return native_code

@ -96,13 +96,13 @@ def main():

script_args = parser.parse_args()

gen_line = "/* This file is generated by %s. Please do not modify. */" % os.path.basename(__file__)
gen_line = f"/* This file is generated by {os.path.basename(__file__)}. Please do not modify. */"

gen_output = [LICENSE, "", gen_line, "", HEADER]
gen_structs = [NATIVE_STRUCT]

if script_args.main:
gen_structs.append(' {{ {0}_n, {0}_s, {0}_l }}, \\'.format("main"))
gen_structs.append(' { main_n, main_s, main_l }, \\')

files = glob.glob(os.path.join(script_args.js_source_path, '*.js'))

@ -111,14 +111,14 @@ def main():
name = extract_name(path)
gen_output.append(js_to_native_code(path, name, script_args.build_type))
if name != 'main':
gen_structs.append(' {{ {0}_n, {0}_s, {0}_l }}, \\'.format(name))
gen_structs.append(f' {{ {name}_n, {name}_s, {name}_l }}, \\')

gen_structs.append(' { NULL, NULL, 0 } \\\n};')

gen_output.append("\n".join(gen_structs))
gen_output.append(FOOTER)

with open(os.path.join(script_args.output_path, 'jerry-targetjs.h'), 'w') as gen_file:
with open(os.path.join(script_args.output_path, 'jerry-targetjs.h'), 'w', encoding='utf8') as gen_file:
gen_file.write("\n".join(gen_output))


@ -59,8 +59,8 @@ confidence=
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=import-star-module-level,old-octal-literal,oct-method,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,missing-docstring,locally-disabled

disable=
missing-docstring,

[REPORTS]


@ -14,8 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import argparse
import collections
import hashlib
@ -26,7 +24,7 @@ import sys
import settings

if sys.version_info.major >= 3:
import runners.util as util # pylint: disable=import-error
from runners import util
else:
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + '/runners')
import util
@ -205,17 +203,19 @@ TERM_BLUE = '\033[1;34m'
TERM_RED = '\033[1;31m'

def report_command(cmd_type, cmd, env=None):
sys.stderr.write('%s%s%s\n' % (TERM_BLUE, cmd_type, TERM_NORMAL))
sys.stderr.write(f'{TERM_BLUE}{cmd_type}{TERM_NORMAL}\n')
if env is not None:
sys.stderr.write(''.join('%s%s=%r \\%s\n' % (TERM_BLUE, var, val, TERM_NORMAL)
sys.stderr.write(''.join(f'{TERM_BLUE}{var}={val!r} \\{TERM_NORMAL}\n'
for var, val in sorted(env.items())))
sys.stderr.write('%s%s%s\n' % (TERM_BLUE, (' \\%s\n\t%s' % (TERM_NORMAL, TERM_BLUE)).join(cmd), TERM_NORMAL))
sys.stderr.write(f"{TERM_BLUE}" +
f" \\{TERM_NORMAL}\n\t{TERM_BLUE}".join(cmd) +
f"{TERM_NORMAL}\n")

def report_skip(job):
sys.stderr.write('%sSkipping: %s' % (TERM_YELLOW, job.name))
sys.stderr.write(f'{TERM_YELLOW}Skipping: {job.name}')
if job.skip:
sys.stderr.write(' (%s)' % job.skip)
sys.stderr.write('%s\n' % TERM_NORMAL)
sys.stderr.write(f' ({job.skip})')
sys.stderr.write(f'{TERM_NORMAL}\n')

def create_binary(job, options):
build_args = job.build_args[:]
@ -232,20 +232,20 @@ def create_binary(job, options):
build_cmd.append(settings.BUILD_SCRIPT)
build_cmd.extend(build_args)

build_cmd.append('--builddir=%s' % build_dir_path)
build_cmd.append(f'--builddir={build_dir_path}')

install_dir_path = os.path.join(build_dir_path, 'local')
build_cmd.append('--install=%s' % install_dir_path)
build_cmd.append(f'--install={install_dir_path}')

if options.toolchain:
build_cmd.append('--toolchain=%s' % options.toolchain)
build_cmd.append(f'--toolchain={options.toolchain}')

report_command('Build command:', build_cmd)

binary_key = tuple(sorted(build_args))
if binary_key in BINARY_CACHE:
ret, build_dir_path = BINARY_CACHE[binary_key]
sys.stderr.write('(skipping: already built at %s with returncode %d)\n' % (build_dir_path, ret))
sys.stderr.write(f'(skipping: already built at {build_dir_path} with returncode {ret})\n')
return ret, build_dir_path

try:
@ -282,19 +282,17 @@ def iterate_test_runner_jobs(jobs, options):
yield job, ret_build, None

if build_dir_path in tested_paths:
sys.stderr.write('(skipping: already tested with %s)\n' % build_dir_path)
sys.stderr.write(f'(skipping: already tested with {build_dir_path})\n')
continue
else:
tested_paths.add(build_dir_path)
tested_paths.add(build_dir_path)

bin_path = get_binary_path(build_dir_path)
bin_hash = hash_binary(bin_path)

if bin_hash in tested_hashes:
sys.stderr.write('(skipping: already tested with equivalent %s)\n' % tested_hashes[bin_hash])
sys.stderr.write(f'(skipping: already tested with equivalent {tested_hashes[bin_hash]})\n')
continue
else:
tested_hashes[bin_hash] = build_dir_path
tested_hashes[bin_hash] = build_dir_path

test_cmd = util.get_python_cmd_prefix()
test_cmd.extend([settings.TEST_RUNNER_SCRIPT, '--engine', bin_path])
@ -309,16 +307,16 @@ def run_check(runnable, env=None):
full_env.update(env)
env = full_env

proc = subprocess.Popen(runnable, env=env)
proc.wait()
return proc.returncode
with subprocess.Popen(runnable, env=env) as proc:
proc.wait()
return proc.returncode

def run_jerry_debugger_tests(options):
ret_build = ret_test = 0
for job in DEBUGGER_TEST_OPTIONS:
ret_build, build_dir_path = create_binary(job, options)
if ret_build:
print("\n%sBuild failed%s\n" % (TERM_RED, TERM_NORMAL))
print(f"\n{TERM_RED}Build failed{TERM_NORMAL}\n")
break

for channel in ["websocket", "rawpacket"]:
@ -356,7 +354,7 @@ def run_jerry_tests(options):
skip_list = []

if job.name == 'jerry_tests-snapshot':
with open(settings.SNAPSHOT_TESTS_SKIPLIST, 'r') as snapshot_skip_list:
with open(settings.SNAPSHOT_TESTS_SKIPLIST, 'r', encoding='utf8') as snapshot_skip_list:
for line in snapshot_skip_list:
skip_list.append(line.rstrip())

@ -381,7 +379,7 @@ def run_test262_test_suite(options):
for job in jobs:
ret_build, build_dir_path = create_binary(job, options)
if ret_build:
print("\n%sBuild failed%s\n" % (TERM_RED, TERM_NORMAL))
print(f"\n{TERM_RED}Build failed{TERM_NORMAL}\n")
break

test_cmd = util.get_python_cmd_prefix() + [
@ -411,7 +409,7 @@ def run_unittests(options):
continue
ret_build, build_dir_path = create_binary(job, options)
if ret_build:
print("\n%sBuild failed%s\n" % (TERM_RED, TERM_NORMAL))
print(f"\n{TERM_RED}Build failed{TERM_NORMAL}\n")
break

if sys.platform == 'win32':
@ -440,7 +438,7 @@ def run_buildoption_test(options):

ret, _ = create_binary(job, options)
if ret:
print("\n%sBuild failed%s\n" % (TERM_RED, TERM_NORMAL))
print(f"\n{TERM_RED}Build failed{TERM_NORMAL}\n")
break

return ret

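Note: `subprocess.Popen` has been a context manager since Python 3.2, so the `with` form used in `run_check` above closes the pipes and waits for the child even if an exception is raised; the minimum Python versions added to the CI jobs cover this. A minimal sketch of the rewritten helper, with an illustrative command rather than a real build step:

import subprocess
import sys

def run_check(runnable, env=None):
    # Context manager form: the process handles are released on exit.
    with subprocess.Popen(runnable, env=env) as proc:
        proc.wait()
        return proc.returncode

# Illustrative, cross-platform command: run the current interpreter on a no-op.
assert run_check([sys.executable, '-c', 'pass']) == 0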
@ -14,7 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
import argparse
import os
import re
@ -78,7 +77,7 @@ def update_exclude_list(args):
passing_tests = set()
failing_tests = set()
new_passing_tests = set()
with open(os.path.join(os.path.dirname(args.engine), 'test262.report'), 'r') as report_file:
with open(os.path.join(os.path.dirname(args.engine), 'test262.report'), 'r', encoding='utf8') as report_file:
for line in report_file:
match = re.match('(=== )?(.*) (?:failed|passed) in (?:non-strict|strict)', line)
if match:
@ -92,7 +91,7 @@ def update_exclude_list(args):
# Tests pass in strict-mode but fail in non-strict-mode (or vice versa) should be considered as failures
passing_tests = passing_tests - failing_tests

with open(args.excludelist_path, 'r+') as exclude_file:
with open(args.excludelist_path, 'r+', encoding='utf8') as exclude_file:
lines = exclude_file.readlines()
exclude_file.seek(0)
exclude_file.truncate()
@ -167,41 +166,38 @@ def main(args):
if args.test262_test_list:
test262_command.extend(args.test262_test_list.split(','))

proc = subprocess.Popen(test262_command,
universal_newlines=True,
stdout=subprocess.PIPE,
**kwargs)
with subprocess.Popen(test262_command, universal_newlines=True, stdout=subprocess.PIPE, **kwargs) as proc:

return_code = 1
with open(os.path.join(os.path.dirname(args.engine), 'test262.report'), 'w') as output_file:
counter = 0
summary_found = False
summary_end_found = False
while True:
output = proc.stdout.readline()
if not output:
break
output_file.write(output)
return_code = 1
with open(os.path.join(os.path.dirname(args.engine), 'test262.report'), 'w', encoding='utf8') as output_file:
counter = 0
summary_found = False
summary_end_found = False
while True:
output = proc.stdout.readline()
if not output:
break
output_file.write(output)

if output.startswith('=== Summary ==='):
summary_found = True
print('')
if output.startswith('=== Summary ==='):
summary_found = True
print('')

if summary_found:
if not summary_end_found:
print(output, end='')
if not output.strip():
summary_end_found = True
if 'All tests succeeded' in output:
return_code = 0
elif re.search('in (non-)?strict mode', output):
counter += 1
if (counter % 100) == 0:
print(".", end='')
if (counter % 5000) == 0:
print(" Executed %d tests." % counter)
if summary_found:
if not summary_end_found:
print(output, end='')
if not output.strip():
summary_end_found = True
if 'All tests succeeded' in output:
return_code = 0
elif re.search('in (non-)?strict mode', output):
counter += 1
if (counter % 100) == 0:
print(".", end='')
if (counter % 5000) == 0:
print(f" Executed {counter} tests.")

proc.wait()
proc.wait()

if sys.platform == 'win32':
util.set_timezone(original_timezone)

@ -14,7 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
import argparse
import os
import subprocess
@ -22,7 +21,7 @@ import sys

import util

def get_arguments():
def get_args():
execution_runtime = os.environ.get('RUNTIME')
parser = argparse.ArgumentParser()
parser.add_argument('-q', '--quiet', action='store_true',
@ -60,7 +59,7 @@ def get_tests(test_dir, test_list, skip_list):

if test_list:
dirname = os.path.dirname(test_list)
with open(test_list, "r") as test_list_fd:
with open(test_list, "r", encoding='utf8') as test_list_fd:
for test in test_list_fd:
tests.append(os.path.normpath(os.path.join(dirname, test.rstrip())))

@ -79,10 +78,10 @@ def execute_test_command(test_cmd):
kwargs = {}
if sys.version_info.major >= 3:
kwargs['encoding'] = 'unicode_escape'
process = subprocess.Popen(test_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
universal_newlines=True, **kwargs)
stdout = process.communicate()[0]
return (process.returncode, stdout)
with subprocess.Popen(test_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
universal_newlines=True, **kwargs) as process:
stdout, _ = process.communicate()
return process.returncode, stdout


def main(args):
@ -142,10 +141,10 @@ def run_normal_tests(args, tests):
if (returncode == 0 and not is_expected_to_fail) or (returncode == 1 and is_expected_to_fail):
passed += 1
if not args.quiet:
passed_string = 'PASS' + (' (XFAIL)' if is_expected_to_fail else '')
passed_string = f"PASS{' (XFAIL)' if is_expected_to_fail else ''}"
util.print_test_result(tested, total, True, passed_string, test_path)
else:
passed_string = 'FAIL%s (%d)' % (' (XPASS)' if returncode == 0 and is_expected_to_fail else '', returncode)
passed_string = f"FAIL{' (XPASS)' if returncode == 0 and is_expected_to_fail else ''} ({returncode})"
util.print_test_result(tested, total, False, passed_string, test_path)
print("================================================")
print(stdout)
@ -183,7 +182,7 @@ def run_snapshot_tests(args, tests):
passed_string = 'PASS' + (' (XFAIL)' if returncode else '')
util.print_test_result(tested, total, True, passed_string, test_path, True)
else:
util.print_test_result(tested, total, False, 'FAIL (%d)' % (returncode), test_path, True)
util.print_test_result(tested, total, False, f'FAIL ({returncode})', test_path, True)
print("================================================")
print(stdout)
print("================================================")
@ -199,10 +198,10 @@ def run_snapshot_tests(args, tests):
if (returncode == 0 and not is_expected_to_fail) or (returncode == 1 and is_expected_to_fail):
passed += 1
if not args.quiet:
passed_string = 'PASS' + (' (XFAIL)' if is_expected_to_fail else '')
passed_string = f"PASS{' (XFAIL)' if is_expected_to_fail else ''}"
util.print_test_result(tested, total, True, passed_string, test_path, False)
else:
passed_string = 'FAIL%s (%d)' % (' (XPASS)' if returncode == 0 and is_expected_to_fail else '', returncode)
passed_string = f"FAIL{' (XPASS)' if returncode == 0 and is_expected_to_fail else ''} ({returncode})"
util.print_test_result(tested, total, False, passed_string, test_path, False)
print("================================================")
print(stdout)
@ -212,4 +211,4 @@ def run_snapshot_tests(args, tests):


if __name__ == "__main__":
sys.exit(main(get_arguments()))
sys.exit(main(get_args()))

@ -14,7 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
import argparse
import glob
import os
@ -71,7 +70,7 @@ def main(args):
util.print_test_result(tested, total, True, 'PASS', test_path)
except subprocess.CalledProcessError as err:
failed += 1
util.print_test_result(tested, total, False, 'FAIL (%d)' % err.returncode, test_path)
util.print_test_result(tested, total, False, f'FAIL ({err.returncode})', test_path)
print("================================================")
print(err.output)
print("================================================")

@ -40,10 +40,8 @@
# This code is governed by the BSD license found in the LICENSE file.


from __future__ import print_function

import logging
import optparse
import argparse
import os
from os import path
import platform
@ -55,7 +53,6 @@ import xml.dom.minidom
from collections import Counter

import signal
import threading
import multiprocessing

#######################################################################
@ -112,7 +109,7 @@ def my_read_dict(lines, indent=""):


def my_read_value(lines, value, indent):
if value == ">" or value == "|":
if value in (">", "|"):
(lines, value) = my_multiline(lines, value == "|")
value = value + "\n"
return (lines, value)
@ -157,7 +154,7 @@ def my_remove_list_header(indent, line):
def my_read_one_line(value):
if M_YAML_LIST_PATTERN.match(value):
return my_flow_list(value)
elif re.match(r"^[-0-9]*$", value):
if re.match(r"^[-0-9]*$", value):
try:
value = int(value)
except ValueError:
@ -196,7 +193,7 @@ def my_multiline(lines, preserve_newlines=False):
break
else:
if preserve_newlines:
if was_empty != None:
if was_empty is not None:
value += "\n"
else:
if was_empty:
@ -248,7 +245,7 @@ _LICENSE_PATTERN = re.compile(
def yaml_attr_parser(test_record, attrs, name, onerror=print):
parsed = yaml_load(attrs)
if parsed is None:
onerror("Failed to parse yaml in name %s" % name)
onerror(f"Failed to parse yaml in name {name}")
return

for key in parsed:
@ -287,7 +284,7 @@ def parse_test_record(src, name, onerror=print):

# YAML frontmatter is required for all tests.
if frontmatter is None:
onerror("Missing frontmatter: %s" % name)
onerror(f"Missing frontmatter: {name}")

# The license shuold be placed before the frontmatter and there shouldn't be
# any extra content between the license and the frontmatter.
@ -295,13 +292,13 @@ def parse_test_record(src, name, onerror=print):
header_idx = src.index(header)
frontmatter_idx = src.index(frontmatter)
if header_idx > frontmatter_idx:
onerror("Unexpected license after frontmatter: %s" % name)
onerror(f"Unexpected license after frontmatter: {name}")

# Search for any extra test content, but ignore whitespace only or comment lines.
extra = src[header_idx + len(header): frontmatter_idx]
if extra and any(line.strip() and not line.lstrip().startswith("//") for line in extra.split("\n")):
onerror(
"Unexpected test content between license and frontmatter: %s" % name)
f"Unexpected test content between license and frontmatter: {name}")

# Remove the license and YAML parts from the actual test content.
test = src
@ -319,7 +316,7 @@ def parse_test_record(src, name, onerror=print):

# Report if the license block is missing in non-generated tests.
if header is None and "generated" not in test_record and "hashbang" not in name:
onerror("No license found in: %s" % name)
onerror(f"No license found in: {name}")

return test_record

@ -339,36 +336,37 @@ def report_error(error_string):


def build_options():
result = optparse.OptionParser()
result.add_option("--command", default=None,
help="The command-line to run")
result.add_option("--tests", default=path.abspath('.'),
help="Path to the tests")
result.add_option("--exclude-list", default=None,
help="Path to the excludelist.xml file")
result.add_option("--cat", default=False, action="store_true",
help="Print packaged test code that would be run")
result.add_option("--summary", default=False, action="store_true",
help="Print summary after running tests")
result.add_option("--full-summary", default=False, action="store_true",
help="Print summary and test output after running tests")
result.add_option("--strict_only", default=False, action="store_true",
help="Test only strict mode")
result.add_option("--non_strict_only", default=False, action="store_true",
help="Test only non-strict mode")
result.add_option("--unmarked_default", default="both",
help="default mode for tests of unspecified strictness")
result.add_option("-j", "--job-count", default=None, action="store", type=int,
help="Number of parallel test jobs to run. In case of '0' cpu count is used.")
result.add_option("--logname", help="Filename to save stdout to")
result.add_option("--loglevel", default="warning",
help="sets log level to debug, info, warning, error, or critical")
result.add_option("--print-handle", default="print",
help="Command to print from console")
result.add_option("--list-includes", default=False, action="store_true",
help="List includes required by tests")
result.add_option("--module-flag", default="-m",
help="List includes required by tests")
result = argparse.ArgumentParser()
result.add_argument("--command", default=None,
help="The command-line to run")
result.add_argument("--tests", default=path.abspath('.'),
help="Path to the tests")
result.add_argument("--exclude-list", default=None,
help="Path to the excludelist.xml file")
result.add_argument("--cat", default=False, action="store_true",
help="Print packaged test code that would be run")
result.add_argument("--summary", default=False, action="store_true",
help="Print summary after running tests")
result.add_argument("--full-summary", default=False, action="store_true",
help="Print summary and test output after running tests")
result.add_argument("--strict_only", default=False, action="store_true",
help="Test only strict mode")
result.add_argument("--non_strict_only", default=False, action="store_true",
help="Test only non-strict mode")
result.add_argument("--unmarked_default", default="both",
help="default mode for tests of unspecified strictness")
result.add_argument("-j", "--job-count", default=None, action="store", type=int,
help="Number of parallel test jobs to run. In case of '0' cpu count is used.")
result.add_argument("--logname", help="Filename to save stdout to")
result.add_argument("--loglevel", default="warning",
help="sets log level to debug, info, warning, error, or critical")
result.add_argument("--print-handle", default="print",
help="Command to print from console")
result.add_argument("--list-includes", default=False, action="store_true",
help="List includes required by tests")
result.add_argument("--module-flag", default="-m",
help="List includes required by tests")
result.add_argument("test_list", nargs='*', default=None)
return result

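Note: with argparse the positional test paths are declared as `test_list`, so `parse_args()` returns a single namespace instead of optparse's `(options, args)` pair; `main()` further down picks the list out of that namespace. A small sketch of the new parsing behaviour, with made-up arguments:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--command", default=None)
parser.add_argument("test_list", nargs='*', default=None)

# Hypothetical command line, for illustration only.
options = parser.parse_args(["--command", "jerry", "test/a.js", "test/b.js"])
assert options.command == "jerry"
assert options.test_list == ["test/a.js", "test/b.js"]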
@ -376,15 +374,15 @@ def validate_options(options):
if not options.command:
report_error("A --command must be specified.")
if not path.exists(options.tests):
report_error("Couldn't find test path '%s'" % options.tests)
report_error(f"Couldn't find test path '{options.tests}'")


def is_windows():
actual_platform = platform.system()
return (actual_platform == 'Windows') or (actual_platform == 'Microsoft')
return actual_platform in ('Windows', 'Microsoft')


class TempFile(object):
class TempFile:

def __init__(self, suffix="", prefix="tmp", text=False):
self.suffix = suffix
@ -405,7 +403,7 @@ class TempFile(object):
os.write(self.file_desc, string.encode('utf8'))

def read(self):
with open(self.name, "r", newline='') as file_desc:
with open(self.name, "r", newline='', encoding='utf8') as file_desc:
return file_desc.read()

def close(self):
@ -417,11 +415,11 @@ class TempFile(object):
try:
self.close()
os.unlink(self.name)
except OSError as exception:
logging.error("Error disposing temp file: %s", str(exception))
except OSError as os_error:
logging.error("Error disposing temp file: %s", os_error)


class TestResult(object):
class TestResult:

def __init__(self, exit_code, stdout, stderr, case):
self.exit_code = exit_code
@ -433,37 +431,36 @@ class TestResult(object):
name = self.case.get_name()
mode = self.case.get_mode()

if self.exit_code != 0 and self.exit_code != 1:
sys.stderr.write(u"===%s failed in %s with negative:%s===\n"
% (name, mode, self.case.get_negative_type()))
if self.exit_code not in (0, 1):
sys.stderr.write(f"==={name} failed in {mode} with negative:{self.case.get_negative_type()}===\n")
self.write_output(sys.stderr)

if self.has_unexpected_outcome():
if self.case.is_negative():
print("=== %s passed in %s, but was expected to fail ===" % (name, mode))
print("--- expected error: %s ---\n" % self.case.get_negative_type())
print(f"=== {name} passed in {mode}, but was expected to fail ===")
print(f"--- expected error: {self.case.get_negative_type()} ---\n")
else:
if long_format:
print("=== %s failed in %s ===" % (name, mode))
print(f"=== {name} failed in {mode} ===")
else:
print("%s in %s: " % (name, mode))
print(f"{name} in {mode}: ")
self.write_output(sys.stdout)
if long_format:
print("===")
elif self.case.is_negative():
print("%s failed in %s as expected" % (name, mode))
print(f"{name} failed in {mode} as expected")
else:
print("%s passed in %s" % (name, mode))
print(f"{name} passed in {mode}")

def write_output(self, target):
out = self.stdout.strip()
if out:
target.write("--- output --- \n %s" % out)
target.write(f"--- output --- \n {out}")
error = self.stderr.strip()
if error:
target.write("--- errors --- \n %s" % error)
target.write(f"--- errors --- \n {error}")

target.write("\n--- exit code: %d ---\n" % self.exit_code)
target.write(f"\n--- exit code: {self.exit_code} ---\n")

def has_failed(self):
return self.exit_code != 0
@ -486,14 +483,14 @@ class TestResult(object):
return self.stdout


class TestCase(object):
class TestCase:

def __init__(self, suite, name, full_path, strict_mode, command_template, module_flag):
self.suite = suite
self.name = name
self.full_path = full_path
self.strict_mode = strict_mode
with open(self.full_path, "r", newline='') as file_desc:
with open(self.full_path, "r", newline='', encoding='utf8') as file_desc:
self.contents = file_desc.read()
test_record = parse_test_record(self.contents, name)
self.test = test_record["test"]
@ -602,25 +599,25 @@ class TestCase(object):
@staticmethod
def execute(command):
if is_windows():
args = '%s' % command
args = f'{command}'
else:
args = command.split(" ")
stdout = TempFile(prefix="test262-out-")
stderr = TempFile(prefix="test262-err-")
try:
logging.info("exec: %s", str(args))
process = subprocess.Popen(
with subprocess.Popen(
args,
shell=False,
stdout=stdout.file_desc,
stderr=stderr.file_desc
)
timer = threading.Timer(TEST262_CASE_TIMEOUT, process.kill)
timer.start()
code = process.wait()
timer.cancel()
out = stdout.read()
err = stderr.read()
) as process:
try:
code = process.wait(timeout=TEST262_CASE_TIMEOUT)
except subprocess.TimeoutExpired:
process.kill()
out = stdout.read()
err = stderr.read()
finally:
stdout.dispose()
stderr.dispose()
@ -666,10 +663,10 @@ class TestCase(object):
if 'raw' in flags:
if 'noStrict' in flags:
raise TypeError("The `raw` flag implies the `noStrict` flag")
elif 'onlyStrict' in flags:
if 'onlyStrict' in flags:
raise TypeError(
"The `raw` flag is incompatible with the `onlyStrict` flag")
elif self.get_include_list():
if self.get_include_list():
raise TypeError(
"The `raw` flag is incompatible with the `includes` tag")

@ -683,7 +680,7 @@ def test_case_run_process(case):
return case.run()

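Note: in `TestCase.execute` above, the watchdog `threading.Timer` is replaced by `Popen.wait(timeout=...)`, which raises `subprocess.TimeoutExpired` once the case exceeds `TEST262_CASE_TIMEOUT`, after which the process is killed explicitly. A reduced sketch of that pattern, with a hypothetical long-running command:

import subprocess
import sys

# Hypothetical command that sleeps longer than the allowed timeout.
with subprocess.Popen([sys.executable, '-c', 'import time; time.sleep(60)']) as process:
    try:
        code = process.wait(timeout=1)
    except subprocess.TimeoutExpired:
        process.kill()
        code = process.wait()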
class ProgressIndicator(object):
class ProgressIndicator:

def __init__(self, count):
self.count = count
@ -700,18 +697,11 @@ class ProgressIndicator(object):
self.succeeded += 1


def make_plural(num):
if num == 1:
return (num, "")
return (num, "s")


def percent_format(partial, total):
return "%i test%s (%.1f%%)" % (make_plural(partial) +
((100.0 * partial)/total,))
return f"{partial} test{'s' if partial > 1 else ''} ({(100.0 * partial)/total:.1f}%)"


class TestSuite(object):
class TestSuite:

def __init__(self, options):
self.test_root = path.join(options.tests, 'test')
@ -760,7 +750,7 @@ class TestSuite(object):
if not name in self.include_cache:
static = path.join(self.lib_root, name)
if path.exists(static):
with open(static) as file_desc:
with open(static, encoding='utf8') as file_desc:
contents = file_desc.read()
contents = re.sub(r'\r\n', '\n', contents)
self.include_cache[name] = contents + "\n"
@ -815,7 +805,7 @@ class TestSuite(object):
count = progress.count
succeeded = progress.succeeded
failed = progress.failed
write(" - Ran %i test%s" % make_plural(count))
write(f" - Ran {count} test{'s' if count > 1 else ''}")
if progress.failed == 0:
write(" - All tests succeeded")
else:
@ -827,12 +817,12 @@ class TestSuite(object):
print("")
write("Failed Tests")
for result in positive:
write(" %s in %s" % (result.case.get_name(), result.case.get_mode()))
write(f" {result.case.get_name()} in {result.case.get_mode()}")
if negative:
print("")
write("Expected to fail but passed ---")
for result in negative:
write(" %s in %s" % (result.case.get_name(), result.case.get_mode()))
write(f" {result.case.get_name()} in {result.case.get_mode()}")

def print_failure_output(self, progress, logfile):
for result in progress.failed_tests:
@ -849,7 +839,7 @@ class TestSuite(object):
report_error("No tests to run")
progress = ProgressIndicator(len(cases))
if logname:
self.logf = open(logname, "w")
self.logf = open(logname, "w", encoding='utf8') # pylint: disable=consider-using-with

if job_count == 1:
for case in cases:
@ -861,15 +851,11 @@ class TestSuite(object):
if job_count == 0:
job_count = None # uses multiprocessing.cpu_count()

pool = multiprocessing.Pool(processes=job_count, initializer=pool_init)
try:
with multiprocessing.Pool(processes=job_count, initializer=pool_init) as pool:
for result in pool.imap(test_case_run_process, cases):
if logname:
self.write_log(result)
progress.has_run(result)
except KeyboardInterrupt:
pool.terminate()
pool.join()

if print_summary:
self.print_summary(progress, logname)
@ -887,17 +873,17 @@ class TestSuite(object):
if result.has_unexpected_outcome():
if result.case.is_negative():
self.logf.write(
"=== %s passed in %s, but was expected to fail === \n" % (name, mode))
self.logf.write("--- expected error: %s ---\n" % result.case.GetNegativeType())
f"=== {name} passed in {mode}, but was expected to fail === \n")
self.logf.write(f"--- expected error: {result.case.GetNegativeType()} ---\n")
result.write_output(self.logf)
else:
self.logf.write("=== %s failed in %s === \n" % (name, mode))
self.logf.write(f"=== {name} failed in {mode} === \n")
result.write_output(self.logf)
self.logf.write("===\n")
elif result.case.is_negative():
self.logf.write("%s failed in %s as expected \n" % (name, mode))
self.logf.write(f"{name} failed in {mode} as expected \n")
else:
self.logf.write("%s passed in %s \n" % (name, mode))
self.logf.write(f"{name} passed in {mode} \n")

def print_source(self, tests):
cases = self.enumerate_tests(tests, "")
@ -917,7 +903,8 @@ class TestSuite(object):
def main():
code = 0
parser = build_options()
(options, args) = parser.parse_args()
options = parser.parse_args()
args = options.test_list
validate_options(options)

test_suite = TestSuite(options)
@ -951,5 +938,5 @@ if __name__ == '__main__':
try:
sys.exit(main())
except Test262Error as exception:
print("Error: %s" % exception.message)
print(f"Error: {exception.message}")
sys.exit(1)

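Note: `multiprocessing.Pool` is likewise used as a context manager in the `run()` hunk above; leaving the `with` block terminates the pool, which is what the removed `KeyboardInterrupt` handler did by hand. A self-contained sketch of that usage, with illustrative data:

import multiprocessing

def square(n):
    # Worker function; must be importable at module level for Pool.
    return n * n

if __name__ == '__main__':
    with multiprocessing.Pool(processes=2) as pool:
        results = list(pool.imap(square, range(5)))
    # Pool is terminated here, even if an exception was raised inside the block.
    assert results == [0, 1, 4, 9, 16]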
@ -1,5 +1,3 @@
#!/usr/bin/env python

# Copyright JS Foundation and other contributors, http://js.foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -14,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
import signal
import subprocess
import sys
@ -46,13 +43,13 @@ def set_sighdl_to_reset_timezone(timezone):


def print_test_summary(summary_string, total, passed, failed):
print("\n[summary] %s\n" % summary_string)
print("TOTAL: %d" % total)
print("%sPASS: %d%s" % (TERM_GREEN, passed, TERM_NORMAL))
print("%sFAIL: %d%s\n" % (TERM_RED, failed, TERM_NORMAL))
print(f"\n[summary] {summary_string}\n")
print(f"TOTAL: {total}")
print(f"{TERM_GREEN}PASS: {passed}{TERM_NORMAL}")
print(f"{TERM_RED}FAIL: {failed}{TERM_NORMAL}\n")

success_color = TERM_GREEN if passed == total else TERM_RED
print("%sSuccess: %d%%%s" % (success_color, passed*100/total, TERM_NORMAL))
print(f"{success_color}Success: {passed*100/total}{TERM_NORMAL}")


def print_test_result(tested, total, is_passed, passed_string, test_path, is_snapshot_generation=None):
@ -64,7 +61,7 @@ def print_test_result(tested, total, is_passed, passed_string, test_path, is_sna
snapshot_string = ' (execute snapshot)'

color = TERM_GREEN if is_passed else TERM_RED
print("[%4d/%4d] %s%s: %s%s%s" % (tested, total, color, passed_string, test_path, snapshot_string, TERM_NORMAL))
print(f"[{tested:>4}/{total:>4}] {color}{passed_string}: {test_path}{snapshot_string}{TERM_NORMAL}")


def get_platform_cmd_prefix():

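Note: the `%4d` conversions and the `{value:>4}` f-string fields in `print_test_result` both right-align in a four-character column, so the progress output keeps its layout. A quick sketch with illustrative numbers:

tested, total = 7, 1024
old_style = "[%4d/%4d]" % (tested, total)
new_style = f"[{tested:>4}/{total:>4}]"
assert old_style == new_style == "[   7/1024]"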
@ -1,5 +1,3 @@
#!/usr/bin/env python

# Copyright JS Foundation and other contributors, http://js.foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");

@ -14,8 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import argparse
import os
import re
@ -32,7 +30,8 @@ def main():
)
_ = parser.parse_args()

with open(os.path.join(settings.PROJECT_DIR, 'jerry-core', 'include', 'jerryscript.h'), 'r') as header:
with open(os.path.join(settings.PROJECT_DIR, 'jerry-core', 'include', 'jerryscript.h'), 'r',
encoding='utf8') as header:
version = {}
version_re = re.compile(r'\s*#define\s+JERRY_API_(?P<key>MAJOR|MINOR|PATCH)_VERSION\s+(?P<value>\S+)')
for line in header:
@ -40,7 +39,7 @@ def main():
if match:
version[match.group('key')] = match.group('value')

print('%(MAJOR)s.%(MINOR)s.%(PATCH)s' % version)
print(f'{version["MAJOR"]}.{version["MINOR"]}.{version["PATCH"]}')


if __name__ == "__main__":