Pārlūkot izejas kodu

🧑‍💻 Use spaces indent for Python

Scott Lahteine 2 gadus atpakaļ
vecāks
revīzija
306e03b03b
28 mainīti faili ar 2222 papildinājumiem un 2218 dzēšanām
  1. 5
    1
      .editorconfig
  2. 112
    112
      Marlin/src/HAL/LPC1768/upload_extra_script.py
  3. 1
    1
      Marlin/src/feature/spindle_laser.h
  4. 10
    10
      buildroot/share/PlatformIO/scripts/SAMD51_grandcentral_m4.py
  5. 87
    87
      buildroot/share/PlatformIO/scripts/chitu_crypt.py
  6. 8
    8
      buildroot/share/PlatformIO/scripts/common-dependencies-post.py
  7. 244
    244
      buildroot/share/PlatformIO/scripts/common-dependencies.py
  8. 194
    194
      buildroot/share/PlatformIO/scripts/configuration.py
  9. 8
    8
      buildroot/share/PlatformIO/scripts/custom_board.py
  10. 42
    42
      buildroot/share/PlatformIO/scripts/download_mks_assets.py
  11. 22
    22
      buildroot/share/PlatformIO/scripts/fix_framework_weakness.py
  12. 49
    49
      buildroot/share/PlatformIO/scripts/generic_create_variant.py
  13. 23
    23
      buildroot/share/PlatformIO/scripts/jgaurora_a5s_a1_with_bootloader.py
  14. 31
    31
      buildroot/share/PlatformIO/scripts/lerdge.py
  15. 41
    41
      buildroot/share/PlatformIO/scripts/marlin.py
  16. 53
    53
      buildroot/share/PlatformIO/scripts/mc-apply.py
  17. 51
    51
      buildroot/share/PlatformIO/scripts/offset_and_rename.py
  18. 13
    13
      buildroot/share/PlatformIO/scripts/openblt.py
  19. 5
    5
      buildroot/share/PlatformIO/scripts/pioutil.py
  20. 120
    120
      buildroot/share/PlatformIO/scripts/preflight-checks.py
  21. 70
    70
      buildroot/share/PlatformIO/scripts/preprocessor.py
  22. 3
    3
      buildroot/share/PlatformIO/scripts/random-bin.py
  23. 385
    385
      buildroot/share/PlatformIO/scripts/schema.py
  24. 242
    242
      buildroot/share/PlatformIO/scripts/signature.py
  25. 31
    31
      buildroot/share/PlatformIO/scripts/simulator.py
  26. 56
    56
      buildroot/share/PlatformIO/scripts/stm32_serialbuffer.py
  27. 314
    314
      buildroot/share/scripts/upload.py
  28. 2
    2
      get_test_targets.py

+ 5
- 1
.editorconfig Parādīt failu

@@ -14,6 +14,10 @@ end_of_line = lf
14 14
 indent_style = space
15 15
 indent_size = 2
16 16
 
17
-[{*.py,*.conf,*.sublime-project}]
17
+[{*.py}]
18
+indent_style = space
19
+indent_size = 4
20
+
21
+[{*.conf,*.sublime-project}]
18 22
 indent_style = tab
19 23
 indent_size = 4

+ 112
- 112
Marlin/src/HAL/LPC1768/upload_extra_script.py Parādīt failu

@@ -9,127 +9,127 @@ from __future__ import print_function
9 9
 import pioutil
10 10
 if pioutil.is_pio_build():
11 11
 
12
-	target_filename = "FIRMWARE.CUR"
13
-	target_drive = "REARM"
12
+    target_filename = "FIRMWARE.CUR"
13
+    target_drive = "REARM"
14 14
 
15
-	import platform
15
+    import platform
16 16
 
17
-	current_OS = platform.system()
18
-	Import("env")
17
+    current_OS = platform.system()
18
+    Import("env")
19 19
 
20
-	def print_error(e):
21
-		print('\nUnable to find destination disk (%s)\n' \
22
-			  'Please select it in platformio.ini using the upload_port keyword ' \
23
-			  '(https://docs.platformio.org/en/latest/projectconf/section_env_upload.html) ' \
24
-			  'or copy the firmware (.pio/build/%s/firmware.bin) manually to the appropriate disk\n' \
25
-			  %(e, env.get('PIOENV')))
20
+    def print_error(e):
21
+        print('\nUnable to find destination disk (%s)\n' \
22
+              'Please select it in platformio.ini using the upload_port keyword ' \
23
+              '(https://docs.platformio.org/en/latest/projectconf/section_env_upload.html) ' \
24
+              'or copy the firmware (.pio/build/%s/firmware.bin) manually to the appropriate disk\n' \
25
+              %(e, env.get('PIOENV')))
26 26
 
27
-	def before_upload(source, target, env):
28
-		try:
29
-			from pathlib import Path
30
-  			#
31
-			# Find a disk for upload
32
-			#
33
-			upload_disk = 'Disk not found'
34
-			target_file_found = False
35
-			target_drive_found = False
36
-			if current_OS == 'Windows':
37
-				#
38
-				# platformio.ini will accept this for a Windows upload port designation: 'upload_port = L:'
39
-				#   Windows - doesn't care about the disk's name, only cares about the drive letter
40
-				import subprocess,string
41
-				from ctypes import windll
42
-				from pathlib import PureWindowsPath
27
+    def before_upload(source, target, env):
28
+        try:
29
+            from pathlib import Path
30
+            #
31
+            # Find a disk for upload
32
+            #
33
+            upload_disk = 'Disk not found'
34
+            target_file_found = False
35
+            target_drive_found = False
36
+            if current_OS == 'Windows':
37
+                #
38
+                # platformio.ini will accept this for a Windows upload port designation: 'upload_port = L:'
39
+                #   Windows - doesn't care about the disk's name, only cares about the drive letter
40
+                import subprocess,string
41
+                from ctypes import windll
42
+                from pathlib import PureWindowsPath
43 43
 
44
-				# getting list of drives
45
-				# https://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python
46
-				drives = []
47
-				bitmask = windll.kernel32.GetLogicalDrives()
48
-				for letter in string.ascii_uppercase:
49
-					if bitmask & 1:
50
-						drives.append(letter)
51
-					bitmask >>= 1
44
+                # getting list of drives
45
+                # https://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python
46
+                drives = []
47
+                bitmask = windll.kernel32.GetLogicalDrives()
48
+                for letter in string.ascii_uppercase:
49
+                    if bitmask & 1:
50
+                        drives.append(letter)
51
+                    bitmask >>= 1
52 52
 
53
-				for drive in drives:
54
-					final_drive_name = drive + ':'
55
-					# print ('disc check: {}'.format(final_drive_name))
56
-					try:
57
-						volume_info = str(subprocess.check_output('cmd /C dir ' + final_drive_name, stderr=subprocess.STDOUT))
58
-					except Exception as e:
59
-						print ('error:{}'.format(e))
60
-						continue
61
-					else:
62
-						if target_drive in volume_info and not target_file_found:  # set upload if not found target file yet
63
-							target_drive_found = True
64
-							upload_disk = PureWindowsPath(final_drive_name)
65
-						if target_filename in volume_info:
66
-							if not target_file_found:
67
-								upload_disk = PureWindowsPath(final_drive_name)
68
-							target_file_found = True
53
+                for drive in drives:
54
+                    final_drive_name = drive + ':'
55
+                    # print ('disc check: {}'.format(final_drive_name))
56
+                    try:
57
+                        volume_info = str(subprocess.check_output('cmd /C dir ' + final_drive_name, stderr=subprocess.STDOUT))
58
+                    except Exception as e:
59
+                        print ('error:{}'.format(e))
60
+                        continue
61
+                    else:
62
+                        if target_drive in volume_info and not target_file_found:  # set upload if not found target file yet
63
+                            target_drive_found = True
64
+                            upload_disk = PureWindowsPath(final_drive_name)
65
+                        if target_filename in volume_info:
66
+                            if not target_file_found:
67
+                                upload_disk = PureWindowsPath(final_drive_name)
68
+                            target_file_found = True
69 69
 
70
-			elif current_OS == 'Linux':
71
-				#
72
-				# platformio.ini will accept this for a Linux upload port designation: 'upload_port = /media/media_name/drive'
73
-				#
74
-				import getpass
75
-				user = getpass.getuser()
76
-				mpath = Path('media', user)
77
-				drives = [ x for x in mpath.iterdir() if x.is_dir() ]
78
-				if target_drive in drives:  # If target drive is found, use it.
79
-					target_drive_found = True
80
-					upload_disk = mpath / target_drive
81
-				else:
82
-					for drive in drives:
83
-						try:
84
-							fpath = mpath / drive
85
-							filenames = [ x.name for x in fpath.iterdir() if x.is_file() ]
86
-						except:
87
-							continue
88
-						else:
89
-							if target_filename in filenames:
90
-								upload_disk = mpath / drive
91
-								target_file_found = True
92
-								break
93
-				#
94
-				# set upload_port to drive if found
95
-				#
70
+            elif current_OS == 'Linux':
71
+                #
72
+                # platformio.ini will accept this for a Linux upload port designation: 'upload_port = /media/media_name/drive'
73
+                #
74
+                import getpass
75
+                user = getpass.getuser()
76
+                mpath = Path('media', user)
77
+                drives = [ x for x in mpath.iterdir() if x.is_dir() ]
78
+                if target_drive in drives:  # If target drive is found, use it.
79
+                    target_drive_found = True
80
+                    upload_disk = mpath / target_drive
81
+                else:
82
+                    for drive in drives:
83
+                        try:
84
+                            fpath = mpath / drive
85
+                            filenames = [ x.name for x in fpath.iterdir() if x.is_file() ]
86
+                        except:
87
+                            continue
88
+                        else:
89
+                            if target_filename in filenames:
90
+                                upload_disk = mpath / drive
91
+                                target_file_found = True
92
+                                break
93
+                #
94
+                # set upload_port to drive if found
95
+                #
96 96
 
97
-				if target_file_found or target_drive_found:
98
-					env.Replace(
99
-						UPLOAD_FLAGS="-P$UPLOAD_PORT"
100
-					)
97
+                if target_file_found or target_drive_found:
98
+                    env.Replace(
99
+                        UPLOAD_FLAGS="-P$UPLOAD_PORT"
100
+                    )
101 101
 
102
-			elif current_OS == 'Darwin':  # MAC
103
-				#
104
-				# platformio.ini will accept this for a OSX upload port designation: 'upload_port = /media/media_name/drive'
105
-				#
106
-				dpath = Path('/Volumes')  # human readable names
107
-				drives = [ x for x in dpath.iterdir() if x.is_dir() ]
108
-				if target_drive in drives and not target_file_found:  # set upload if not found target file yet
109
-					target_drive_found = True
110
-					upload_disk = dpath / target_drive
111
-				for drive in drives:
112
-					try:
113
-						fpath = dpath / drive	# will get an error if the drive is protected
114
-						filenames = [ x.name for x in fpath.iterdir() if x.is_file() ]
115
-					except:
116
-						continue
117
-					else:
118
-						if target_filename in filenames:
119
-							upload_disk = dpath / drive
120
-							target_file_found = True
121
-							break
102
+            elif current_OS == 'Darwin':  # MAC
103
+                #
104
+                # platformio.ini will accept this for a OSX upload port designation: 'upload_port = /media/media_name/drive'
105
+                #
106
+                dpath = Path('/Volumes')  # human readable names
107
+                drives = [ x for x in dpath.iterdir() if x.is_dir() ]
108
+                if target_drive in drives and not target_file_found:  # set upload if not found target file yet
109
+                    target_drive_found = True
110
+                    upload_disk = dpath / target_drive
111
+                for drive in drives:
112
+                    try:
113
+                        fpath = dpath / drive   # will get an error if the drive is protected
114
+                        filenames = [ x.name for x in fpath.iterdir() if x.is_file() ]
115
+                    except:
116
+                        continue
117
+                    else:
118
+                        if target_filename in filenames:
119
+                            upload_disk = dpath / drive
120
+                            target_file_found = True
121
+                            break
122 122
 
123
-			#
124
-			# Set upload_port to drive if found
125
-			#
126
-			if target_file_found or target_drive_found:
127
-				env.Replace(UPLOAD_PORT=str(upload_disk))
128
-				print('\nUpload disk: ', upload_disk, '\n')
129
-			else:
130
-				print_error('Autodetect Error')
123
+            #
124
+            # Set upload_port to drive if found
125
+            #
126
+            if target_file_found or target_drive_found:
127
+                env.Replace(UPLOAD_PORT=str(upload_disk))
128
+                print('\nUpload disk: ', upload_disk, '\n')
129
+            else:
130
+                print_error('Autodetect Error')
131 131
 
132
-		except Exception as e:
133
-			print_error(str(e))
132
+        except Exception as e:
133
+            print_error(str(e))
134 134
 
135
-	env.AddPreAction("upload", before_upload)
135
+    env.AddPreAction("upload", before_upload)

+ 1
- 1
Marlin/src/feature/spindle_laser.h Parādīt failu

@@ -285,7 +285,7 @@ public:
285 285
           if (!menuPower) menuPower = cpwr_to_upwr(SPEED_POWER_STARTUP);
286 286
           power = upower_to_ocr(menuPower);
287 287
           apply_power(power);
288
-        } else 
288
+        } else
289 289
           apply_power(0);
290 290
       }
291 291
 

+ 10
- 10
buildroot/share/PlatformIO/scripts/SAMD51_grandcentral_m4.py Parādīt failu

@@ -4,17 +4,17 @@
4 4
 #
5 5
 import pioutil
6 6
 if pioutil.is_pio_build():
7
-	from os.path import join, isfile
8
-	import shutil
7
+    from os.path import join, isfile
8
+    import shutil
9 9
 
10
-	Import("env")
10
+    Import("env")
11 11
 
12
-	mf = env["MARLIN_FEATURES"]
13
-	rxBuf = mf["RX_BUFFER_SIZE"] if "RX_BUFFER_SIZE" in mf else "0"
14
-	txBuf = mf["TX_BUFFER_SIZE"] if "TX_BUFFER_SIZE" in mf else "0"
12
+    mf = env["MARLIN_FEATURES"]
13
+    rxBuf = mf["RX_BUFFER_SIZE"] if "RX_BUFFER_SIZE" in mf else "0"
14
+    txBuf = mf["TX_BUFFER_SIZE"] if "TX_BUFFER_SIZE" in mf else "0"
15 15
 
16
-	serialBuf = str(max(int(rxBuf), int(txBuf), 350))
16
+    serialBuf = str(max(int(rxBuf), int(txBuf), 350))
17 17
 
18
-	build_flags = env.get('BUILD_FLAGS')
19
-	build_flags.append("-DSERIAL_BUFFER_SIZE=" + serialBuf)
20
-	env.Replace(BUILD_FLAGS=build_flags)
18
+    build_flags = env.get('BUILD_FLAGS')
19
+    build_flags.append("-DSERIAL_BUFFER_SIZE=" + serialBuf)
20
+    env.Replace(BUILD_FLAGS=build_flags)

+ 87
- 87
buildroot/share/PlatformIO/scripts/chitu_crypt.py Parādīt failu

@@ -4,123 +4,123 @@
4 4
 #
5 5
 import pioutil
6 6
 if pioutil.is_pio_build():
7
-	import struct,uuid,marlin
7
+    import struct,uuid,marlin
8 8
 
9
-	board = marlin.env.BoardConfig()
9
+    board = marlin.env.BoardConfig()
10 10
 
11
-	def calculate_crc(contents, seed):
12
-		accumulating_xor_value = seed;
11
+    def calculate_crc(contents, seed):
12
+        accumulating_xor_value = seed;
13 13
 
14
-		for i in range(0, len(contents), 4):
15
-			value = struct.unpack('<I', contents[ i : i + 4])[0]
16
-			accumulating_xor_value = accumulating_xor_value ^ value
17
-		return accumulating_xor_value
14
+        for i in range(0, len(contents), 4):
15
+            value = struct.unpack('<I', contents[ i : i + 4])[0]
16
+            accumulating_xor_value = accumulating_xor_value ^ value
17
+        return accumulating_xor_value
18 18
 
19
-	def xor_block(r0, r1, block_number, block_size, file_key):
20
-		# This is the loop counter
21
-		loop_counter = 0x0
19
+    def xor_block(r0, r1, block_number, block_size, file_key):
20
+        # This is the loop counter
21
+        loop_counter = 0x0
22 22
 
23
-		# This is the key length
24
-		key_length = 0x18
23
+        # This is the key length
24
+        key_length = 0x18
25 25
 
26
-		# This is an initial seed
27
-		xor_seed = 0x4BAD
26
+        # This is an initial seed
27
+        xor_seed = 0x4BAD
28 28
 
29
-		# This is the block counter
30
-		block_number = xor_seed * block_number
29
+        # This is the block counter
30
+        block_number = xor_seed * block_number
31 31
 
32
-		#load the xor key from the file
33
-		r7 =  file_key
32
+        #load the xor key from the file
33
+        r7 =  file_key
34 34
 
35
-		for loop_counter in range(0, block_size):
36
-			# meant to make sure different bits of the key are used.
37
-			xor_seed = int(loop_counter / key_length)
35
+        for loop_counter in range(0, block_size):
36
+            # meant to make sure different bits of the key are used.
37
+            xor_seed = int(loop_counter / key_length)
38 38
 
39
-			# IP is a scratch register / R12
40
-			ip = loop_counter - (key_length * xor_seed)
39
+            # IP is a scratch register / R12
40
+            ip = loop_counter - (key_length * xor_seed)
41 41
 
42
-			# xor_seed = (loop_counter * loop_counter) + block_number
43
-			xor_seed = (loop_counter * loop_counter) + block_number
42
+            # xor_seed = (loop_counter * loop_counter) + block_number
43
+            xor_seed = (loop_counter * loop_counter) + block_number
44 44
 
45
-			# shift the xor_seed left by the bits in IP.
46
-			xor_seed = xor_seed >> ip
45
+            # shift the xor_seed left by the bits in IP.
46
+            xor_seed = xor_seed >> ip
47 47
 
48
-			# load a byte into IP
49
-			ip = r0[loop_counter]
48
+            # load a byte into IP
49
+            ip = r0[loop_counter]
50 50
 
51
-			# XOR the seed with r7
52
-			xor_seed = xor_seed ^ r7
51
+            # XOR the seed with r7
52
+            xor_seed = xor_seed ^ r7
53 53
 
54
-			# and then with IP
55
-			xor_seed = xor_seed ^ ip
54
+            # and then with IP
55
+            xor_seed = xor_seed ^ ip
56 56
 
57
-			#Now store the byte back
58
-			r1[loop_counter] = xor_seed & 0xFF
57
+            #Now store the byte back
58
+            r1[loop_counter] = xor_seed & 0xFF
59 59
 
60
-			#increment the loop_counter
61
-			loop_counter = loop_counter + 1
60
+            #increment the loop_counter
61
+            loop_counter = loop_counter + 1
62 62
 
63
-	def encrypt_file(input, output_file, file_length):
64
-		input_file = bytearray(input.read())
65
-		block_size = 0x800
66
-		key_length = 0x18
63
+    def encrypt_file(input, output_file, file_length):
64
+        input_file = bytearray(input.read())
65
+        block_size = 0x800
66
+        key_length = 0x18
67 67
 
68
-		uid_value = uuid.uuid4()
69
-		file_key = int(uid_value.hex[0:8], 16)
68
+        uid_value = uuid.uuid4()
69
+        file_key = int(uid_value.hex[0:8], 16)
70 70
 
71
-		xor_crc = 0xEF3D4323;
71
+        xor_crc = 0xEF3D4323;
72 72
 
73
-		# the input file is exepcted to be in chunks of 0x800
74
-		# so round the size
75
-		while len(input_file) % block_size != 0:
76
-			input_file.extend(b'0x0')
73
+        # the input file is exepcted to be in chunks of 0x800
74
+        # so round the size
75
+        while len(input_file) % block_size != 0:
76
+            input_file.extend(b'0x0')
77 77
 
78
-		# write the file header
79
-		output_file.write(struct.pack(">I", 0x443D2D3F))
80
-		# encrypt the contents using a known file header key
78
+        # write the file header
79
+        output_file.write(struct.pack(">I", 0x443D2D3F))
80
+        # encrypt the contents using a known file header key
81 81
 
82
-		# write the file_key
83
-		output_file.write(struct.pack("<I", file_key))
82
+        # write the file_key
83
+        output_file.write(struct.pack("<I", file_key))
84 84
 
85
-		#TODO - how to enforce that the firmware aligns to block boundaries?
86
-		block_count = int(len(input_file) / block_size)
87
-		print ("Block Count is ", block_count)
88
-		for block_number in range(0, block_count):
89
-			block_offset = (block_number * block_size)
90
-			block_end = block_offset + block_size
91
-			block_array = bytearray(input_file[block_offset: block_end])
92
-			xor_block(block_array, block_array, block_number, block_size, file_key)
93
-			for n in range (0, block_size):
94
-				input_file[block_offset + n] = block_array[n]
85
+        #TODO - how to enforce that the firmware aligns to block boundaries?
86
+        block_count = int(len(input_file) / block_size)
87
+        print ("Block Count is ", block_count)
88
+        for block_number in range(0, block_count):
89
+            block_offset = (block_number * block_size)
90
+            block_end = block_offset + block_size
91
+            block_array = bytearray(input_file[block_offset: block_end])
92
+            xor_block(block_array, block_array, block_number, block_size, file_key)
93
+            for n in range (0, block_size):
94
+                input_file[block_offset + n] = block_array[n]
95 95
 
96
-			# update the expected CRC value.
97
-			xor_crc = calculate_crc(block_array, xor_crc)
96
+            # update the expected CRC value.
97
+            xor_crc = calculate_crc(block_array, xor_crc)
98 98
 
99
-		# write CRC
100
-		output_file.write(struct.pack("<I", xor_crc))
99
+        # write CRC
100
+        output_file.write(struct.pack("<I", xor_crc))
101 101
 
102
-		# finally, append the encrypted results.
103
-		output_file.write(input_file)
104
-		return
102
+        # finally, append the encrypted results.
103
+        output_file.write(input_file)
104
+        return
105 105
 
106
-	# Encrypt ${PROGNAME}.bin and save it as 'update.cbd'
107
-	def encrypt(source, target, env):
108
-		from pathlib import Path
106
+    # Encrypt ${PROGNAME}.bin and save it as 'update.cbd'
107
+    def encrypt(source, target, env):
108
+        from pathlib import Path
109 109
 
110
-		fwpath = Path(target[0].path)
111
-		fwsize = fwpath.stat().st_size
110
+        fwpath = Path(target[0].path)
111
+        fwsize = fwpath.stat().st_size
112 112
 
113
-		enname = board.get("build.crypt_chitu")
114
-		enpath = Path(target[0].dir.path)
113
+        enname = board.get("build.crypt_chitu")
114
+        enpath = Path(target[0].dir.path)
115 115
 
116
-		fwfile = fwpath.open("rb")
117
-		enfile = (enpath / enname).open("wb")
116
+        fwfile = fwpath.open("rb")
117
+        enfile = (enpath / enname).open("wb")
118 118
 
119
-		print(f"Encrypting {fwpath} to {enname}")
120
-		encrypt_file(fwfile, enfile, fwsize)
121
-		fwfile.close()
122
-		enfile.close()
123
-		fwpath.unlink()
119
+        print(f"Encrypting {fwpath} to {enname}")
120
+        encrypt_file(fwfile, enfile, fwsize)
121
+        fwfile.close()
122
+        enfile.close()
123
+        fwpath.unlink()
124 124
 
125
-	marlin.relocate_firmware("0x08008800")
126
-	marlin.add_post_action(encrypt);
125
+    marlin.relocate_firmware("0x08008800")
126
+    marlin.add_post_action(encrypt);

+ 8
- 8
buildroot/share/PlatformIO/scripts/common-dependencies-post.py Parādīt failu

@@ -4,13 +4,13 @@
4 4
 #
5 5
 import pioutil
6 6
 if pioutil.is_pio_build():
7
-	Import("env", "projenv")
7
+    Import("env", "projenv")
8 8
 
9
-	def apply_board_build_flags():
10
-		if not 'BOARD_CUSTOM_BUILD_FLAGS' in env['MARLIN_FEATURES']:
11
-			return
12
-		projenv.Append(CCFLAGS=env['MARLIN_FEATURES']['BOARD_CUSTOM_BUILD_FLAGS'].split())
9
+    def apply_board_build_flags():
10
+        if not 'BOARD_CUSTOM_BUILD_FLAGS' in env['MARLIN_FEATURES']:
11
+            return
12
+        projenv.Append(CCFLAGS=env['MARLIN_FEATURES']['BOARD_CUSTOM_BUILD_FLAGS'].split())
13 13
 
14
-	# We need to add the board build flags in a post script
15
-	# so the platform build script doesn't overwrite the custom CCFLAGS
16
-	apply_board_build_flags()
14
+    # We need to add the board build flags in a post script
15
+    # so the platform build script doesn't overwrite the custom CCFLAGS
16
+    apply_board_build_flags()

+ 244
- 244
buildroot/share/PlatformIO/scripts/common-dependencies.py Parādīt failu

@@ -5,247 +5,247 @@
5 5
 import pioutil
6 6
 if pioutil.is_pio_build():
7 7
 
8
-	import subprocess,os,re
9
-	Import("env")
10
-
11
-	from platformio.package.meta import PackageSpec
12
-	from platformio.project.config import ProjectConfig
13
-
14
-	verbose = 0
15
-	FEATURE_CONFIG = {}
16
-
17
-	def validate_pio():
18
-		PIO_VERSION_MIN = (6, 0, 1)
19
-		try:
20
-			from platformio import VERSION as PIO_VERSION
21
-			weights = (1000, 100, 1)
22
-			version_min = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION_MIN)])
23
-			version_cur = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION)])
24
-			if version_cur < version_min:
25
-				print()
26
-				print("**************************************************")
27
-				print("******      An update to PlatformIO is      ******")
28
-				print("******  required to build Marlin Firmware.  ******")
29
-				print("******                                      ******")
30
-				print("******      Minimum version: ", PIO_VERSION_MIN, "    ******")
31
-				print("******      Current Version: ", PIO_VERSION, "    ******")
32
-				print("******                                      ******")
33
-				print("******   Update PlatformIO and try again.   ******")
34
-				print("**************************************************")
35
-				print()
36
-				exit(1)
37
-		except SystemExit:
38
-			exit(1)
39
-		except:
40
-			print("Can't detect PlatformIO Version")
41
-
42
-	def blab(str,level=1):
43
-		if verbose >= level:
44
-			print("[deps] %s" % str)
45
-
46
-	def add_to_feat_cnf(feature, flines):
47
-
48
-		try:
49
-			feat = FEATURE_CONFIG[feature]
50
-		except:
51
-			FEATURE_CONFIG[feature] = {}
52
-
53
-		# Get a reference to the FEATURE_CONFIG under construction
54
-		feat = FEATURE_CONFIG[feature]
55
-
56
-		# Split up passed lines on commas or newlines and iterate
57
-		# Add common options to the features config under construction
58
-		# For lib_deps replace a previous instance of the same library
59
-		atoms = re.sub(r',\s*', '\n', flines).strip().split('\n')
60
-		for line in atoms:
61
-			parts = line.split('=')
62
-			name = parts.pop(0)
63
-			if name in ['build_flags', 'extra_scripts', 'src_filter', 'lib_ignore']:
64
-				feat[name] = '='.join(parts)
65
-				blab("[%s] %s=%s" % (feature, name, feat[name]), 3)
66
-			else:
67
-				for dep in re.split(r',\s*', line):
68
-					lib_name = re.sub(r'@([~^]|[<>]=?)?[\d.]+', '', dep.strip()).split('=').pop(0)
69
-					lib_re = re.compile('(?!^' + lib_name + '\\b)')
70
-					feat['lib_deps'] = list(filter(lib_re.match, feat['lib_deps'])) + [dep]
71
-					blab("[%s] lib_deps = %s" % (feature, dep), 3)
72
-
73
-	def load_features():
74
-		blab("========== Gather [features] entries...")
75
-		for key in ProjectConfig().items('features'):
76
-			feature = key[0].upper()
77
-			if not feature in FEATURE_CONFIG:
78
-				FEATURE_CONFIG[feature] = { 'lib_deps': [] }
79
-			add_to_feat_cnf(feature, key[1])
80
-
81
-		# Add options matching custom_marlin.MY_OPTION to the pile
82
-		blab("========== Gather custom_marlin entries...")
83
-		for n in env.GetProjectOptions():
84
-			key = n[0]
85
-			mat = re.match(r'custom_marlin\.(.+)', key)
86
-			if mat:
87
-				try:
88
-					val = env.GetProjectOption(key)
89
-				except:
90
-					val = None
91
-				if val:
92
-					opt = mat[1].upper()
93
-					blab("%s.custom_marlin.%s = '%s'" % ( env['PIOENV'], opt, val ))
94
-					add_to_feat_cnf(opt, val)
95
-
96
-	def get_all_known_libs():
97
-		known_libs = []
98
-		for feature in FEATURE_CONFIG:
99
-			feat = FEATURE_CONFIG[feature]
100
-			if not 'lib_deps' in feat:
101
-				continue
102
-			for dep in feat['lib_deps']:
103
-				known_libs.append(PackageSpec(dep).name)
104
-		return known_libs
105
-
106
-	def get_all_env_libs():
107
-		env_libs = []
108
-		lib_deps = env.GetProjectOption('lib_deps')
109
-		for dep in lib_deps:
110
-			env_libs.append(PackageSpec(dep).name)
111
-		return env_libs
112
-
113
-	def set_env_field(field, value):
114
-		proj = env.GetProjectConfig()
115
-		proj.set("env:" + env['PIOENV'], field, value)
116
-
117
-	# All unused libs should be ignored so that if a library
118
-	# exists in .pio/lib_deps it will not break compilation.
119
-	def force_ignore_unused_libs():
120
-		env_libs = get_all_env_libs()
121
-		known_libs = get_all_known_libs()
122
-		diff = (list(set(known_libs) - set(env_libs)))
123
-		lib_ignore = env.GetProjectOption('lib_ignore') + diff
124
-		blab("Ignore libraries: %s" % lib_ignore)
125
-		set_env_field('lib_ignore', lib_ignore)
126
-
127
-	def apply_features_config():
128
-		load_features()
129
-		blab("========== Apply enabled features...")
130
-		for feature in FEATURE_CONFIG:
131
-			if not env.MarlinHas(feature):
132
-				continue
133
-
134
-			feat = FEATURE_CONFIG[feature]
135
-
136
-			if 'lib_deps' in feat and len(feat['lib_deps']):
137
-				blab("========== Adding lib_deps for %s... " % feature, 2)
138
-
139
-				# feat to add
140
-				deps_to_add = {}
141
-				for dep in feat['lib_deps']:
142
-					deps_to_add[PackageSpec(dep).name] = dep
143
-					blab("==================== %s... " % dep, 2)
144
-
145
-				# Does the env already have the dependency?
146
-				deps = env.GetProjectOption('lib_deps')
147
-				for dep in deps:
148
-					name = PackageSpec(dep).name
149
-					if name in deps_to_add:
150
-						del deps_to_add[name]
151
-
152
-				# Are there any libraries that should be ignored?
153
-				lib_ignore = env.GetProjectOption('lib_ignore')
154
-				for dep in deps:
155
-					name = PackageSpec(dep).name
156
-					if name in deps_to_add:
157
-						del deps_to_add[name]
158
-
159
-				# Is there anything left?
160
-				if len(deps_to_add) > 0:
161
-					# Only add the missing dependencies
162
-					set_env_field('lib_deps', deps + list(deps_to_add.values()))
163
-
164
-			if 'build_flags' in feat:
165
-				f = feat['build_flags']
166
-				blab("========== Adding build_flags for %s: %s" % (feature, f), 2)
167
-				new_flags = env.GetProjectOption('build_flags') + [ f ]
168
-				env.Replace(BUILD_FLAGS=new_flags)
169
-
170
-			if 'extra_scripts' in feat:
171
-				blab("Running extra_scripts for %s... " % feature, 2)
172
-				env.SConscript(feat['extra_scripts'], exports="env")
173
-
174
-			if 'src_filter' in feat:
175
-				blab("========== Adding build_src_filter for %s... " % feature, 2)
176
-				src_filter = ' '.join(env.GetProjectOption('src_filter'))
177
-				# first we need to remove the references to the same folder
178
-				my_srcs = re.findall(r'[+-](<.*?>)', feat['src_filter'])
179
-				cur_srcs = re.findall(r'[+-](<.*?>)', src_filter)
180
-				for d in my_srcs:
181
-					if d in cur_srcs:
182
-						src_filter = re.sub(r'[+-]' + d, '', src_filter)
183
-
184
-				src_filter = feat['src_filter'] + ' ' + src_filter
185
-				set_env_field('build_src_filter', [src_filter])
186
-				env.Replace(SRC_FILTER=src_filter)
187
-
188
-			if 'lib_ignore' in feat:
189
-				blab("========== Adding lib_ignore for %s... " % feature, 2)
190
-				lib_ignore = env.GetProjectOption('lib_ignore') + [feat['lib_ignore']]
191
-				set_env_field('lib_ignore', lib_ignore)
192
-
193
-	#
194
-	# Use the compiler to get a list of all enabled features
195
-	#
196
-	def load_marlin_features():
197
-		if 'MARLIN_FEATURES' in env:
198
-			return
199
-
200
-		# Process defines
201
-		from preprocessor import run_preprocessor
202
-		define_list = run_preprocessor(env)
203
-		marlin_features = {}
204
-		for define in define_list:
205
-			feature = define[8:].strip().decode().split(' ')
206
-			feature, definition = feature[0], ' '.join(feature[1:])
207
-			marlin_features[feature] = definition
208
-		env['MARLIN_FEATURES'] = marlin_features
209
-
210
-	#
211
-	# Return True if a matching feature is enabled
212
-	#
213
-	def MarlinHas(env, feature):
214
-		load_marlin_features()
215
-		r = re.compile('^' + feature + '$')
216
-		found = list(filter(r.match, env['MARLIN_FEATURES']))
217
-
218
-		# Defines could still be 'false' or '0', so check
219
-		some_on = False
220
-		if len(found):
221
-			for f in found:
222
-				val = env['MARLIN_FEATURES'][f]
223
-				if val in [ '', '1', 'true' ]:
224
-					some_on = True
225
-				elif val in env['MARLIN_FEATURES']:
226
-					some_on = env.MarlinHas(val)
227
-
228
-		return some_on
229
-
230
-	validate_pio()
231
-
232
-	try:
233
-		verbose = int(env.GetProjectOption('custom_verbose'))
234
-	except:
235
-		pass
236
-
237
-	#
238
-	# Add a method for other PIO scripts to query enabled features
239
-	#
240
-	env.AddMethod(MarlinHas)
241
-
242
-	#
243
-	# Add dependencies for enabled Marlin features
244
-	#
245
-	apply_features_config()
246
-	force_ignore_unused_libs()
247
-
248
-	#print(env.Dump())
249
-
250
-	from signature import compute_build_signature
251
-	compute_build_signature(env)
8
+    import subprocess,os,re
9
+    Import("env")
10
+
11
+    from platformio.package.meta import PackageSpec
12
+    from platformio.project.config import ProjectConfig
13
+
14
+    verbose = 0
15
+    FEATURE_CONFIG = {}
16
+
17
+    def validate_pio():
18
+        PIO_VERSION_MIN = (6, 0, 1)
19
+        try:
20
+            from platformio import VERSION as PIO_VERSION
21
+            weights = (1000, 100, 1)
22
+            version_min = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION_MIN)])
23
+            version_cur = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION)])
24
+            if version_cur < version_min:
25
+                print()
26
+                print("**************************************************")
27
+                print("******      An update to PlatformIO is      ******")
28
+                print("******  required to build Marlin Firmware.  ******")
29
+                print("******                                      ******")
30
+                print("******      Minimum version: ", PIO_VERSION_MIN, "    ******")
31
+                print("******      Current Version: ", PIO_VERSION, "    ******")
32
+                print("******                                      ******")
33
+                print("******   Update PlatformIO and try again.   ******")
34
+                print("**************************************************")
35
+                print()
36
+                exit(1)
37
+        except SystemExit:
38
+            exit(1)
39
+        except:
40
+            print("Can't detect PlatformIO Version")
41
+
42
+    def blab(str,level=1):
43
+        if verbose >= level:
44
+            print("[deps] %s" % str)
45
+
46
+    def add_to_feat_cnf(feature, flines):
47
+
48
+        try:
49
+            feat = FEATURE_CONFIG[feature]
50
+        except:
51
+            FEATURE_CONFIG[feature] = {}
52
+
53
+        # Get a reference to the FEATURE_CONFIG under construction
54
+        feat = FEATURE_CONFIG[feature]
55
+
56
+        # Split up passed lines on commas or newlines and iterate
57
+        # Add common options to the features config under construction
58
+        # For lib_deps replace a previous instance of the same library
59
+        atoms = re.sub(r',\s*', '\n', flines).strip().split('\n')
60
+        for line in atoms:
61
+            parts = line.split('=')
62
+            name = parts.pop(0)
63
+            if name in ['build_flags', 'extra_scripts', 'src_filter', 'lib_ignore']:
64
+                feat[name] = '='.join(parts)
65
+                blab("[%s] %s=%s" % (feature, name, feat[name]), 3)
66
+            else:
67
+                for dep in re.split(r',\s*', line):
68
+                    lib_name = re.sub(r'@([~^]|[<>]=?)?[\d.]+', '', dep.strip()).split('=').pop(0)
69
+                    lib_re = re.compile('(?!^' + lib_name + '\\b)')
70
+                    feat['lib_deps'] = list(filter(lib_re.match, feat['lib_deps'])) + [dep]
71
+                    blab("[%s] lib_deps = %s" % (feature, dep), 3)
72
+
73
+    def load_features():
74
+        blab("========== Gather [features] entries...")
75
+        for key in ProjectConfig().items('features'):
76
+            feature = key[0].upper()
77
+            if not feature in FEATURE_CONFIG:
78
+                FEATURE_CONFIG[feature] = { 'lib_deps': [] }
79
+            add_to_feat_cnf(feature, key[1])
80
+
81
+        # Add options matching custom_marlin.MY_OPTION to the pile
82
+        blab("========== Gather custom_marlin entries...")
83
+        for n in env.GetProjectOptions():
84
+            key = n[0]
85
+            mat = re.match(r'custom_marlin\.(.+)', key)
86
+            if mat:
87
+                try:
88
+                    val = env.GetProjectOption(key)
89
+                except:
90
+                    val = None
91
+                if val:
92
+                    opt = mat[1].upper()
93
+                    blab("%s.custom_marlin.%s = '%s'" % ( env['PIOENV'], opt, val ))
94
+                    add_to_feat_cnf(opt, val)
95
+
96
+    def get_all_known_libs():
97
+        known_libs = []
98
+        for feature in FEATURE_CONFIG:
99
+            feat = FEATURE_CONFIG[feature]
100
+            if not 'lib_deps' in feat:
101
+                continue
102
+            for dep in feat['lib_deps']:
103
+                known_libs.append(PackageSpec(dep).name)
104
+        return known_libs
105
+
106
+    def get_all_env_libs():
107
+        env_libs = []
108
+        lib_deps = env.GetProjectOption('lib_deps')
109
+        for dep in lib_deps:
110
+            env_libs.append(PackageSpec(dep).name)
111
+        return env_libs
112
+
113
+    def set_env_field(field, value):
114
+        proj = env.GetProjectConfig()
115
+        proj.set("env:" + env['PIOENV'], field, value)
116
+
117
+    # All unused libs should be ignored so that if a library
118
+    # exists in .pio/lib_deps it will not break compilation.
119
+    def force_ignore_unused_libs():
120
+        env_libs = get_all_env_libs()
121
+        known_libs = get_all_known_libs()
122
+        diff = (list(set(known_libs) - set(env_libs)))
123
+        lib_ignore = env.GetProjectOption('lib_ignore') + diff
124
+        blab("Ignore libraries: %s" % lib_ignore)
125
+        set_env_field('lib_ignore', lib_ignore)
126
+
127
+    def apply_features_config():
128
+        load_features()
129
+        blab("========== Apply enabled features...")
130
+        for feature in FEATURE_CONFIG:
131
+            if not env.MarlinHas(feature):
132
+                continue
133
+
134
+            feat = FEATURE_CONFIG[feature]
135
+
136
+            if 'lib_deps' in feat and len(feat['lib_deps']):
137
+                blab("========== Adding lib_deps for %s... " % feature, 2)
138
+
139
+                # feat to add
140
+                deps_to_add = {}
141
+                for dep in feat['lib_deps']:
142
+                    deps_to_add[PackageSpec(dep).name] = dep
143
+                    blab("==================== %s... " % dep, 2)
144
+
145
+                # Does the env already have the dependency?
146
+                deps = env.GetProjectOption('lib_deps')
147
+                for dep in deps:
148
+                    name = PackageSpec(dep).name
149
+                    if name in deps_to_add:
150
+                        del deps_to_add[name]
151
+
152
+                # Are there any libraries that should be ignored?
153
+                lib_ignore = env.GetProjectOption('lib_ignore')
154
+                for dep in deps:
155
+                    name = PackageSpec(dep).name
156
+                    if name in deps_to_add:
157
+                        del deps_to_add[name]
158
+
159
+                # Is there anything left?
160
+                if len(deps_to_add) > 0:
161
+                    # Only add the missing dependencies
162
+                    set_env_field('lib_deps', deps + list(deps_to_add.values()))
163
+
164
+            if 'build_flags' in feat:
165
+                f = feat['build_flags']
166
+                blab("========== Adding build_flags for %s: %s" % (feature, f), 2)
167
+                new_flags = env.GetProjectOption('build_flags') + [ f ]
168
+                env.Replace(BUILD_FLAGS=new_flags)
169
+
170
+            if 'extra_scripts' in feat:
171
+                blab("Running extra_scripts for %s... " % feature, 2)
172
+                env.SConscript(feat['extra_scripts'], exports="env")
173
+
174
+            if 'src_filter' in feat:
175
+                blab("========== Adding build_src_filter for %s... " % feature, 2)
176
+                src_filter = ' '.join(env.GetProjectOption('src_filter'))
177
+                # first we need to remove the references to the same folder
178
+                my_srcs = re.findall(r'[+-](<.*?>)', feat['src_filter'])
179
+                cur_srcs = re.findall(r'[+-](<.*?>)', src_filter)
180
+                for d in my_srcs:
181
+                    if d in cur_srcs:
182
+                        src_filter = re.sub(r'[+-]' + d, '', src_filter)
183
+
184
+                src_filter = feat['src_filter'] + ' ' + src_filter
185
+                set_env_field('build_src_filter', [src_filter])
186
+                env.Replace(SRC_FILTER=src_filter)
187
+
188
+            if 'lib_ignore' in feat:
189
+                blab("========== Adding lib_ignore for %s... " % feature, 2)
190
+                lib_ignore = env.GetProjectOption('lib_ignore') + [feat['lib_ignore']]
191
+                set_env_field('lib_ignore', lib_ignore)
192
+
193
+    #
194
+    # Use the compiler to get a list of all enabled features
195
+    #
196
+    def load_marlin_features():
197
+        if 'MARLIN_FEATURES' in env:
198
+            return
199
+
200
+        # Process defines
201
+        from preprocessor import run_preprocessor
202
+        define_list = run_preprocessor(env)
203
+        marlin_features = {}
204
+        for define in define_list:
205
+            feature = define[8:].strip().decode().split(' ')
206
+            feature, definition = feature[0], ' '.join(feature[1:])
207
+            marlin_features[feature] = definition
208
+        env['MARLIN_FEATURES'] = marlin_features
209
+
210
+    #
211
+    # Return True if a matching feature is enabled
212
+    #
213
+    def MarlinHas(env, feature):
214
+        load_marlin_features()
215
+        r = re.compile('^' + feature + '$')
216
+        found = list(filter(r.match, env['MARLIN_FEATURES']))
217
+
218
+        # Defines could still be 'false' or '0', so check
219
+        some_on = False
220
+        if len(found):
221
+            for f in found:
222
+                val = env['MARLIN_FEATURES'][f]
223
+                if val in [ '', '1', 'true' ]:
224
+                    some_on = True
225
+                elif val in env['MARLIN_FEATURES']:
226
+                    some_on = env.MarlinHas(val)
227
+
228
+        return some_on
229
+
230
+    validate_pio()
231
+
232
+    try:
233
+        verbose = int(env.GetProjectOption('custom_verbose'))
234
+    except:
235
+        pass
236
+
237
+    #
238
+    # Add a method for other PIO scripts to query enabled features
239
+    #
240
+    env.AddMethod(MarlinHas)
241
+
242
+    #
243
+    # Add dependencies for enabled Marlin features
244
+    #
245
+    apply_features_config()
246
+    force_ignore_unused_libs()
247
+
248
+    #print(env.Dump())
249
+
250
+    from signature import compute_build_signature
251
+    compute_build_signature(env)

+ 194
- 194
buildroot/share/PlatformIO/scripts/configuration.py Parādīt failu

@@ -7,229 +7,229 @@ from pathlib import Path
7 7
 
8 8
 verbose = 0
9 9
 def blab(str,level=1):
10
-	if verbose >= level: print(f"[config] {str}")
10
+    if verbose >= level: print(f"[config] {str}")
11 11
 
12 12
 def config_path(cpath):
13
-	return Path("Marlin", cpath)
13
+    return Path("Marlin", cpath)
14 14
 
15 15
 # Apply a single name = on/off ; name = value ; etc.
16 16
 # TODO: Limit to the given (optional) configuration
17 17
 def apply_opt(name, val, conf=None):
18
-	if name == "lcd": name, val = val, "on"
19
-
20
-	# Create a regex to match the option and capture parts of the line
21
-	regex = re.compile(rf'^(\s*)(//\s*)?(#define\s+)({name}\b)(\s*)(.*?)(\s*)(//.*)?$', re.IGNORECASE)
22
-
23
-	# Find and enable and/or update all matches
24
-	for file in ("Configuration.h", "Configuration_adv.h"):
25
-		fullpath = config_path(file)
26
-		lines = fullpath.read_text().split('\n')
27
-		found = False
28
-		for i in range(len(lines)):
29
-			line = lines[i]
30
-			match = regex.match(line)
31
-			if match and match[4].upper() == name.upper():
32
-				found = True
33
-				# For boolean options un/comment the define
34
-				if val in ("on", "", None):
35
-					newline = re.sub(r'^(\s*)//+\s*(#define)(\s{1,3})?(\s*)', r'\1\2 \4', line)
36
-				elif val == "off":
37
-					newline = re.sub(r'^(\s*)(#define)(\s{1,3})?(\s*)', r'\1//\2 \4', line)
38
-				else:
39
-					# For options with values, enable and set the value
40
-					newline = match[1] + match[3] + match[4] + match[5] + val
41
-					if match[8]:
42
-						sp = match[7] if match[7] else ' '
43
-						newline += sp + match[8]
44
-				lines[i] = newline
45
-				blab(f"Set {name} to {val}")
46
-
47
-		# If the option was found, write the modified lines
48
-		if found:
49
-			fullpath.write_text('\n'.join(lines))
50
-			break
51
-
52
-	# If the option didn't appear in either config file, add it
53
-	if not found:
54
-		# OFF options are added as disabled items so they appear
55
-		# in config dumps. Useful for custom settings.
56
-		prefix = ""
57
-		if val == "off":
58
-			prefix, val = "//", ""	# Item doesn't appear in config dump
59
-			#val = "false"			# Item appears in config dump
60
-
61
-		# Uppercase the option unless already mixed/uppercase
62
-		added = name.upper() if name.islower() else name
63
-
64
-		# Add the provided value after the name
65
-		if val != "on" and val != "" and val is not None:
66
-			added += " " + val
67
-
68
-		# Prepend the new option after the first set of #define lines
69
-		fullpath = config_path("Configuration.h")
70
-		with fullpath.open() as f:
71
-			lines = f.readlines()
72
-			linenum = 0
73
-			gotdef = False
74
-			for line in lines:
75
-				isdef = line.startswith("#define")
76
-				if not gotdef:
77
-					gotdef = isdef
78
-				elif not isdef:
79
-					break
80
-				linenum += 1
81
-			lines.insert(linenum, f"{prefix}#define {added} // Added by config.ini\n")
82
-			fullpath.write_text('\n'.join(lines))
18
+    if name == "lcd": name, val = val, "on"
19
+
20
+    # Create a regex to match the option and capture parts of the line
21
+    regex = re.compile(rf'^(\s*)(//\s*)?(#define\s+)({name}\b)(\s*)(.*?)(\s*)(//.*)?$', re.IGNORECASE)
22
+
23
+    # Find and enable and/or update all matches
24
+    for file in ("Configuration.h", "Configuration_adv.h"):
25
+        fullpath = config_path(file)
26
+        lines = fullpath.read_text().split('\n')
27
+        found = False
28
+        for i in range(len(lines)):
29
+            line = lines[i]
30
+            match = regex.match(line)
31
+            if match and match[4].upper() == name.upper():
32
+                found = True
33
+                # For boolean options un/comment the define
34
+                if val in ("on", "", None):
35
+                    newline = re.sub(r'^(\s*)//+\s*(#define)(\s{1,3})?(\s*)', r'\1\2 \4', line)
36
+                elif val == "off":
37
+                    newline = re.sub(r'^(\s*)(#define)(\s{1,3})?(\s*)', r'\1//\2 \4', line)
38
+                else:
39
+                    # For options with values, enable and set the value
40
+                    newline = match[1] + match[3] + match[4] + match[5] + val
41
+                    if match[8]:
42
+                        sp = match[7] if match[7] else ' '
43
+                        newline += sp + match[8]
44
+                lines[i] = newline
45
+                blab(f"Set {name} to {val}")
46
+
47
+        # If the option was found, write the modified lines
48
+        if found:
49
+            fullpath.write_text('\n'.join(lines))
50
+            break
51
+
52
+    # If the option didn't appear in either config file, add it
53
+    if not found:
54
+        # OFF options are added as disabled items so they appear
55
+        # in config dumps. Useful for custom settings.
56
+        prefix = ""
57
+        if val == "off":
58
+            prefix, val = "//", ""  # Item doesn't appear in config dump
59
+            #val = "false"          # Item appears in config dump
60
+
61
+        # Uppercase the option unless already mixed/uppercase
62
+        added = name.upper() if name.islower() else name
63
+
64
+        # Add the provided value after the name
65
+        if val != "on" and val != "" and val is not None:
66
+            added += " " + val
67
+
68
+        # Prepend the new option after the first set of #define lines
69
+        fullpath = config_path("Configuration.h")
70
+        with fullpath.open() as f:
71
+            lines = f.readlines()
72
+            linenum = 0
73
+            gotdef = False
74
+            for line in lines:
75
+                isdef = line.startswith("#define")
76
+                if not gotdef:
77
+                    gotdef = isdef
78
+                elif not isdef:
79
+                    break
80
+                linenum += 1
81
+            lines.insert(linenum, f"{prefix}#define {added} // Added by config.ini\n")
82
+            fullpath.write_text('\n'.join(lines))
83 83
 
84 84
 # Fetch configuration files from GitHub given the path.
85 85
 # Return True if any files were fetched.
86 86
 def fetch_example(url):
87
-	if url.endswith("/"): url = url[:-1]
88
-	if url.startswith('http'):
89
-		url = url.replace("%", "%25").replace(" ", "%20")
90
-	else:
91
-		brch = "bugfix-2.1.x"
92
-		if '@' in path: path, brch = map(str.strip, path.split('@'))
93
-		url = f"https://raw.githubusercontent.com/MarlinFirmware/Configurations/{brch}/config/{url}"
94
-
95
-	# Find a suitable fetch command
96
-	if shutil.which("curl") is not None:
97
-		fetch = "curl -L -s -S -f -o"
98
-	elif shutil.which("wget") is not None:
99
-		fetch = "wget -q -O"
100
-	else:
101
-		blab("Couldn't find curl or wget", -1)
102
-		return False
103
-
104
-	import os
105
-
106
-	# Reset configurations to default
107
-	os.system("git reset --hard HEAD")
108
-
109
-	# Try to fetch the remote files
110
-	gotfile = False
111
-	for fn in ("Configuration.h", "Configuration_adv.h", "_Bootscreen.h", "_Statusscreen.h"):
112
-		if os.system(f"{fetch} wgot {url}/{fn} >/dev/null 2>&1") == 0:
113
-			shutil.move('wgot', config_path(fn))
114
-			gotfile = True
115
-
116
-	if Path('wgot').exists(): shutil.rmtree('wgot')
117
-
118
-	return gotfile
87
+    if url.endswith("/"): url = url[:-1]
88
+    if url.startswith('http'):
89
+        url = url.replace("%", "%25").replace(" ", "%20")
90
+    else:
91
+        brch = "bugfix-2.1.x"
92
+        if '@' in path: path, brch = map(str.strip, path.split('@'))
93
+        url = f"https://raw.githubusercontent.com/MarlinFirmware/Configurations/{brch}/config/{url}"
94
+
95
+    # Find a suitable fetch command
96
+    if shutil.which("curl") is not None:
97
+        fetch = "curl -L -s -S -f -o"
98
+    elif shutil.which("wget") is not None:
99
+        fetch = "wget -q -O"
100
+    else:
101
+        blab("Couldn't find curl or wget", -1)
102
+        return False
103
+
104
+    import os
105
+
106
+    # Reset configurations to default
107
+    os.system("git reset --hard HEAD")
108
+
109
+    # Try to fetch the remote files
110
+    gotfile = False
111
+    for fn in ("Configuration.h", "Configuration_adv.h", "_Bootscreen.h", "_Statusscreen.h"):
112
+        if os.system(f"{fetch} wgot {url}/{fn} >/dev/null 2>&1") == 0:
113
+            shutil.move('wgot', config_path(fn))
114
+            gotfile = True
115
+
116
+    if Path('wgot').exists(): shutil.rmtree('wgot')
117
+
118
+    return gotfile
119 119
 
120 120
 def section_items(cp, sectkey):
121
-	return cp.items(sectkey) if sectkey in cp.sections() else []
121
+    return cp.items(sectkey) if sectkey in cp.sections() else []
122 122
 
123 123
 # Apply all items from a config section
124 124
 def apply_ini_by_name(cp, sect):
125
-	iniok = True
126
-	if sect in ('config:base', 'config:root'):
127
-		iniok = False
128
-		items = section_items(cp, 'config:base') + section_items(cp, 'config:root')
129
-	else:
130
-		items = cp.items(sect)
125
+    iniok = True
126
+    if sect in ('config:base', 'config:root'):
127
+        iniok = False
128
+        items = section_items(cp, 'config:base') + section_items(cp, 'config:root')
129
+    else:
130
+        items = cp.items(sect)
131 131
 
132
-	for item in items:
133
-		if iniok or not item[0].startswith('ini_'):
134
-			apply_opt(item[0], item[1])
132
+    for item in items:
133
+        if iniok or not item[0].startswith('ini_'):
134
+            apply_opt(item[0], item[1])
135 135
 
136 136
 # Apply all config sections from a parsed file
137 137
 def apply_all_sections(cp):
138
-	for sect in cp.sections():
139
-		if sect.startswith('config:'):
140
-			apply_ini_by_name(cp, sect)
138
+    for sect in cp.sections():
139
+        if sect.startswith('config:'):
140
+            apply_ini_by_name(cp, sect)
141 141
 
142 142
 # Apply certain config sections from a parsed file
143 143
 def apply_sections(cp, ckey='all'):
144
-	blab(f"Apply section key: {ckey}")
145
-	if ckey == 'all':
146
-		apply_all_sections(cp)
147
-	else:
148
-		# Apply the base/root config.ini settings after external files are done
149
-		if ckey in ('base', 'root'):
150
-			apply_ini_by_name(cp, 'config:base')
151
-
152
-		# Apply historically 'Configuration.h' settings everywhere
153
-		if ckey == 'basic':
154
-			apply_ini_by_name(cp, 'config:basic')
155
-
156
-		# Apply historically Configuration_adv.h settings everywhere
157
-		# (Some of which rely on defines in 'Conditionals_LCD.h')
158
-		elif ckey in ('adv', 'advanced'):
159
-			apply_ini_by_name(cp, 'config:advanced')
160
-
161
-		# Apply a specific config:<name> section directly
162
-		elif ckey.startswith('config:'):
163
-			apply_ini_by_name(cp, ckey)
144
+    blab(f"Apply section key: {ckey}")
145
+    if ckey == 'all':
146
+        apply_all_sections(cp)
147
+    else:
148
+        # Apply the base/root config.ini settings after external files are done
149
+        if ckey in ('base', 'root'):
150
+            apply_ini_by_name(cp, 'config:base')
151
+
152
+        # Apply historically 'Configuration.h' settings everywhere
153
+        if ckey == 'basic':
154
+            apply_ini_by_name(cp, 'config:basic')
155
+
156
+        # Apply historically Configuration_adv.h settings everywhere
157
+        # (Some of which rely on defines in 'Conditionals_LCD.h')
158
+        elif ckey in ('adv', 'advanced'):
159
+            apply_ini_by_name(cp, 'config:advanced')
160
+
161
+        # Apply a specific config:<name> section directly
162
+        elif ckey.startswith('config:'):
163
+            apply_ini_by_name(cp, ckey)
164 164
 
165 165
 # Apply settings from a top level config.ini
166 166
 def apply_config_ini(cp):
167
-	blab("=" * 20 + " Gather 'config.ini' entries...")
168
-
169
-	# Pre-scan for ini_use_config to get config_keys
170
-	base_items = section_items(cp, 'config:base') + section_items(cp, 'config:root')
171
-	config_keys = ['base']
172
-	for ikey, ival in base_items:
173
-		if ikey == 'ini_use_config':
174
-			config_keys = map(str.strip, ival.split(','))
175
-
176
-	# For each ini_use_config item perform an action
177
-	for ckey in config_keys:
178
-		addbase = False
179
-
180
-		# For a key ending in .ini load and parse another .ini file
181
-		if ckey.endswith('.ini'):
182
-			sect = 'base'
183
-			if '@' in ckey: sect, ckey = ckey.split('@')
184
-			other_ini = configparser.ConfigParser()
185
-			other_ini.read(config_path(ckey))
186
-			apply_sections(other_ini, sect)
187
-
188
-		# (Allow 'example/' as a shortcut for 'examples/')
189
-		elif ckey.startswith('example/'):
190
-			ckey = 'examples' + ckey[7:]
191
-
192
-		# For 'examples/<path>' fetch an example set from GitHub.
193
-		# For https?:// do a direct fetch of the URL.
194
-		elif ckey.startswith('examples/') or ckey.startswith('http'):
195
-			fetch_example(ckey)
196
-			ckey = 'base'
197
-
198
-		# Apply keyed sections after external files are done
199
-		apply_sections(cp, 'config:' + ckey)
167
+    blab("=" * 20 + " Gather 'config.ini' entries...")
168
+
169
+    # Pre-scan for ini_use_config to get config_keys
170
+    base_items = section_items(cp, 'config:base') + section_items(cp, 'config:root')
171
+    config_keys = ['base']
172
+    for ikey, ival in base_items:
173
+        if ikey == 'ini_use_config':
174
+            config_keys = map(str.strip, ival.split(','))
175
+
176
+    # For each ini_use_config item perform an action
177
+    for ckey in config_keys:
178
+        addbase = False
179
+
180
+        # For a key ending in .ini load and parse another .ini file
181
+        if ckey.endswith('.ini'):
182
+            sect = 'base'
183
+            if '@' in ckey: sect, ckey = ckey.split('@')
184
+            other_ini = configparser.ConfigParser()
185
+            other_ini.read(config_path(ckey))
186
+            apply_sections(other_ini, sect)
187
+
188
+        # (Allow 'example/' as a shortcut for 'examples/')
189
+        elif ckey.startswith('example/'):
190
+            ckey = 'examples' + ckey[7:]
191
+
192
+        # For 'examples/<path>' fetch an example set from GitHub.
193
+        # For https?:// do a direct fetch of the URL.
194
+        elif ckey.startswith('examples/') or ckey.startswith('http'):
195
+            fetch_example(ckey)
196
+            ckey = 'base'
197
+
198
+        # Apply keyed sections after external files are done
199
+        apply_sections(cp, 'config:' + ckey)
200 200
 
201 201
 if __name__ == "__main__":
202
-	#
203
-	# From command line use the given file name
204
-	#
205
-	import sys
206
-	args = sys.argv[1:]
207
-	if len(args) > 0:
208
-		if args[0].endswith('.ini'):
209
-			ini_file = args[0]
210
-		else:
211
-			print("Usage: %s <.ini file>" % sys.argv[0])
212
-	else:
213
-		ini_file = config_path('config.ini')
214
-
215
-	if ini_file:
216
-		user_ini = configparser.ConfigParser()
217
-		user_ini.read(ini_file)
218
-		apply_config_ini(user_ini)
202
+    #
203
+    # From command line use the given file name
204
+    #
205
+    import sys
206
+    args = sys.argv[1:]
207
+    if len(args) > 0:
208
+        if args[0].endswith('.ini'):
209
+            ini_file = args[0]
210
+        else:
211
+            print("Usage: %s <.ini file>" % sys.argv[0])
212
+    else:
213
+        ini_file = config_path('config.ini')
214
+
215
+    if ini_file:
216
+        user_ini = configparser.ConfigParser()
217
+        user_ini.read(ini_file)
218
+        apply_config_ini(user_ini)
219 219
 
220 220
 else:
221
-	#
222
-	# From within PlatformIO use the loaded INI file
223
-	#
224
-	import pioutil
225
-	if pioutil.is_pio_build():
221
+    #
222
+    # From within PlatformIO use the loaded INI file
223
+    #
224
+    import pioutil
225
+    if pioutil.is_pio_build():
226 226
 
227
-		Import("env")
227
+        Import("env")
228 228
 
229
-		try:
230
-			verbose = int(env.GetProjectOption('custom_verbose'))
231
-		except:
232
-			pass
229
+        try:
230
+            verbose = int(env.GetProjectOption('custom_verbose'))
231
+        except:
232
+            pass
233 233
 
234
-		from platformio.project.config import ProjectConfig
235
-		apply_config_ini(ProjectConfig())
234
+        from platformio.project.config import ProjectConfig
235
+        apply_config_ini(ProjectConfig())

+ 8
- 8
buildroot/share/PlatformIO/scripts/custom_board.py Parādīt failu

@@ -6,13 +6,13 @@
6 6
 #
7 7
 import pioutil
8 8
 if pioutil.is_pio_build():
9
-	import marlin
10
-	board = marlin.env.BoardConfig()
9
+    import marlin
10
+    board = marlin.env.BoardConfig()
11 11
 
12
-	address = board.get("build.address", "")
13
-	if address:
14
-		marlin.relocate_firmware(address)
12
+    address = board.get("build.address", "")
13
+    if address:
14
+        marlin.relocate_firmware(address)
15 15
 
16
-	ldscript = board.get("build.ldscript", "")
17
-	if ldscript:
18
-		marlin.custom_ld_script(ldscript)
16
+    ldscript = board.get("build.ldscript", "")
17
+    if ldscript:
18
+        marlin.custom_ld_script(ldscript)

+ 42
- 42
buildroot/share/PlatformIO/scripts/download_mks_assets.py Parādīt failu

@@ -4,50 +4,50 @@
4 4
 #
5 5
 import pioutil
6 6
 if pioutil.is_pio_build():
7
-	Import("env")
8
-	import requests,zipfile,tempfile,shutil
9
-	from pathlib import Path
7
+    Import("env")
8
+    import requests,zipfile,tempfile,shutil
9
+    from pathlib import Path
10 10
 
11
-	url = "https://github.com/makerbase-mks/Mks-Robin-Nano-Marlin2.0-Firmware/archive/0263cdaccf.zip"
12
-	deps_path = Path(env.Dictionary("PROJECT_LIBDEPS_DIR"))
13
-	zip_path = deps_path / "mks-assets.zip"
14
-	assets_path = Path(env.Dictionary("PROJECT_BUILD_DIR"), env.Dictionary("PIOENV"), "assets")
11
+    url = "https://github.com/makerbase-mks/Mks-Robin-Nano-Marlin2.0-Firmware/archive/0263cdaccf.zip"
12
+    deps_path = Path(env.Dictionary("PROJECT_LIBDEPS_DIR"))
13
+    zip_path = deps_path / "mks-assets.zip"
14
+    assets_path = Path(env.Dictionary("PROJECT_BUILD_DIR"), env.Dictionary("PIOENV"), "assets")
15 15
 
16
-	def download_mks_assets():
17
-		print("Downloading MKS Assets")
18
-		r = requests.get(url, stream=True)
19
-		# the user may have a very clean workspace,
20
-		# so create the PROJECT_LIBDEPS_DIR directory if not exits
21
-		if not deps_path.exists():
22
-			deps_path.mkdir()
23
-		with zip_path.open('wb') as fd:
24
-			for chunk in r.iter_content(chunk_size=128):
25
-				fd.write(chunk)
16
+    def download_mks_assets():
17
+        print("Downloading MKS Assets")
18
+        r = requests.get(url, stream=True)
19
+        # the user may have a very clean workspace,
20
+        # so create the PROJECT_LIBDEPS_DIR directory if not exits
21
+        if not deps_path.exists():
22
+            deps_path.mkdir()
23
+        with zip_path.open('wb') as fd:
24
+            for chunk in r.iter_content(chunk_size=128):
25
+                fd.write(chunk)
26 26
 
27
-	def copy_mks_assets():
28
-		print("Copying MKS Assets")
29
-		output_path = Path(tempfile.mkdtemp())
30
-		zip_obj = zipfile.ZipFile(zip_path, 'r')
31
-		zip_obj.extractall(output_path)
32
-		zip_obj.close()
33
-		if assets_path.exists() and not assets_path.is_dir():
34
-			assets_path.unlink()
35
-		if not assets_path.exists():
36
-			assets_path.mkdir()
37
-		base_path = ''
38
-		for filename in output_path.iterdir():
39
-			base_path = filename
40
-		fw_path = (output_path / base_path / 'Firmware')
41
-		font_path = fw_path / 'mks_font'
42
-		for filename in font_path.iterdir():
43
-			shutil.copy(font_path / filename, assets_path)
44
-		pic_path = fw_path / 'mks_pic'
45
-		for filename in pic_path.iterdir():
46
-			shutil.copy(pic_path / filename, assets_path)
47
-		shutil.rmtree(output_path, ignore_errors=True)
27
+    def copy_mks_assets():
28
+        print("Copying MKS Assets")
29
+        output_path = Path(tempfile.mkdtemp())
30
+        zip_obj = zipfile.ZipFile(zip_path, 'r')
31
+        zip_obj.extractall(output_path)
32
+        zip_obj.close()
33
+        if assets_path.exists() and not assets_path.is_dir():
34
+            assets_path.unlink()
35
+        if not assets_path.exists():
36
+            assets_path.mkdir()
37
+        base_path = ''
38
+        for filename in output_path.iterdir():
39
+            base_path = filename
40
+        fw_path = (output_path / base_path / 'Firmware')
41
+        font_path = fw_path / 'mks_font'
42
+        for filename in font_path.iterdir():
43
+            shutil.copy(font_path / filename, assets_path)
44
+        pic_path = fw_path / 'mks_pic'
45
+        for filename in pic_path.iterdir():
46
+            shutil.copy(pic_path / filename, assets_path)
47
+        shutil.rmtree(output_path, ignore_errors=True)
48 48
 
49
-	if not zip_path.exists():
50
-		download_mks_assets()
49
+    if not zip_path.exists():
50
+        download_mks_assets()
51 51
 
52
-	if not assets_path.exists():
53
-		copy_mks_assets()
52
+    if not assets_path.exists():
53
+        copy_mks_assets()

+ 22
- 22
buildroot/share/PlatformIO/scripts/fix_framework_weakness.py Parādīt failu

@@ -4,32 +4,32 @@
4 4
 import pioutil
5 5
 if pioutil.is_pio_build():
6 6
 
7
-	import shutil
8
-	from os.path import join, isfile
9
-	from pprint import pprint
7
+    import shutil
8
+    from os.path import join, isfile
9
+    from pprint import pprint
10 10
 
11
-	Import("env")
11
+    Import("env")
12 12
 
13
-	if env.MarlinHas("POSTMORTEM_DEBUGGING"):
14
-		FRAMEWORK_DIR = env.PioPlatform().get_package_dir("framework-arduinoststm32-maple")
15
-		patchflag_path = join(FRAMEWORK_DIR, ".exc-patching-done")
13
+    if env.MarlinHas("POSTMORTEM_DEBUGGING"):
14
+        FRAMEWORK_DIR = env.PioPlatform().get_package_dir("framework-arduinoststm32-maple")
15
+        patchflag_path = join(FRAMEWORK_DIR, ".exc-patching-done")
16 16
 
17
-		# patch file only if we didn't do it before
18
-		if not isfile(patchflag_path):
19
-			print("Patching libmaple exception handlers")
20
-			original_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S")
21
-			backup_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S.bak")
22
-			src_file = join("buildroot", "share", "PlatformIO", "scripts", "exc.S")
17
+        # patch file only if we didn't do it before
18
+        if not isfile(patchflag_path):
19
+            print("Patching libmaple exception handlers")
20
+            original_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S")
21
+            backup_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S.bak")
22
+            src_file = join("buildroot", "share", "PlatformIO", "scripts", "exc.S")
23 23
 
24
-			assert isfile(original_file) and isfile(src_file)
25
-			shutil.copyfile(original_file, backup_file)
26
-			shutil.copyfile(src_file, original_file);
24
+            assert isfile(original_file) and isfile(src_file)
25
+            shutil.copyfile(original_file, backup_file)
26
+            shutil.copyfile(src_file, original_file);
27 27
 
28
-			def _touch(path):
29
-				with open(path, "w") as fp:
30
-					fp.write("")
28
+            def _touch(path):
29
+                with open(path, "w") as fp:
30
+                    fp.write("")
31 31
 
32
-			env.Execute(lambda *args, **kwargs: _touch(patchflag_path))
33
-			print("Done patching exception handler")
32
+            env.Execute(lambda *args, **kwargs: _touch(patchflag_path))
33
+            print("Done patching exception handler")
34 34
 
35
-		print("Libmaple modified and ready for post mortem debugging")
35
+        print("Libmaple modified and ready for post mortem debugging")

+ 49
- 49
buildroot/share/PlatformIO/scripts/generic_create_variant.py Parādīt failu

@@ -7,52 +7,52 @@
7 7
 #
8 8
 import pioutil
9 9
 if pioutil.is_pio_build():
10
-	import shutil,marlin
11
-	from pathlib import Path
12
-
13
-	#
14
-	# Get the platform name from the 'platform_packages' option,
15
-	# or look it up by the platform.class.name.
16
-	#
17
-	env = marlin.env
18
-	platform = env.PioPlatform()
19
-
20
-	from platformio.package.meta import PackageSpec
21
-	platform_packages = env.GetProjectOption('platform_packages')
22
-
23
-	# Remove all tool items from platform_packages
24
-	platform_packages = [x for x in platform_packages if not x.startswith("platformio/tool-")]
25
-
26
-	if len(platform_packages) == 0:
27
-		framewords = {
28
-			"Ststm32Platform": "framework-arduinoststm32",
29
-			"AtmelavrPlatform": "framework-arduino-avr"
30
-		}
31
-		platform_name = framewords[platform.__class__.__name__]
32
-	else:
33
-		platform_name = PackageSpec(platform_packages[0]).name
34
-
35
-	if platform_name in [ "usb-host-msc", "usb-host-msc-cdc-msc", "usb-host-msc-cdc-msc-2", "usb-host-msc-cdc-msc-3", "tool-stm32duino", "biqu-bx-workaround", "main" ]:
36
-		platform_name = "framework-arduinoststm32"
37
-
38
-	FRAMEWORK_DIR = Path(platform.get_package_dir(platform_name))
39
-	assert FRAMEWORK_DIR.is_dir()
40
-
41
-	board = env.BoardConfig()
42
-
43
-	#mcu_type = board.get("build.mcu")[:-2]
44
-	variant = board.get("build.variant")
45
-	#series = mcu_type[:7].upper() + "xx"
46
-
47
-	# Prepare a new empty folder at the destination
48
-	variant_dir = FRAMEWORK_DIR / "variants" / variant
49
-	if variant_dir.is_dir():
50
-		shutil.rmtree(variant_dir)
51
-	if not variant_dir.is_dir():
52
-		variant_dir.mkdir()
53
-
54
-	# Source dir is a local variant sub-folder
55
-	source_dir = Path("buildroot/share/PlatformIO/variants", variant)
56
-	assert source_dir.is_dir()
57
-
58
-	marlin.copytree(source_dir, variant_dir)
10
+    import shutil,marlin
11
+    from pathlib import Path
12
+
13
+    #
14
+    # Get the platform name from the 'platform_packages' option,
15
+    # or look it up by the platform.class.name.
16
+    #
17
+    env = marlin.env
18
+    platform = env.PioPlatform()
19
+
20
+    from platformio.package.meta import PackageSpec
21
+    platform_packages = env.GetProjectOption('platform_packages')
22
+
23
+    # Remove all tool items from platform_packages
24
+    platform_packages = [x for x in platform_packages if not x.startswith("platformio/tool-")]
25
+
26
+    if len(platform_packages) == 0:
27
+        framewords = {
28
+            "Ststm32Platform": "framework-arduinoststm32",
29
+            "AtmelavrPlatform": "framework-arduino-avr"
30
+        }
31
+        platform_name = framewords[platform.__class__.__name__]
32
+    else:
33
+        platform_name = PackageSpec(platform_packages[0]).name
34
+
35
+    if platform_name in [ "usb-host-msc", "usb-host-msc-cdc-msc", "usb-host-msc-cdc-msc-2", "usb-host-msc-cdc-msc-3", "tool-stm32duino", "biqu-bx-workaround", "main" ]:
36
+        platform_name = "framework-arduinoststm32"
37
+
38
+    FRAMEWORK_DIR = Path(platform.get_package_dir(platform_name))
39
+    assert FRAMEWORK_DIR.is_dir()
40
+
41
+    board = env.BoardConfig()
42
+
43
+    #mcu_type = board.get("build.mcu")[:-2]
44
+    variant = board.get("build.variant")
45
+    #series = mcu_type[:7].upper() + "xx"
46
+
47
+    # Prepare a new empty folder at the destination
48
+    variant_dir = FRAMEWORK_DIR / "variants" / variant
49
+    if variant_dir.is_dir():
50
+        shutil.rmtree(variant_dir)
51
+    if not variant_dir.is_dir():
52
+        variant_dir.mkdir()
53
+
54
+    # Source dir is a local variant sub-folder
55
+    source_dir = Path("buildroot/share/PlatformIO/variants", variant)
56
+    assert source_dir.is_dir()
57
+
58
+    marlin.copytree(source_dir, variant_dir)

+ 23
- 23
buildroot/share/PlatformIO/scripts/jgaurora_a5s_a1_with_bootloader.py Parādīt failu

@@ -5,31 +5,31 @@
5 5
 import pioutil
6 6
 if pioutil.is_pio_build():
7 7
 
8
-	# Append ${PROGNAME}.bin firmware after bootloader and save it as 'jgaurora_firmware.bin'
9
-	def addboot(source, target, env):
10
-		from pathlib import Path
8
+    # Append ${PROGNAME}.bin firmware after bootloader and save it as 'jgaurora_firmware.bin'
9
+    def addboot(source, target, env):
10
+        from pathlib import Path
11 11
 
12
-		fw_path = Path(target[0].path)
13
-		fwb_path = fw_path.parent / 'firmware_with_bootloader.bin'
14
-		with fwb_path.open("wb") as fwb_file:
15
-			bl_path = Path("buildroot/share/PlatformIO/scripts/jgaurora_bootloader.bin")
16
-			bl_file = bl_path.open("rb")
17
-			while True:
18
-				b = bl_file.read(1)
19
-				if b == b'': break
20
-				else: fwb_file.write(b)
12
+        fw_path = Path(target[0].path)
13
+        fwb_path = fw_path.parent / 'firmware_with_bootloader.bin'
14
+        with fwb_path.open("wb") as fwb_file:
15
+            bl_path = Path("buildroot/share/PlatformIO/scripts/jgaurora_bootloader.bin")
16
+            bl_file = bl_path.open("rb")
17
+            while True:
18
+                b = bl_file.read(1)
19
+                if b == b'': break
20
+                else: fwb_file.write(b)
21 21
 
22
-			with fw_path.open("rb") as fw_file:
23
-				while True:
24
-					b = fw_file.read(1)
25
-					if b == b'': break
26
-					else: fwb_file.write(b)
22
+            with fw_path.open("rb") as fw_file:
23
+                while True:
24
+                    b = fw_file.read(1)
25
+                    if b == b'': break
26
+                    else: fwb_file.write(b)
27 27
 
28
-		fws_path = Path(target[0].dir.path, 'firmware_for_sd_upload.bin')
29
-		if fws_path.exists():
30
-			fws_path.unlink()
28
+        fws_path = Path(target[0].dir.path, 'firmware_for_sd_upload.bin')
29
+        if fws_path.exists():
30
+            fws_path.unlink()
31 31
 
32
-		fw_path.rename(fws_path)
32
+        fw_path.rename(fws_path)
33 33
 
34
-	import marlin
35
-	marlin.add_post_action(addboot);
34
+    import marlin
35
+    marlin.add_post_action(addboot);

+ 31
- 31
buildroot/share/PlatformIO/scripts/lerdge.py Parādīt failu

@@ -7,41 +7,41 @@
7 7
 #
8 8
 import pioutil
9 9
 if pioutil.is_pio_build():
10
-	import os,marlin
10
+    import os,marlin
11 11
 
12
-	board = marlin.env.BoardConfig()
12
+    board = marlin.env.BoardConfig()
13 13
 
14
-	def encryptByte(byte):
15
-		byte = 0xFF & ((byte << 6) | (byte >> 2))
16
-		i = 0x58 + byte
17
-		j = 0x05 + byte + (i >> 8)
18
-		byte = (0xF8 & i) | (0x07 & j)
19
-		return byte
14
+    def encryptByte(byte):
15
+        byte = 0xFF & ((byte << 6) | (byte >> 2))
16
+        i = 0x58 + byte
17
+        j = 0x05 + byte + (i >> 8)
18
+        byte = (0xF8 & i) | (0x07 & j)
19
+        return byte
20 20
 
21
-	def encrypt_file(input, output_file, file_length):
22
-		input_file = bytearray(input.read())
23
-		for i in range(len(input_file)):
24
-			input_file[i] = encryptByte(input_file[i])
25
-		output_file.write(input_file)
21
+    def encrypt_file(input, output_file, file_length):
22
+        input_file = bytearray(input.read())
23
+        for i in range(len(input_file)):
24
+            input_file[i] = encryptByte(input_file[i])
25
+        output_file.write(input_file)
26 26
 
27
-	# Encrypt ${PROGNAME}.bin and save it with the name given in build.crypt_lerdge
28
-	def encrypt(source, target, env):
29
-		fwpath = target[0].path
30
-		enname = board.get("build.crypt_lerdge")
31
-		print("Encrypting %s to %s" % (fwpath, enname))
32
-		fwfile = open(fwpath, "rb")
33
-		enfile = open(target[0].dir.path + "/" + enname, "wb")
34
-		length = os.path.getsize(fwpath)
27
+    # Encrypt ${PROGNAME}.bin and save it with the name given in build.crypt_lerdge
28
+    def encrypt(source, target, env):
29
+        fwpath = target[0].path
30
+        enname = board.get("build.crypt_lerdge")
31
+        print("Encrypting %s to %s" % (fwpath, enname))
32
+        fwfile = open(fwpath, "rb")
33
+        enfile = open(target[0].dir.path + "/" + enname, "wb")
34
+        length = os.path.getsize(fwpath)
35 35
 
36
-		encrypt_file(fwfile, enfile, length)
36
+        encrypt_file(fwfile, enfile, length)
37 37
 
38
-		fwfile.close()
39
-		enfile.close()
40
-		os.remove(fwpath)
38
+        fwfile.close()
39
+        enfile.close()
40
+        os.remove(fwpath)
41 41
 
42
-	if 'crypt_lerdge' in board.get("build").keys():
43
-		if board.get("build.crypt_lerdge") != "":
44
-			marlin.add_post_action(encrypt)
45
-	else:
46
-		print("LERDGE builds require output file via board_build.crypt_lerdge = 'filename' parameter")
47
-		exit(1)
42
+    if 'crypt_lerdge' in board.get("build").keys():
43
+        if board.get("build.crypt_lerdge") != "":
44
+            marlin.add_post_action(encrypt)
45
+    else:
46
+        print("LERDGE builds require output file via board_build.crypt_lerdge = 'filename' parameter")
47
+        exit(1)

+ 41
- 41
buildroot/share/PlatformIO/scripts/marlin.py Parādīt failu

@@ -9,64 +9,64 @@ from SCons.Script import DefaultEnvironment
9 9
 env = DefaultEnvironment()
10 10
 
11 11
 def copytree(src, dst, symlinks=False, ignore=None):
12
-	for item in src.iterdir():
13
-		if item.is_dir():
14
-			shutil.copytree(item, dst / item.name, symlinks, ignore)
15
-		else:
16
-			shutil.copy2(item, dst / item.name)
12
+    for item in src.iterdir():
13
+        if item.is_dir():
14
+            shutil.copytree(item, dst / item.name, symlinks, ignore)
15
+        else:
16
+            shutil.copy2(item, dst / item.name)
17 17
 
18 18
 def replace_define(field, value):
19
-	for define in env['CPPDEFINES']:
20
-		if define[0] == field:
21
-			env['CPPDEFINES'].remove(define)
22
-	env['CPPDEFINES'].append((field, value))
19
+    for define in env['CPPDEFINES']:
20
+        if define[0] == field:
21
+            env['CPPDEFINES'].remove(define)
22
+    env['CPPDEFINES'].append((field, value))
23 23
 
24 24
 # Relocate the firmware to a new address, such as "0x08005000"
25 25
 def relocate_firmware(address):
26
-	replace_define("VECT_TAB_ADDR", address)
26
+    replace_define("VECT_TAB_ADDR", address)
27 27
 
28 28
 # Relocate the vector table with a new offset
29 29
 def relocate_vtab(address):
30
-	replace_define("VECT_TAB_OFFSET", address)
30
+    replace_define("VECT_TAB_OFFSET", address)
31 31
 
32 32
 # Replace the existing -Wl,-T with the given ldscript path
33 33
 def custom_ld_script(ldname):
34
-	apath = str(Path("buildroot/share/PlatformIO/ldscripts", ldname).resolve())
35
-	for i, flag in enumerate(env["LINKFLAGS"]):
36
-		if "-Wl,-T" in flag:
37
-			env["LINKFLAGS"][i] = "-Wl,-T" + apath
38
-		elif flag == "-T":
39
-			env["LINKFLAGS"][i + 1] = apath
34
+    apath = str(Path("buildroot/share/PlatformIO/ldscripts", ldname).resolve())
35
+    for i, flag in enumerate(env["LINKFLAGS"]):
36
+        if "-Wl,-T" in flag:
37
+            env["LINKFLAGS"][i] = "-Wl,-T" + apath
38
+        elif flag == "-T":
39
+            env["LINKFLAGS"][i + 1] = apath
40 40
 
41 41
 # Encrypt ${PROGNAME}.bin and save it with a new name. This applies (mostly) to MKS boards
42 42
 # This PostAction is set up by offset_and_rename.py for envs with 'build.encrypt_mks'.
43 43
 def encrypt_mks(source, target, env, new_name):
44
-	import sys
44
+    import sys
45 45
 
46
-	key = [0xA3, 0xBD, 0xAD, 0x0D, 0x41, 0x11, 0xBB, 0x8D, 0xDC, 0x80, 0x2D, 0xD0, 0xD2, 0xC4, 0x9B, 0x1E, 0x26, 0xEB, 0xE3, 0x33, 0x4A, 0x15, 0xE4, 0x0A, 0xB3, 0xB1, 0x3C, 0x93, 0xBB, 0xAF, 0xF7, 0x3E]
46
+    key = [0xA3, 0xBD, 0xAD, 0x0D, 0x41, 0x11, 0xBB, 0x8D, 0xDC, 0x80, 0x2D, 0xD0, 0xD2, 0xC4, 0x9B, 0x1E, 0x26, 0xEB, 0xE3, 0x33, 0x4A, 0x15, 0xE4, 0x0A, 0xB3, 0xB1, 0x3C, 0x93, 0xBB, 0xAF, 0xF7, 0x3E]
47 47
 
48
-	# If FIRMWARE_BIN is defined by config, override all
49
-	mf = env["MARLIN_FEATURES"]
50
-	if "FIRMWARE_BIN" in mf: new_name = mf["FIRMWARE_BIN"]
48
+    # If FIRMWARE_BIN is defined by config, override all
49
+    mf = env["MARLIN_FEATURES"]
50
+    if "FIRMWARE_BIN" in mf: new_name = mf["FIRMWARE_BIN"]
51 51
 
52
-	fwpath = Path(target[0].path)
53
-	fwfile = fwpath.open("rb")
54
-	enfile = Path(target[0].dir.path, new_name).open("wb")
55
-	length = fwpath.stat().st_size
56
-	position = 0
57
-	try:
58
-		while position < length:
59
-			byte = fwfile.read(1)
60
-			if 320 <= position < 31040:
61
-				byte = chr(ord(byte) ^ key[position & 31])
62
-				if sys.version_info[0] > 2:
63
-					byte = bytes(byte, 'latin1')
64
-			enfile.write(byte)
65
-			position += 1
66
-	finally:
67
-		fwfile.close()
68
-		enfile.close()
69
-		fwpath.unlink()
52
+    fwpath = Path(target[0].path)
53
+    fwfile = fwpath.open("rb")
54
+    enfile = Path(target[0].dir.path, new_name).open("wb")
55
+    length = fwpath.stat().st_size
56
+    position = 0
57
+    try:
58
+        while position < length:
59
+            byte = fwfile.read(1)
60
+            if 320 <= position < 31040:
61
+                byte = chr(ord(byte) ^ key[position & 31])
62
+                if sys.version_info[0] > 2:
63
+                    byte = bytes(byte, 'latin1')
64
+            enfile.write(byte)
65
+            position += 1
66
+    finally:
67
+        fwfile.close()
68
+        enfile.close()
69
+        fwpath.unlink()
70 70
 
71 71
 def add_post_action(action):
72
-	env.AddPostAction(str(Path("$BUILD_DIR", "${PROGNAME}.bin")), action);
72
+    env.AddPostAction(str(Path("$BUILD_DIR", "${PROGNAME}.bin")), action);

+ 53
- 53
buildroot/share/PlatformIO/scripts/mc-apply.py Parādīt failu

@@ -11,59 +11,59 @@ opt_output = '--opt' in sys.argv
11 11
 output_suffix = '.sh' if opt_output else '' if '--bare-output' in sys.argv else '.gen'
12 12
 
13 13
 try:
14
-	with open('marlin_config.json', 'r') as infile:
15
-		conf = json.load(infile)
16
-		for key in conf:
17
-			# We don't care about the hash when restoring here
18
-			if key == '__INITIAL_HASH':
19
-				continue
20
-			if key == 'VERSION':
21
-				for k, v in sorted(conf[key].items()):
22
-					print(k + ': ' + v)
23
-				continue
24
-			# The key is the file name, so let's build it now
25
-			outfile = open('Marlin/' + key + output_suffix, 'w')
26
-			for k, v in sorted(conf[key].items()):
27
-				# Make define line now
28
-				if opt_output:
29
-					if v != '':
30
-						if '"' in v:
31
-							v = "'%s'" % v
32
-						elif ' ' in v:
33
-							v = '"%s"' % v
34
-						define = 'opt_set ' + k + ' ' + v + '\n'
35
-					else:
36
-						define = 'opt_enable ' + k + '\n'
37
-				else:
38
-					define = '#define ' + k + ' ' + v + '\n'
39
-				outfile.write(define)
40
-			outfile.close()
14
+    with open('marlin_config.json', 'r') as infile:
15
+        conf = json.load(infile)
16
+        for key in conf:
17
+            # We don't care about the hash when restoring here
18
+            if key == '__INITIAL_HASH':
19
+                continue
20
+            if key == 'VERSION':
21
+                for k, v in sorted(conf[key].items()):
22
+                    print(k + ': ' + v)
23
+                continue
24
+            # The key is the file name, so let's build it now
25
+            outfile = open('Marlin/' + key + output_suffix, 'w')
26
+            for k, v in sorted(conf[key].items()):
27
+                # Make define line now
28
+                if opt_output:
29
+                    if v != '':
30
+                        if '"' in v:
31
+                            v = "'%s'" % v
32
+                        elif ' ' in v:
33
+                            v = '"%s"' % v
34
+                        define = 'opt_set ' + k + ' ' + v + '\n'
35
+                    else:
36
+                        define = 'opt_enable ' + k + '\n'
37
+                else:
38
+                    define = '#define ' + k + ' ' + v + '\n'
39
+                outfile.write(define)
40
+            outfile.close()
41 41
 
42
-			# Try to apply changes to the actual configuration file (in order to keep useful comments)
43
-			if output_suffix != '':
44
-				# Move the existing configuration so it doesn't interfere
45
-				shutil.move('Marlin/' + key, 'Marlin/' + key + '.orig')
46
-				infile_lines = open('Marlin/' + key + '.orig', 'r').read().split('\n')
47
-				outfile = open('Marlin/' + key, 'w')
48
-				for line in infile_lines:
49
-					sline = line.strip(" \t\n\r")
50
-					if sline[:7] == "#define":
51
-						# Extract the key here (we don't care about the value)
52
-						kv = sline[8:].strip().split(' ')
53
-						if kv[0] in conf[key]:
54
-							outfile.write('#define ' + kv[0] + ' ' + conf[key][kv[0]] + '\n')
55
-							# Remove the key from the dict, so we can still write all missing keys at the end of the file
56
-							del conf[key][kv[0]]
57
-						else:
58
-							outfile.write(line + '\n')
59
-					else:
60
-						outfile.write(line + '\n')
61
-				# Process any remaining defines here
62
-				for k, v in sorted(conf[key].items()):
63
-					define = '#define ' + k + ' ' + v + '\n'
64
-					outfile.write(define)
65
-				outfile.close()
42
+            # Try to apply changes to the actual configuration file (in order to keep useful comments)
43
+            if output_suffix != '':
44
+                # Move the existing configuration so it doesn't interfere
45
+                shutil.move('Marlin/' + key, 'Marlin/' + key + '.orig')
46
+                infile_lines = open('Marlin/' + key + '.orig', 'r').read().split('\n')
47
+                outfile = open('Marlin/' + key, 'w')
48
+                for line in infile_lines:
49
+                    sline = line.strip(" \t\n\r")
50
+                    if sline[:7] == "#define":
51
+                        # Extract the key here (we don't care about the value)
52
+                        kv = sline[8:].strip().split(' ')
53
+                        if kv[0] in conf[key]:
54
+                            outfile.write('#define ' + kv[0] + ' ' + conf[key][kv[0]] + '\n')
55
+                            # Remove the key from the dict, so we can still write all missing keys at the end of the file
56
+                            del conf[key][kv[0]]
57
+                        else:
58
+                            outfile.write(line + '\n')
59
+                    else:
60
+                        outfile.write(line + '\n')
61
+                # Process any remaining defines here
62
+                for k, v in sorted(conf[key].items()):
63
+                    define = '#define ' + k + ' ' + v + '\n'
64
+                    outfile.write(define)
65
+                outfile.close()
66 66
 
67
-			print('Output configuration written to: ' + 'Marlin/' + key + output_suffix)
67
+            print('Output configuration written to: ' + 'Marlin/' + key + output_suffix)
68 68
 except:
69
-	print('No marlin_config.json found.')
69
+    print('No marlin_config.json found.')

+ 51
- 51
buildroot/share/PlatformIO/scripts/offset_and_rename.py Parādīt failu

@@ -2,59 +2,59 @@
2 2
 # offset_and_rename.py
3 3
 #
4 4
 # - If 'build.offset' is provided, either by JSON or by the environment...
5
-# 	- Set linker flag LD_FLASH_OFFSET and relocate the VTAB based on 'build.offset'.
6
-# 	- Set linker flag LD_MAX_DATA_SIZE based on 'build.maximum_ram_size'.
7
-# 	- Define STM32_FLASH_SIZE from 'upload.maximum_size' for use by Flash-based EEPROM emulation.
5
+#   - Set linker flag LD_FLASH_OFFSET and relocate the VTAB based on 'build.offset'.
6
+#   - Set linker flag LD_MAX_DATA_SIZE based on 'build.maximum_ram_size'.
7
+#   - Define STM32_FLASH_SIZE from 'upload.maximum_size' for use by Flash-based EEPROM emulation.
8 8
 #
9 9
 # - For 'board_build.rename' add a post-action to rename the firmware file.
10 10
 #
11 11
 import pioutil
12 12
 if pioutil.is_pio_build():
13
-	import sys,marlin
14
-
15
-	env = marlin.env
16
-	board = env.BoardConfig()
17
-	board_keys = board.get("build").keys()
18
-
19
-	#
20
-	# For build.offset define LD_FLASH_OFFSET, used by ldscript.ld
21
-	#
22
-	if 'offset' in board_keys:
23
-		LD_FLASH_OFFSET = board.get("build.offset")
24
-		marlin.relocate_vtab(LD_FLASH_OFFSET)
25
-
26
-		# Flash size
27
-		maximum_flash_size = int(board.get("upload.maximum_size") / 1024)
28
-		marlin.replace_define('STM32_FLASH_SIZE', maximum_flash_size)
29
-
30
-		# Get upload.maximum_ram_size (defined by /buildroot/share/PlatformIO/boards/VARIOUS.json)
31
-		maximum_ram_size = board.get("upload.maximum_ram_size")
32
-
33
-		for i, flag in enumerate(env["LINKFLAGS"]):
34
-			if "-Wl,--defsym=LD_FLASH_OFFSET" in flag:
35
-				env["LINKFLAGS"][i] = "-Wl,--defsym=LD_FLASH_OFFSET=" + LD_FLASH_OFFSET
36
-			if "-Wl,--defsym=LD_MAX_DATA_SIZE" in flag:
37
-				env["LINKFLAGS"][i] = "-Wl,--defsym=LD_MAX_DATA_SIZE=" + str(maximum_ram_size - 40)
38
-
39
-	#
40
-	# For build.encrypt_mks rename and encode the firmware file.
41
-	#
42
-	if 'encrypt_mks' in board_keys:
43
-
44
-		# Encrypt ${PROGNAME}.bin and save it with the name given in build.encrypt_mks
45
-		def encrypt(source, target, env):
46
-			marlin.encrypt_mks(source, target, env, board.get("build.encrypt_mks"))
47
-
48
-		if board.get("build.encrypt_mks") != "":
49
-			marlin.add_post_action(encrypt)
50
-
51
-	#
52
-	# For build.rename simply rename the firmware file.
53
-	#
54
-	if 'rename' in board_keys:
55
-
56
-		def rename_target(source, target, env):
57
-			from pathlib import Path
58
-			Path(target[0].path).replace(Path(target[0].dir.path, board.get("build.rename")))
59
-
60
-		marlin.add_post_action(rename_target)
13
+    import sys,marlin
14
+
15
+    env = marlin.env
16
+    board = env.BoardConfig()
17
+    board_keys = board.get("build").keys()
18
+
19
+    #
20
+    # For build.offset define LD_FLASH_OFFSET, used by ldscript.ld
21
+    #
22
+    if 'offset' in board_keys:
23
+        LD_FLASH_OFFSET = board.get("build.offset")
24
+        marlin.relocate_vtab(LD_FLASH_OFFSET)
25
+
26
+        # Flash size
27
+        maximum_flash_size = int(board.get("upload.maximum_size") / 1024)
28
+        marlin.replace_define('STM32_FLASH_SIZE', maximum_flash_size)
29
+
30
+        # Get upload.maximum_ram_size (defined by /buildroot/share/PlatformIO/boards/VARIOUS.json)
31
+        maximum_ram_size = board.get("upload.maximum_ram_size")
32
+
33
+        for i, flag in enumerate(env["LINKFLAGS"]):
34
+            if "-Wl,--defsym=LD_FLASH_OFFSET" in flag:
35
+                env["LINKFLAGS"][i] = "-Wl,--defsym=LD_FLASH_OFFSET=" + LD_FLASH_OFFSET
36
+            if "-Wl,--defsym=LD_MAX_DATA_SIZE" in flag:
37
+                env["LINKFLAGS"][i] = "-Wl,--defsym=LD_MAX_DATA_SIZE=" + str(maximum_ram_size - 40)
38
+
39
+    #
40
+    # For build.encrypt_mks rename and encode the firmware file.
41
+    #
42
+    if 'encrypt_mks' in board_keys:
43
+
44
+        # Encrypt ${PROGNAME}.bin and save it with the name given in build.encrypt_mks
45
+        def encrypt(source, target, env):
46
+            marlin.encrypt_mks(source, target, env, board.get("build.encrypt_mks"))
47
+
48
+        if board.get("build.encrypt_mks") != "":
49
+            marlin.add_post_action(encrypt)
50
+
51
+    #
52
+    # For build.rename simply rename the firmware file.
53
+    #
54
+    if 'rename' in board_keys:
55
+
56
+        def rename_target(source, target, env):
57
+            from pathlib import Path
58
+            Path(target[0].path).replace(Path(target[0].dir.path, board.get("build.rename")))
59
+
60
+        marlin.add_post_action(rename_target)

+ 13
- 13
buildroot/share/PlatformIO/scripts/openblt.py Parādīt failu

@@ -3,18 +3,18 @@
3 3
 #
4 4
 import pioutil
5 5
 if pioutil.is_pio_build():
6
-	import os,sys
7
-	from os.path import join
6
+    import os,sys
7
+    from os.path import join
8 8
 
9
-	Import("env")
9
+    Import("env")
10 10
 
11
-	board = env.BoardConfig()
12
-	board_keys = board.get("build").keys()
13
-	if 'encode' in board_keys:
14
-		env.AddPostAction(
15
-			join("$BUILD_DIR", "${PROGNAME}.bin"),
16
-			env.VerboseAction(" ".join([
17
-				"$OBJCOPY", "-O", "srec",
18
-				"\"$BUILD_DIR/${PROGNAME}.elf\"", "\"" + join("$BUILD_DIR", board.get("build.encode")) + "\""
19
-			]), "Building " + board.get("build.encode"))
20
-		)
11
+    board = env.BoardConfig()
12
+    board_keys = board.get("build").keys()
13
+    if 'encode' in board_keys:
14
+        env.AddPostAction(
15
+            join("$BUILD_DIR", "${PROGNAME}.bin"),
16
+            env.VerboseAction(" ".join([
17
+                "$OBJCOPY", "-O", "srec",
18
+                "\"$BUILD_DIR/${PROGNAME}.elf\"", "\"" + join("$BUILD_DIR", board.get("build.encode")) + "\""
19
+            ]), "Building " + board.get("build.encode"))
20
+        )

+ 5
- 5
buildroot/share/PlatformIO/scripts/pioutil.py Parādīt failu

@@ -4,10 +4,10 @@
4 4
 
5 5
 # Make sure 'vscode init' is not the current command
6 6
 def is_pio_build():
7
-	from SCons.Script import DefaultEnvironment
8
-	env = DefaultEnvironment()
9
-	return not env.IsIntegrationDump()
7
+    from SCons.Script import DefaultEnvironment
8
+    env = DefaultEnvironment()
9
+    return not env.IsIntegrationDump()
10 10
 
11 11
 def get_pio_version():
12
-	from platformio import util
13
-	return util.pioversion_to_intstr()
12
+    from platformio import util
13
+    return util.pioversion_to_intstr()

+ 120
- 120
buildroot/share/PlatformIO/scripts/preflight-checks.py Parādīt failu

@@ -5,123 +5,123 @@
5 5
 import pioutil
6 6
 if pioutil.is_pio_build():
7 7
 
8
-	import os,re,sys
9
-	from pathlib import Path
10
-	Import("env")
11
-
12
-	def get_envs_for_board(board):
13
-		ppath = Path("Marlin/src/pins/pins.h")
14
-		with ppath.open() as file:
15
-
16
-			if sys.platform == 'win32':
17
-				envregex = r"(?:env|win):"
18
-			elif sys.platform == 'darwin':
19
-				envregex = r"(?:env|mac|uni):"
20
-			elif sys.platform == 'linux':
21
-				envregex = r"(?:env|lin|uni):"
22
-			else:
23
-				envregex = r"(?:env):"
24
-
25
-			r = re.compile(r"if\s+MB\((.+)\)")
26
-			if board.startswith("BOARD_"):
27
-				board = board[6:]
28
-
29
-			for line in file:
30
-				mbs = r.findall(line)
31
-				if mbs and board in re.split(r",\s*", mbs[0]):
32
-					line = file.readline()
33
-					found_envs = re.match(r"\s*#include .+" + envregex, line)
34
-					if found_envs:
35
-						envlist = re.findall(envregex + r"(\w+)", line)
36
-						return [ "env:"+s for s in envlist ]
37
-		return []
38
-
39
-	def check_envs(build_env, board_envs, config):
40
-		if build_env in board_envs:
41
-			return True
42
-		ext = config.get(build_env, 'extends', default=None)
43
-		if ext:
44
-			if isinstance(ext, str):
45
-				return check_envs(ext, board_envs, config)
46
-			elif isinstance(ext, list):
47
-				for ext_env in ext:
48
-					if check_envs(ext_env, board_envs, config):
49
-						return True
50
-		return False
51
-
52
-	def sanity_check_target():
53
-		# Sanity checks:
54
-		if 'PIOENV' not in env:
55
-			raise SystemExit("Error: PIOENV is not defined. This script is intended to be used with PlatformIO")
56
-
57
-		# Require PlatformIO 6.1.1 or later
58
-		vers = pioutil.get_pio_version()
59
-		if vers < [6, 1, 1]:
60
-			raise SystemExit("Error: Marlin requires PlatformIO >= 6.1.1. Use 'pio upgrade' to get a newer version.")
61
-
62
-		if 'MARLIN_FEATURES' not in env:
63
-			raise SystemExit("Error: this script should be used after common Marlin scripts")
64
-
65
-		if 'MOTHERBOARD' not in env['MARLIN_FEATURES']:
66
-			raise SystemExit("Error: MOTHERBOARD is not defined in Configuration.h")
67
-
68
-		build_env = env['PIOENV']
69
-		motherboard = env['MARLIN_FEATURES']['MOTHERBOARD']
70
-		board_envs = get_envs_for_board(motherboard)
71
-		config = env.GetProjectConfig()
72
-		result = check_envs("env:"+build_env, board_envs, config)
73
-
74
-		if not result:
75
-			err = "Error: Build environment '%s' is incompatible with %s. Use one of these: %s" % \
76
-				  ( build_env, motherboard, ", ".join([ e[4:] for e in board_envs if e.startswith("env:") ]) )
77
-			raise SystemExit(err)
78
-
79
-		#
80
-		# Check for Config files in two common incorrect places
81
-		#
82
-		epath = Path(env['PROJECT_DIR'])
83
-		for p in [ epath, epath / "config" ]:
84
-			for f in ("Configuration.h", "Configuration_adv.h"):
85
-				if (p / f).is_file():
86
-					err = "ERROR: Config files found in directory %s. Please move them into the Marlin subfolder." % p
87
-					raise SystemExit(err)
88
-
89
-		#
90
-		# Find the name.cpp.o or name.o and remove it
91
-		#
92
-		def rm_ofile(subdir, name):
93
-			build_dir = Path(env['PROJECT_BUILD_DIR'], build_env);
94
-			for outdir in (build_dir, build_dir / "debug"):
95
-				for ext in (".cpp.o", ".o"):
96
-					fpath = outdir / "src/src" / subdir / (name + ext)
97
-					if fpath.exists():
98
-						fpath.unlink()
99
-
100
-		#
101
-		# Give warnings on every build
102
-		#
103
-		rm_ofile("inc", "Warnings")
104
-
105
-		#
106
-		# Rebuild 'settings.cpp' for EEPROM_INIT_NOW
107
-		#
108
-		if 'EEPROM_INIT_NOW' in env['MARLIN_FEATURES']:
109
-			rm_ofile("module", "settings")
110
-
111
-		#
112
-		# Check for old files indicating an entangled Marlin (mixing old and new code)
113
-		#
114
-		mixedin = []
115
-		p = Path(env['PROJECT_DIR'], "Marlin/src/lcd/dogm")
116
-		for f in [ "ultralcd_DOGM.cpp", "ultralcd_DOGM.h" ]:
117
-			if (p / f).is_file():
118
-				mixedin += [ f ]
119
-		p = Path(env['PROJECT_DIR'], "Marlin/src/feature/bedlevel/abl")
120
-		for f in [ "abl.cpp", "abl.h" ]:
121
-			if (p / f).is_file():
122
-				mixedin += [ f ]
123
-		if mixedin:
124
-			err = "ERROR: Old files fell into your Marlin folder. Remove %s and try again" % ", ".join(mixedin)
125
-			raise SystemExit(err)
126
-
127
-	sanity_check_target()
8
+    import os,re,sys
9
+    from pathlib import Path
10
+    Import("env")
11
+
12
+    def get_envs_for_board(board):
13
+        ppath = Path("Marlin/src/pins/pins.h")
14
+        with ppath.open() as file:
15
+
16
+            if sys.platform == 'win32':
17
+                envregex = r"(?:env|win):"
18
+            elif sys.platform == 'darwin':
19
+                envregex = r"(?:env|mac|uni):"
20
+            elif sys.platform == 'linux':
21
+                envregex = r"(?:env|lin|uni):"
22
+            else:
23
+                envregex = r"(?:env):"
24
+
25
+            r = re.compile(r"if\s+MB\((.+)\)")
26
+            if board.startswith("BOARD_"):
27
+                board = board[6:]
28
+
29
+            for line in file:
30
+                mbs = r.findall(line)
31
+                if mbs and board in re.split(r",\s*", mbs[0]):
32
+                    line = file.readline()
33
+                    found_envs = re.match(r"\s*#include .+" + envregex, line)
34
+                    if found_envs:
35
+                        envlist = re.findall(envregex + r"(\w+)", line)
36
+                        return [ "env:"+s for s in envlist ]
37
+        return []
38
+
39
+    def check_envs(build_env, board_envs, config):
40
+        if build_env in board_envs:
41
+            return True
42
+        ext = config.get(build_env, 'extends', default=None)
43
+        if ext:
44
+            if isinstance(ext, str):
45
+                return check_envs(ext, board_envs, config)
46
+            elif isinstance(ext, list):
47
+                for ext_env in ext:
48
+                    if check_envs(ext_env, board_envs, config):
49
+                        return True
50
+        return False
51
+
52
+    def sanity_check_target():
53
+        # Sanity checks:
54
+        if 'PIOENV' not in env:
55
+            raise SystemExit("Error: PIOENV is not defined. This script is intended to be used with PlatformIO")
56
+
57
+        # Require PlatformIO 6.1.1 or later
58
+        vers = pioutil.get_pio_version()
59
+        if vers < [6, 1, 1]:
60
+            raise SystemExit("Error: Marlin requires PlatformIO >= 6.1.1. Use 'pio upgrade' to get a newer version.")
61
+
62
+        if 'MARLIN_FEATURES' not in env:
63
+            raise SystemExit("Error: this script should be used after common Marlin scripts")
64
+
65
+        if 'MOTHERBOARD' not in env['MARLIN_FEATURES']:
66
+            raise SystemExit("Error: MOTHERBOARD is not defined in Configuration.h")
67
+
68
+        build_env = env['PIOENV']
69
+        motherboard = env['MARLIN_FEATURES']['MOTHERBOARD']
70
+        board_envs = get_envs_for_board(motherboard)
71
+        config = env.GetProjectConfig()
72
+        result = check_envs("env:"+build_env, board_envs, config)
73
+
74
+        if not result:
75
+            err = "Error: Build environment '%s' is incompatible with %s. Use one of these: %s" % \
76
+                  ( build_env, motherboard, ", ".join([ e[4:] for e in board_envs if e.startswith("env:") ]) )
77
+            raise SystemExit(err)
78
+
79
+        #
80
+        # Check for Config files in two common incorrect places
81
+        #
82
+        epath = Path(env['PROJECT_DIR'])
83
+        for p in [ epath, epath / "config" ]:
84
+            for f in ("Configuration.h", "Configuration_adv.h"):
85
+                if (p / f).is_file():
86
+                    err = "ERROR: Config files found in directory %s. Please move them into the Marlin subfolder." % p
87
+                    raise SystemExit(err)
88
+
89
+        #
90
+        # Find the name.cpp.o or name.o and remove it
91
+        #
92
+        def rm_ofile(subdir, name):
93
+            build_dir = Path(env['PROJECT_BUILD_DIR'], build_env);
94
+            for outdir in (build_dir, build_dir / "debug"):
95
+                for ext in (".cpp.o", ".o"):
96
+                    fpath = outdir / "src/src" / subdir / (name + ext)
97
+                    if fpath.exists():
98
+                        fpath.unlink()
99
+
100
+        #
101
+        # Give warnings on every build
102
+        #
103
+        rm_ofile("inc", "Warnings")
104
+
105
+        #
106
+        # Rebuild 'settings.cpp' for EEPROM_INIT_NOW
107
+        #
108
+        if 'EEPROM_INIT_NOW' in env['MARLIN_FEATURES']:
109
+            rm_ofile("module", "settings")
110
+
111
+        #
112
+        # Check for old files indicating an entangled Marlin (mixing old and new code)
113
+        #
114
+        mixedin = []
115
+        p = Path(env['PROJECT_DIR'], "Marlin/src/lcd/dogm")
116
+        for f in [ "ultralcd_DOGM.cpp", "ultralcd_DOGM.h" ]:
117
+            if (p / f).is_file():
118
+                mixedin += [ f ]
119
+        p = Path(env['PROJECT_DIR'], "Marlin/src/feature/bedlevel/abl")
120
+        for f in [ "abl.cpp", "abl.h" ]:
121
+            if (p / f).is_file():
122
+                mixedin += [ f ]
123
+        if mixedin:
124
+            err = "ERROR: Old files fell into your Marlin folder. Remove %s and try again" % ", ".join(mixedin)
125
+            raise SystemExit(err)
126
+
127
+    sanity_check_target()

+ 70
- 70
buildroot/share/PlatformIO/scripts/preprocessor.py Parādīt failu

@@ -7,8 +7,8 @@ nocache = 1
7 7
 verbose = 0
8 8
 
9 9
 def blab(str):
10
-	if verbose:
11
-		print(str)
10
+    if verbose:
11
+        print(str)
12 12
 
13 13
 ################################################################################
14 14
 #
@@ -16,36 +16,36 @@ def blab(str):
16 16
 #
17 17
 preprocessor_cache = {}
18 18
 def run_preprocessor(env, fn=None):
19
-	filename = fn or 'buildroot/share/PlatformIO/scripts/common-dependencies.h'
20
-	if filename in preprocessor_cache:
21
-		return preprocessor_cache[filename]
22
-
23
-	# Process defines
24
-	build_flags = env.get('BUILD_FLAGS')
25
-	build_flags = env.ParseFlagsExtended(build_flags)
26
-
27
-	cxx = search_compiler(env)
28
-	cmd = ['"' + cxx + '"']
29
-
30
-	# Build flags from board.json
31
-	#if 'BOARD' in env:
32
-	#	cmd += [env.BoardConfig().get("build.extra_flags")]
33
-	for s in build_flags['CPPDEFINES']:
34
-		if isinstance(s, tuple):
35
-			cmd += ['-D' + s[0] + '=' + str(s[1])]
36
-		else:
37
-			cmd += ['-D' + s]
38
-
39
-	cmd += ['-D__MARLIN_DEPS__ -w -dM -E -x c++']
40
-	depcmd = cmd + [ filename ]
41
-	cmd = ' '.join(depcmd)
42
-	blab(cmd)
43
-	try:
44
-		define_list = subprocess.check_output(cmd, shell=True).splitlines()
45
-	except:
46
-		define_list = {}
47
-	preprocessor_cache[filename] = define_list
48
-	return define_list
19
+    filename = fn or 'buildroot/share/PlatformIO/scripts/common-dependencies.h'
20
+    if filename in preprocessor_cache:
21
+        return preprocessor_cache[filename]
22
+
23
+    # Process defines
24
+    build_flags = env.get('BUILD_FLAGS')
25
+    build_flags = env.ParseFlagsExtended(build_flags)
26
+
27
+    cxx = search_compiler(env)
28
+    cmd = ['"' + cxx + '"']
29
+
30
+    # Build flags from board.json
31
+    #if 'BOARD' in env:
32
+    #   cmd += [env.BoardConfig().get("build.extra_flags")]
33
+    for s in build_flags['CPPDEFINES']:
34
+        if isinstance(s, tuple):
35
+            cmd += ['-D' + s[0] + '=' + str(s[1])]
36
+        else:
37
+            cmd += ['-D' + s]
38
+
39
+    cmd += ['-D__MARLIN_DEPS__ -w -dM -E -x c++']
40
+    depcmd = cmd + [ filename ]
41
+    cmd = ' '.join(depcmd)
42
+    blab(cmd)
43
+    try:
44
+        define_list = subprocess.check_output(cmd, shell=True).splitlines()
45
+    except:
46
+        define_list = {}
47
+    preprocessor_cache[filename] = define_list
48
+    return define_list
49 49
 
50 50
 
51 51
 ################################################################################
@@ -54,41 +54,41 @@ def run_preprocessor(env, fn=None):
54 54
 #
55 55
 def search_compiler(env):
56 56
 
57
-	from pathlib import Path, PurePath
58
-
59
-	ENV_BUILD_PATH = Path(env['PROJECT_BUILD_DIR'], env['PIOENV'])
60
-	GCC_PATH_CACHE = ENV_BUILD_PATH / ".gcc_path"
61
-
62
-	try:
63
-		gccpath = env.GetProjectOption('custom_gcc')
64
-		blab("Getting compiler from env")
65
-		return gccpath
66
-	except:
67
-		pass
68
-
69
-	# Warning: The cached .gcc_path will obscure a newly-installed toolkit
70
-	if not nocache and GCC_PATH_CACHE.exists():
71
-		blab("Getting g++ path from cache")
72
-		return GCC_PATH_CACHE.read_text()
73
-
74
-	# Use any item in $PATH corresponding to a platformio toolchain bin folder
75
-	path_separator = ':'
76
-	gcc_exe = '*g++'
77
-	if env['PLATFORM'] == 'win32':
78
-		path_separator = ';'
79
-		gcc_exe += ".exe"
80
-
81
-	# Search for the compiler in PATH
82
-	for ppath in map(Path, env['ENV']['PATH'].split(path_separator)):
83
-		if ppath.match(env['PROJECT_PACKAGES_DIR'] + "/**/bin"):
84
-			for gpath in ppath.glob(gcc_exe):
85
-				gccpath = str(gpath.resolve())
86
-				# Cache the g++ path to no search always
87
-				if not nocache and ENV_BUILD_PATH.exists():
88
-					blab("Caching g++ for current env")
89
-					GCC_PATH_CACHE.write_text(gccpath)
90
-				return gccpath
91
-
92
-	gccpath = env.get('CXX')
93
-	blab("Couldn't find a compiler! Fallback to %s" % gccpath)
94
-	return gccpath
57
+    from pathlib import Path, PurePath
58
+
59
+    ENV_BUILD_PATH = Path(env['PROJECT_BUILD_DIR'], env['PIOENV'])
60
+    GCC_PATH_CACHE = ENV_BUILD_PATH / ".gcc_path"
61
+
62
+    try:
63
+        gccpath = env.GetProjectOption('custom_gcc')
64
+        blab("Getting compiler from env")
65
+        return gccpath
66
+    except:
67
+        pass
68
+
69
+    # Warning: The cached .gcc_path will obscure a newly-installed toolkit
70
+    if not nocache and GCC_PATH_CACHE.exists():
71
+        blab("Getting g++ path from cache")
72
+        return GCC_PATH_CACHE.read_text()
73
+
74
+    # Use any item in $PATH corresponding to a platformio toolchain bin folder
75
+    path_separator = ':'
76
+    gcc_exe = '*g++'
77
+    if env['PLATFORM'] == 'win32':
78
+        path_separator = ';'
79
+        gcc_exe += ".exe"
80
+
81
+    # Search for the compiler in PATH
82
+    for ppath in map(Path, env['ENV']['PATH'].split(path_separator)):
83
+        if ppath.match(env['PROJECT_PACKAGES_DIR'] + "/**/bin"):
84
+            for gpath in ppath.glob(gcc_exe):
85
+                gccpath = str(gpath.resolve())
86
+                # Cache the g++ path to no search always
87
+                if not nocache and ENV_BUILD_PATH.exists():
88
+                    blab("Caching g++ for current env")
89
+                    GCC_PATH_CACHE.write_text(gccpath)
90
+                return gccpath
91
+
92
+    gccpath = env.get('CXX')
93
+    blab("Couldn't find a compiler! Fallback to %s" % gccpath)
94
+    return gccpath

+ 3
- 3
buildroot/share/PlatformIO/scripts/random-bin.py Parādīt failu

@@ -4,6 +4,6 @@
4 4
 #
5 5
 import pioutil
6 6
 if pioutil.is_pio_build():
7
-	from datetime import datetime
8
-	Import("env")
9
-	env['PROGNAME'] = datetime.now().strftime("firmware-%Y%m%d-%H%M%S")
7
+    from datetime import datetime
8
+    Import("env")
9
+    env['PROGNAME'] = datetime.now().strftime("firmware-%Y%m%d-%H%M%S")

+ 385
- 385
buildroot/share/PlatformIO/scripts/schema.py Parādīt failu

@@ -9,413 +9,413 @@ import re,json
9 9
 from pathlib import Path
10 10
 
11 11
 def extend_dict(d:dict, k:tuple):
12
-	if len(k) >= 1 and k[0] not in d:
13
-		d[k[0]] = {}
14
-	if len(k) >= 2 and k[1] not in d[k[0]]:
15
-		d[k[0]][k[1]] = {}
16
-	if len(k) >= 3 and k[2] not in d[k[0]][k[1]]:
17
-		d[k[0]][k[1]][k[2]] = {}
12
+    if len(k) >= 1 and k[0] not in d:
13
+        d[k[0]] = {}
14
+    if len(k) >= 2 and k[1] not in d[k[0]]:
15
+        d[k[0]][k[1]] = {}
16
+    if len(k) >= 3 and k[2] not in d[k[0]][k[1]]:
17
+        d[k[0]][k[1]][k[2]] = {}
18 18
 
19 19
 grouping_patterns = [
20
-	re.compile(r'^([XYZIJKUVW]|[XYZ]2|Z[34]|E[0-7])$'),
21
-	re.compile(r'^AXIS\d$'),
22
-	re.compile(r'^(MIN|MAX)$'),
23
-	re.compile(r'^[0-8]$'),
24
-	re.compile(r'^HOTEND[0-7]$'),
25
-	re.compile(r'^(HOTENDS|BED|PROBE|COOLER)$'),
26
-	re.compile(r'^[XYZIJKUVW]M(IN|AX)$')
20
+    re.compile(r'^([XYZIJKUVW]|[XYZ]2|Z[34]|E[0-7])$'),
21
+    re.compile(r'^AXIS\d$'),
22
+    re.compile(r'^(MIN|MAX)$'),
23
+    re.compile(r'^[0-8]$'),
24
+    re.compile(r'^HOTEND[0-7]$'),
25
+    re.compile(r'^(HOTENDS|BED|PROBE|COOLER)$'),
26
+    re.compile(r'^[XYZIJKUVW]M(IN|AX)$')
27 27
 ]
28 28
 # If the indexed part of the option name matches a pattern
29 29
 # then add it to the dictionary.
30 30
 def find_grouping(gdict, filekey, sectkey, optkey, pindex):
31
-	optparts = optkey.split('_')
32
-	if 1 < len(optparts) > pindex:
33
-		for patt in grouping_patterns:
34
-			if patt.match(optparts[pindex]):
35
-				subkey = optparts[pindex]
36
-				modkey = '_'.join(optparts)
37
-				optparts[pindex] = '*'
38
-				wildkey = '_'.join(optparts)
39
-				kkey = f'{filekey}|{sectkey}|{wildkey}'
40
-				if kkey not in gdict: gdict[kkey] = []
41
-				gdict[kkey].append((subkey, modkey))
31
+    optparts = optkey.split('_')
32
+    if 1 < len(optparts) > pindex:
33
+        for patt in grouping_patterns:
34
+            if patt.match(optparts[pindex]):
35
+                subkey = optparts[pindex]
36
+                modkey = '_'.join(optparts)
37
+                optparts[pindex] = '*'
38
+                wildkey = '_'.join(optparts)
39
+                kkey = f'{filekey}|{sectkey}|{wildkey}'
40
+                if kkey not in gdict: gdict[kkey] = []
41
+                gdict[kkey].append((subkey, modkey))
42 42
 
43 43
 # Build a list of potential groups. Only those with multiple items will be grouped.
44 44
 def group_options(schema):
45
-	for pindex in range(10, -1, -1):
46
-		found_groups = {}
47
-		for filekey, f in schema.items():
48
-			for sectkey, s in f.items():
49
-				for optkey in s:
50
-					find_grouping(found_groups, filekey, sectkey, optkey, pindex)
51
-
52
-		fkeys = [ k for k in found_groups.keys() ]
53
-		for kkey in fkeys:
54
-			items = found_groups[kkey]
55
-			if len(items) > 1:
56
-				f, s, w = kkey.split('|')
57
-				extend_dict(schema, (f, s, w))						# Add wildcard group to schema
58
-				for subkey, optkey in items:						# Add all items to wildcard group
59
-					schema[f][s][w][subkey] = schema[f][s][optkey]	# Move non-wildcard item to wildcard group
60
-					del schema[f][s][optkey]
61
-			del found_groups[kkey]
45
+    for pindex in range(10, -1, -1):
46
+        found_groups = {}
47
+        for filekey, f in schema.items():
48
+            for sectkey, s in f.items():
49
+                for optkey in s:
50
+                    find_grouping(found_groups, filekey, sectkey, optkey, pindex)
51
+
52
+        fkeys = [ k for k in found_groups.keys() ]
53
+        for kkey in fkeys:
54
+            items = found_groups[kkey]
55
+            if len(items) > 1:
56
+                f, s, w = kkey.split('|')
57
+                extend_dict(schema, (f, s, w))                      # Add wildcard group to schema
58
+                for subkey, optkey in items:                        # Add all items to wildcard group
59
+                    schema[f][s][w][subkey] = schema[f][s][optkey]  # Move non-wildcard item to wildcard group
60
+                    del schema[f][s][optkey]
61
+            del found_groups[kkey]
62 62
 
63 63
 # Extract all board names from boards.h
64 64
 def load_boards():
65
-	bpath = Path("Marlin/src/core/boards.h")
66
-	if bpath.is_file():
67
-		with bpath.open() as bfile:
68
-			boards = []
69
-			for line in bfile:
70
-				if line.startswith("#define BOARD_"):
71
-					bname = line.split()[1]
72
-					if bname != "BOARD_UNKNOWN": boards.append(bname)
73
-			return "['" + "','".join(boards) + "']"
74
-	return ''
65
+    bpath = Path("Marlin/src/core/boards.h")
66
+    if bpath.is_file():
67
+        with bpath.open() as bfile:
68
+            boards = []
69
+            for line in bfile:
70
+                if line.startswith("#define BOARD_"):
71
+                    bname = line.split()[1]
72
+                    if bname != "BOARD_UNKNOWN": boards.append(bname)
73
+            return "['" + "','".join(boards) + "']"
74
+    return ''
75 75
 
76 76
 #
77 77
 # Extract a schema from the current configuration files
78 78
 #
79 79
 def extract():
80
-	# Load board names from boards.h
81
-	boards = load_boards()
82
-
83
-	# Parsing states
84
-	class Parse:
85
-		NORMAL			= 0 # No condition yet
86
-		BLOCK_COMMENT	= 1 # Looking for the end of the block comment
87
-		EOL_COMMENT		= 2 # EOL comment started, maybe add the next comment?
88
-		GET_SENSORS		= 3 # Gathering temperature sensor options
89
-		ERROR			= 9 # Syntax error
90
-
91
-	# List of files to process, with shorthand
92
-	filekey = { 'Configuration.h':'basic', 'Configuration_adv.h':'advanced' }
93
-	# A JSON object to store the data
94
-	sch_out = { 'basic':{}, 'advanced':{} }
95
-	# Regex for #define NAME [VALUE] [COMMENT] with sanitized line
96
-	defgrep = re.compile(r'^(//)?\s*(#define)\s+([A-Za-z0-9_]+)\s*(.*?)\s*(//.+)?$')
97
-	# Defines to ignore
98
-	ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXAMPLES_DIR', 'CONFIG_EXPORT')
99
-	# Start with unknown state
100
-	state = Parse.NORMAL
101
-	# Serial ID
102
-	sid = 0
103
-	# Loop through files and parse them line by line
104
-	for fn, fk in filekey.items():
105
-		with Path("Marlin", fn).open() as fileobj:
106
-			section = 'none'		# Current Settings section
107
-			line_number = 0			# Counter for the line number of the file
108
-			conditions = []			# Create a condition stack for the current file
109
-			comment_buff = []		# A temporary buffer for comments
110
-			options_json = ''		# A buffer for the most recent options JSON found
111
-			eol_options = False		# The options came from end of line, so only apply once
112
-			join_line = False		# A flag that the line should be joined with the previous one
113
-			line = ''				# A line buffer to handle \ continuation
114
-			last_added_ref = None	# Reference to the last added item
115
-			# Loop through the lines in the file
116
-			for the_line in fileobj.readlines():
117
-				line_number += 1
118
-
119
-				# Clean the line for easier parsing
120
-				the_line = the_line.strip()
121
-
122
-				if join_line:	# A previous line is being made longer
123
-					line += (' ' if line else '') + the_line
124
-				else:			# Otherwise, start the line anew
125
-					line, line_start = the_line, line_number
126
-
127
-				# If the resulting line ends with a \, don't process now.
128
-				# Strip the end off. The next line will be joined with it.
129
-				join_line = line.endswith("\\")
130
-				if join_line:
131
-					line = line[:-1].strip()
132
-					continue
133
-				else:
134
-					line_end = line_number
135
-
136
-				defmatch = defgrep.match(line)
137
-
138
-				# Special handling for EOL comments after a #define.
139
-				# At this point the #define is already digested and inserted,
140
-				# so we have to extend it
141
-				if state == Parse.EOL_COMMENT:
142
-					# If the line is not a comment, we're done with the EOL comment
143
-					if not defmatch and the_line.startswith('//'):
144
-						comment_buff.append(the_line[2:].strip())
145
-					else:
146
-						last_added_ref['comment'] = ' '.join(comment_buff)
147
-						comment_buff = []
148
-						state = Parse.NORMAL
149
-
150
-				def use_comment(c, opt, sec, bufref):
151
-					if c.startswith(':'):				# If the comment starts with : then it has magic JSON
152
-						d = c[1:].strip()				# Strip the leading :
153
-						cbr = c.rindex('}') if d.startswith('{') else c.rindex(']') if d.startswith('[') else 0
154
-						if cbr:
155
-							opt, cmt = c[1:cbr+1].strip(), c[cbr+1:].strip()
156
-							if cmt != '': bufref.append(cmt)
157
-						else:
158
-							opt = c[1:].strip()
159
-					elif c.startswith('@section'):		# Start a new section
160
-						sec = c[8:].strip()
161
-					elif not c.startswith('========'):
162
-						bufref.append(c)
163
-					return opt, sec
164
-
165
-				# In a block comment, capture lines up to the end of the comment.
166
-				# Assume nothing follows the comment closure.
167
-				if state in (Parse.BLOCK_COMMENT, Parse.GET_SENSORS):
168
-					endpos = line.find('*/')
169
-					if endpos < 0:
170
-						cline = line
171
-					else:
172
-						cline, line = line[:endpos].strip(), line[endpos+2:].strip()
173
-
174
-						# Temperature sensors are done
175
-						if state == Parse.GET_SENSORS:
176
-							options_json = f'[ {options_json[:-2]} ]'
177
-
178
-						state = Parse.NORMAL
179
-
180
-					# Strip the leading '*' from block comments
181
-					if cline.startswith('*'): cline = cline[1:].strip()
182
-
183
-					# Collect temperature sensors
184
-					if state == Parse.GET_SENSORS:
185
-						sens = re.match(r'^(-?\d+)\s*:\s*(.+)$', cline)
186
-						if sens:
187
-							s2 = sens[2].replace("'","''")
188
-							options_json += f"{sens[1]}:'{s2}', "
189
-
190
-					elif state == Parse.BLOCK_COMMENT:
191
-
192
-						# Look for temperature sensors
193
-						if cline == "Temperature sensors available:":
194
-							state, cline = Parse.GET_SENSORS, "Temperature Sensors"
195
-
196
-						options_json, section = use_comment(cline, options_json, section, comment_buff)
197
-
198
-				# For the normal state we're looking for any non-blank line
199
-				elif state == Parse.NORMAL:
200
-					# Skip a commented define when evaluating comment opening
201
-					st = 2 if re.match(r'^//\s*#define', line) else 0
202
-					cpos1 = line.find('/*')		# Start a block comment on the line?
203
-					cpos2 = line.find('//', st)	# Start an end of line comment on the line?
204
-
205
-					# Only the first comment starter gets evaluated
206
-					cpos = -1
207
-					if cpos1 != -1 and (cpos1 < cpos2 or cpos2 == -1):
208
-						cpos = cpos1
209
-						comment_buff = []
210
-						state = Parse.BLOCK_COMMENT
211
-						eol_options = False
212
-
213
-					elif cpos2 != -1 and (cpos2 < cpos1 or cpos1 == -1):
214
-						cpos = cpos2
215
-
216
-						# Comment after a define may be continued on the following lines
217
-						if defmatch != None and cpos > 10:
218
-							state = Parse.EOL_COMMENT
219
-							comment_buff = []
220
-
221
-					# Process the start of a new comment
222
-					if cpos != -1:
223
-						cline, line = line[cpos+2:].strip(), line[:cpos].strip()
224
-
225
-						if state == Parse.BLOCK_COMMENT:
226
-							# Strip leading '*' from block comments
227
-							if cline.startswith('*'): cline = cline[1:].strip()
228
-						else:
229
-							# Expire end-of-line options after first use
230
-							if cline.startswith(':'): eol_options = True
231
-
232
-						# Buffer a non-empty comment start
233
-						if cline != '':
234
-							options_json, section = use_comment(cline, options_json, section, comment_buff)
235
-
236
-					# If the line has nothing before the comment, go to the next line
237
-					if line == '':
238
-						options_json = ''
239
-						continue
240
-
241
-					# Parenthesize the given expression if needed
242
-					def atomize(s):
243
-						if s == '' \
244
-						or re.match(r'^[A-Za-z0-9_]*(\([^)]+\))?$', s) \
245
-						or re.match(r'^[A-Za-z0-9_]+ == \d+?$', s):
246
-							return s
247
-						return f'({s})'
248
-
249
-					#
250
-					# The conditions stack is an array containing condition-arrays.
251
-					# Each condition-array lists the conditions for the current block.
252
-					# IF/N/DEF adds a new condition-array to the stack.
253
-					# ELSE/ELIF/ENDIF pop the condition-array.
254
-					# ELSE/ELIF negate the last item in the popped condition-array.
255
-					# ELIF adds a new condition to the end of the array.
256
-					# ELSE/ELIF re-push the condition-array.
257
-					#
258
-					cparts = line.split()
259
-					iselif, iselse = cparts[0] == '#elif', cparts[0] == '#else'
260
-					if iselif or iselse or cparts[0] == '#endif':
261
-						if len(conditions) == 0:
262
-							raise Exception(f'no #if block at line {line_number}')
263
-
264
-						# Pop the last condition-array from the stack
265
-						prev = conditions.pop()
266
-
267
-						if iselif or iselse:
268
-							prev[-1] = '!' + prev[-1] # Invert the last condition
269
-							if iselif: prev.append(atomize(line[5:].strip()))
270
-							conditions.append(prev)
271
-
272
-					elif cparts[0] == '#if':
273
-						conditions.append([ atomize(line[3:].strip()) ])
274
-					elif cparts[0] == '#ifdef':
275
-						conditions.append([ f'defined({line[6:].strip()})' ])
276
-					elif cparts[0] == '#ifndef':
277
-						conditions.append([ f'!defined({line[7:].strip()})' ])
278
-
279
-					# Handle a complete #define line
280
-					elif defmatch != None:
281
-
282
-						# Get the match groups into vars
283
-						enabled, define_name, val = defmatch[1] == None, defmatch[3], defmatch[4]
284
-
285
-						# Increment the serial ID
286
-						sid += 1
287
-
288
-						# Create a new dictionary for the current #define
289
-						define_info = {
290
-							'section': section,
291
-							'name': define_name,
292
-							'enabled': enabled,
293
-							'line': line_start,
294
-							'sid': sid
295
-						}
296
-
297
-						# Type is based on the value
298
-						if val == '':
299
-							value_type = 'switch'
300
-						elif re.match(r'^(true|false)$', val):
301
-							value_type = 'bool'
302
-							val = val == 'true'
303
-						elif re.match(r'^[-+]?\s*\d+$', val):
304
-							value_type = 'int'
305
-							val = int(val)
306
-						elif re.match(r'[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?', val):
307
-							value_type = 'float'
308
-							val = float(val.replace('f',''))
309
-						else:
310
-							value_type = 'string'	if val[0] == '"' \
311
-									else 'char'		if val[0] == "'" \
312
-									else 'state'	if re.match(r'^(LOW|HIGH)$', val) \
313
-									else 'enum'		if re.match(r'^[A-Za-z0-9_]{3,}$', val) \
314
-									else 'int[]'	if re.match(r'^{(\s*[-+]?\s*\d+\s*(,\s*)?)+}$', val) \
315
-									else 'float[]'	if re.match(r'^{(\s*[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?\s*(,\s*)?)+}$', val) \
316
-									else 'array'	if val[0] == '{' \
317
-									else ''
318
-
319
-						if val != '': define_info['value'] = val
320
-						if value_type != '': define_info['type'] = value_type
321
-
322
-						# Join up accumulated conditions with &&
323
-						if conditions: define_info['requires'] = ' && '.join(sum(conditions, []))
324
-
325
-						# If the comment_buff is not empty, add the comment to the info
326
-						if comment_buff:
327
-							full_comment = '\n'.join(comment_buff)
328
-
329
-							# An EOL comment will be added later
330
-							# The handling could go here instead of above
331
-							if state == Parse.EOL_COMMENT:
332
-								define_info['comment'] = ''
333
-							else:
334
-								define_info['comment'] = full_comment
335
-								comment_buff = []
336
-
337
-							# If the comment specifies units, add that to the info
338
-							units = re.match(r'^\(([^)]+)\)', full_comment)
339
-							if units:
340
-								units = units[1]
341
-								if units == 's' or units == 'sec': units = 'seconds'
342
-								define_info['units'] = units
343
-
344
-						# Set the options for the current #define
345
-						if define_name == "MOTHERBOARD" and boards != '':
346
-							define_info['options'] = boards
347
-						elif options_json != '':
348
-							define_info['options'] = options_json
349
-							if eol_options: options_json = ''
350
-
351
-						# Create section dict if it doesn't exist yet
352
-						if section not in sch_out[fk]: sch_out[fk][section] = {}
353
-
354
-						# If define has already been seen...
355
-						if define_name in sch_out[fk][section]:
356
-							info = sch_out[fk][section][define_name]
357
-							if isinstance(info, dict): info = [ info ]	# Convert a single dict into a list
358
-							info.append(define_info)					# Add to the list
359
-						else:
360
-							# Add the define dict with name as key
361
-							sch_out[fk][section][define_name] = define_info
362
-
363
-						if state == Parse.EOL_COMMENT:
364
-							last_added_ref = define_info
365
-
366
-	return sch_out
80
+    # Load board names from boards.h
81
+    boards = load_boards()
82
+
83
+    # Parsing states
84
+    class Parse:
85
+        NORMAL          = 0 # No condition yet
86
+        BLOCK_COMMENT   = 1 # Looking for the end of the block comment
87
+        EOL_COMMENT     = 2 # EOL comment started, maybe add the next comment?
88
+        GET_SENSORS     = 3 # Gathering temperature sensor options
89
+        ERROR           = 9 # Syntax error
90
+
91
+    # List of files to process, with shorthand
92
+    filekey = { 'Configuration.h':'basic', 'Configuration_adv.h':'advanced' }
93
+    # A JSON object to store the data
94
+    sch_out = { 'basic':{}, 'advanced':{} }
95
+    # Regex for #define NAME [VALUE] [COMMENT] with sanitized line
96
+    defgrep = re.compile(r'^(//)?\s*(#define)\s+([A-Za-z0-9_]+)\s*(.*?)\s*(//.+)?$')
97
+    # Defines to ignore
98
+    ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXAMPLES_DIR', 'CONFIG_EXPORT')
99
+    # Start with unknown state
100
+    state = Parse.NORMAL
101
+    # Serial ID
102
+    sid = 0
103
+    # Loop through files and parse them line by line
104
+    for fn, fk in filekey.items():
105
+        with Path("Marlin", fn).open() as fileobj:
106
+            section = 'none'        # Current Settings section
107
+            line_number = 0         # Counter for the line number of the file
108
+            conditions = []         # Create a condition stack for the current file
109
+            comment_buff = []       # A temporary buffer for comments
110
+            options_json = ''       # A buffer for the most recent options JSON found
111
+            eol_options = False     # The options came from end of line, so only apply once
112
+            join_line = False       # A flag that the line should be joined with the previous one
113
+            line = ''               # A line buffer to handle \ continuation
114
+            last_added_ref = None   # Reference to the last added item
115
+            # Loop through the lines in the file
116
+            for the_line in fileobj.readlines():
117
+                line_number += 1
118
+
119
+                # Clean the line for easier parsing
120
+                the_line = the_line.strip()
121
+
122
+                if join_line:   # A previous line is being made longer
123
+                    line += (' ' if line else '') + the_line
124
+                else:           # Otherwise, start the line anew
125
+                    line, line_start = the_line, line_number
126
+
127
+                # If the resulting line ends with a \, don't process now.
128
+                # Strip the end off. The next line will be joined with it.
129
+                join_line = line.endswith("\\")
130
+                if join_line:
131
+                    line = line[:-1].strip()
132
+                    continue
133
+                else:
134
+                    line_end = line_number
135
+
136
+                defmatch = defgrep.match(line)
137
+
138
+                # Special handling for EOL comments after a #define.
139
+                # At this point the #define is already digested and inserted,
140
+                # so we have to extend it
141
+                if state == Parse.EOL_COMMENT:
142
+                    # If the line is not a comment, we're done with the EOL comment
143
+                    if not defmatch and the_line.startswith('//'):
144
+                        comment_buff.append(the_line[2:].strip())
145
+                    else:
146
+                        last_added_ref['comment'] = ' '.join(comment_buff)
147
+                        comment_buff = []
148
+                        state = Parse.NORMAL
149
+
150
+                def use_comment(c, opt, sec, bufref):
151
+                    if c.startswith(':'):               # If the comment starts with : then it has magic JSON
152
+                        d = c[1:].strip()               # Strip the leading :
153
+                        cbr = c.rindex('}') if d.startswith('{') else c.rindex(']') if d.startswith('[') else 0
154
+                        if cbr:
155
+                            opt, cmt = c[1:cbr+1].strip(), c[cbr+1:].strip()
156
+                            if cmt != '': bufref.append(cmt)
157
+                        else:
158
+                            opt = c[1:].strip()
159
+                    elif c.startswith('@section'):      # Start a new section
160
+                        sec = c[8:].strip()
161
+                    elif not c.startswith('========'):
162
+                        bufref.append(c)
163
+                    return opt, sec
164
+
165
+                # In a block comment, capture lines up to the end of the comment.
166
+                # Assume nothing follows the comment closure.
167
+                if state in (Parse.BLOCK_COMMENT, Parse.GET_SENSORS):
168
+                    endpos = line.find('*/')
169
+                    if endpos < 0:
170
+                        cline = line
171
+                    else:
172
+                        cline, line = line[:endpos].strip(), line[endpos+2:].strip()
173
+
174
+                        # Temperature sensors are done
175
+                        if state == Parse.GET_SENSORS:
176
+                            options_json = f'[ {options_json[:-2]} ]'
177
+
178
+                        state = Parse.NORMAL
179
+
180
+                    # Strip the leading '*' from block comments
181
+                    if cline.startswith('*'): cline = cline[1:].strip()
182
+
183
+                    # Collect temperature sensors
184
+                    if state == Parse.GET_SENSORS:
185
+                        sens = re.match(r'^(-?\d+)\s*:\s*(.+)$', cline)
186
+                        if sens:
187
+                            s2 = sens[2].replace("'","''")
188
+                            options_json += f"{sens[1]}:'{s2}', "
189
+
190
+                    elif state == Parse.BLOCK_COMMENT:
191
+
192
+                        # Look for temperature sensors
193
+                        if cline == "Temperature sensors available:":
194
+                            state, cline = Parse.GET_SENSORS, "Temperature Sensors"
195
+
196
+                        options_json, section = use_comment(cline, options_json, section, comment_buff)
197
+
198
+                # For the normal state we're looking for any non-blank line
199
+                elif state == Parse.NORMAL:
200
+                    # Skip a commented define when evaluating comment opening
201
+                    st = 2 if re.match(r'^//\s*#define', line) else 0
202
+                    cpos1 = line.find('/*')     # Start a block comment on the line?
203
+                    cpos2 = line.find('//', st) # Start an end of line comment on the line?
204
+
205
+                    # Only the first comment starter gets evaluated
206
+                    cpos = -1
207
+                    if cpos1 != -1 and (cpos1 < cpos2 or cpos2 == -1):
208
+                        cpos = cpos1
209
+                        comment_buff = []
210
+                        state = Parse.BLOCK_COMMENT
211
+                        eol_options = False
212
+
213
+                    elif cpos2 != -1 and (cpos2 < cpos1 or cpos1 == -1):
214
+                        cpos = cpos2
215
+
216
+                        # Comment after a define may be continued on the following lines
217
+                        if defmatch != None and cpos > 10:
218
+                            state = Parse.EOL_COMMENT
219
+                            comment_buff = []
220
+
221
+                    # Process the start of a new comment
222
+                    if cpos != -1:
223
+                        cline, line = line[cpos+2:].strip(), line[:cpos].strip()
224
+
225
+                        if state == Parse.BLOCK_COMMENT:
226
+                            # Strip leading '*' from block comments
227
+                            if cline.startswith('*'): cline = cline[1:].strip()
228
+                        else:
229
+                            # Expire end-of-line options after first use
230
+                            if cline.startswith(':'): eol_options = True
231
+
232
+                        # Buffer a non-empty comment start
233
+                        if cline != '':
234
+                            options_json, section = use_comment(cline, options_json, section, comment_buff)
235
+
236
+                    # If the line has nothing before the comment, go to the next line
237
+                    if line == '':
238
+                        options_json = ''
239
+                        continue
240
+
241
+                    # Parenthesize the given expression if needed
242
+                    def atomize(s):
243
+                        if s == '' \
244
+                        or re.match(r'^[A-Za-z0-9_]*(\([^)]+\))?$', s) \
245
+                        or re.match(r'^[A-Za-z0-9_]+ == \d+?$', s):
246
+                            return s
247
+                        return f'({s})'
248
+
249
+                    #
250
+                    # The conditions stack is an array containing condition-arrays.
251
+                    # Each condition-array lists the conditions for the current block.
252
+                    # IF/N/DEF adds a new condition-array to the stack.
253
+                    # ELSE/ELIF/ENDIF pop the condition-array.
254
+                    # ELSE/ELIF negate the last item in the popped condition-array.
255
+                    # ELIF adds a new condition to the end of the array.
256
+                    # ELSE/ELIF re-push the condition-array.
257
+                    #
258
+                    cparts = line.split()
259
+                    iselif, iselse = cparts[0] == '#elif', cparts[0] == '#else'
260
+                    if iselif or iselse or cparts[0] == '#endif':
261
+                        if len(conditions) == 0:
262
+                            raise Exception(f'no #if block at line {line_number}')
263
+
264
+                        # Pop the last condition-array from the stack
265
+                        prev = conditions.pop()
266
+
267
+                        if iselif or iselse:
268
+                            prev[-1] = '!' + prev[-1] # Invert the last condition
269
+                            if iselif: prev.append(atomize(line[5:].strip()))
270
+                            conditions.append(prev)
271
+
272
+                    elif cparts[0] == '#if':
273
+                        conditions.append([ atomize(line[3:].strip()) ])
274
+                    elif cparts[0] == '#ifdef':
275
+                        conditions.append([ f'defined({line[6:].strip()})' ])
276
+                    elif cparts[0] == '#ifndef':
277
+                        conditions.append([ f'!defined({line[7:].strip()})' ])
278
+
279
+                    # Handle a complete #define line
280
+                    elif defmatch != None:
281
+
282
+                        # Get the match groups into vars
283
+                        enabled, define_name, val = defmatch[1] == None, defmatch[3], defmatch[4]
284
+
285
+                        # Increment the serial ID
286
+                        sid += 1
287
+
288
+                        # Create a new dictionary for the current #define
289
+                        define_info = {
290
+                            'section': section,
291
+                            'name': define_name,
292
+                            'enabled': enabled,
293
+                            'line': line_start,
294
+                            'sid': sid
295
+                        }
296
+
297
+                        # Type is based on the value
298
+                        if val == '':
299
+                            value_type = 'switch'
300
+                        elif re.match(r'^(true|false)$', val):
301
+                            value_type = 'bool'
302
+                            val = val == 'true'
303
+                        elif re.match(r'^[-+]?\s*\d+$', val):
304
+                            value_type = 'int'
305
+                            val = int(val)
306
+                        elif re.match(r'[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?', val):
307
+                            value_type = 'float'
308
+                            val = float(val.replace('f',''))
309
+                        else:
310
+                            value_type = 'string'   if val[0] == '"' \
311
+                                    else 'char'     if val[0] == "'" \
312
+                                    else 'state'    if re.match(r'^(LOW|HIGH)$', val) \
313
+                                    else 'enum'     if re.match(r'^[A-Za-z0-9_]{3,}$', val) \
314
+                                    else 'int[]'    if re.match(r'^{(\s*[-+]?\s*\d+\s*(,\s*)?)+}$', val) \
315
+                                    else 'float[]'  if re.match(r'^{(\s*[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?\s*(,\s*)?)+}$', val) \
316
+                                    else 'array'    if val[0] == '{' \
317
+                                    else ''
318
+
319
+                        if val != '': define_info['value'] = val
320
+                        if value_type != '': define_info['type'] = value_type
321
+
322
+                        # Join up accumulated conditions with &&
323
+                        if conditions: define_info['requires'] = ' && '.join(sum(conditions, []))
324
+
325
+                        # If the comment_buff is not empty, add the comment to the info
326
+                        if comment_buff:
327
+                            full_comment = '\n'.join(comment_buff)
328
+
329
+                            # An EOL comment will be added later
330
+                            # The handling could go here instead of above
331
+                            if state == Parse.EOL_COMMENT:
332
+                                define_info['comment'] = ''
333
+                            else:
334
+                                define_info['comment'] = full_comment
335
+                                comment_buff = []
336
+
337
+                            # If the comment specifies units, add that to the info
338
+                            units = re.match(r'^\(([^)]+)\)', full_comment)
339
+                            if units:
340
+                                units = units[1]
341
+                                if units == 's' or units == 'sec': units = 'seconds'
342
+                                define_info['units'] = units
343
+
344
+                        # Set the options for the current #define
345
+                        if define_name == "MOTHERBOARD" and boards != '':
346
+                            define_info['options'] = boards
347
+                        elif options_json != '':
348
+                            define_info['options'] = options_json
349
+                            if eol_options: options_json = ''
350
+
351
+                        # Create section dict if it doesn't exist yet
352
+                        if section not in sch_out[fk]: sch_out[fk][section] = {}
353
+
354
+                        # If define has already been seen...
355
+                        if define_name in sch_out[fk][section]:
356
+                            info = sch_out[fk][section][define_name]
357
+                            if isinstance(info, dict): info = [ info ]  # Convert a single dict into a list
358
+                            info.append(define_info)                    # Add to the list
359
+                        else:
360
+                            # Add the define dict with name as key
361
+                            sch_out[fk][section][define_name] = define_info
362
+
363
+                        if state == Parse.EOL_COMMENT:
364
+                            last_added_ref = define_info
365
+
366
+    return sch_out
367 367
 
368 368
 def dump_json(schema:dict, jpath:Path):
369
-	with jpath.open('w') as jfile:
370
-		json.dump(schema, jfile, ensure_ascii=False, indent=2)
369
+    with jpath.open('w') as jfile:
370
+        json.dump(schema, jfile, ensure_ascii=False, indent=2)
371 371
 
372 372
 def dump_yaml(schema:dict, ypath:Path):
373
-	import yaml
374
-	with ypath.open('w') as yfile:
375
-		yaml.dump(schema, yfile, default_flow_style=False, width=120, indent=2)
373
+    import yaml
374
+    with ypath.open('w') as yfile:
375
+        yaml.dump(schema, yfile, default_flow_style=False, width=120, indent=2)
376 376
 
377 377
 def main():
378
-	try:
379
-		schema = extract()
380
-	except Exception as exc:
381
-		print("Error: " + str(exc))
382
-		schema = None
383
-
384
-	if schema:
385
-
386
-		# Get the first command line argument
387
-		import sys
388
-		if len(sys.argv) > 1:
389
-			arg = sys.argv[1]
390
-		else:
391
-			arg = 'some'
392
-
393
-		# JSON schema
394
-		if arg in ['some', 'json', 'jsons']:
395
-			print("Generating JSON ...")
396
-			dump_json(schema, Path('schema.json'))
397
-
398
-		# JSON schema (wildcard names)
399
-		if arg in ['group', 'jsons']:
400
-			group_options(schema)
401
-			dump_json(schema, Path('schema_grouped.json'))
402
-
403
-		# YAML
404
-		if arg in ['some', 'yml', 'yaml']:
405
-			try:
406
-				import yaml
407
-			except ImportError:
408
-				print("Installing YAML module ...")
409
-				import subprocess
410
-				try:
411
-					subprocess.run(['python3', '-m', 'pip', 'install', 'pyyaml'])
412
-					import yaml
413
-				except:
414
-					print("Failed to install YAML module")
415
-					return
416
-
417
-			print("Generating YML ...")
418
-			dump_yaml(schema, Path('schema.yml'))
378
+    try:
379
+        schema = extract()
380
+    except Exception as exc:
381
+        print("Error: " + str(exc))
382
+        schema = None
383
+
384
+    if schema:
385
+
386
+        # Get the first command line argument
387
+        import sys
388
+        if len(sys.argv) > 1:
389
+            arg = sys.argv[1]
390
+        else:
391
+            arg = 'some'
392
+
393
+        # JSON schema
394
+        if arg in ['some', 'json', 'jsons']:
395
+            print("Generating JSON ...")
396
+            dump_json(schema, Path('schema.json'))
397
+
398
+        # JSON schema (wildcard names)
399
+        if arg in ['group', 'jsons']:
400
+            group_options(schema)
401
+            dump_json(schema, Path('schema_grouped.json'))
402
+
403
+        # YAML
404
+        if arg in ['some', 'yml', 'yaml']:
405
+            try:
406
+                import yaml
407
+            except ImportError:
408
+                print("Installing YAML module ...")
409
+                import subprocess
410
+                try:
411
+                    subprocess.run(['python3', '-m', 'pip', 'install', 'pyyaml'])
412
+                    import yaml
413
+                except:
414
+                    print("Failed to install YAML module")
415
+                    return
416
+
417
+            print("Generating YML ...")
418
+            dump_yaml(schema, Path('schema.yml'))
419 419
 
420 420
 if __name__ == '__main__':
421
-	main()
421
+    main()

+ 242
- 242
buildroot/share/PlatformIO/scripts/signature.py Parādīt failu

@@ -16,32 +16,32 @@ from pathlib import Path
16 16
 # resulting config.ini to produce more exact configuration files.
17 17
 #
18 18
 def extract_defines(filepath):
19
-	f = open(filepath, encoding="utf8").read().split("\n")
20
-	a = []
21
-	for line in f:
22
-		sline = line.strip()
23
-		if sline[:7] == "#define":
24
-			# Extract the key here (we don't care about the value)
25
-			kv = sline[8:].strip().split()
26
-			a.append(kv[0])
27
-	return a
19
+    f = open(filepath, encoding="utf8").read().split("\n")
20
+    a = []
21
+    for line in f:
22
+        sline = line.strip()
23
+        if sline[:7] == "#define":
24
+            # Extract the key here (we don't care about the value)
25
+            kv = sline[8:].strip().split()
26
+            a.append(kv[0])
27
+    return a
28 28
 
29 29
 # Compute the SHA256 hash of a file
30 30
 def get_file_sha256sum(filepath):
31
-	sha256_hash = hashlib.sha256()
32
-	with open(filepath,"rb") as f:
33
-		# Read and update hash string value in blocks of 4K
34
-		for byte_block in iter(lambda: f.read(4096),b""):
35
-			sha256_hash.update(byte_block)
36
-	return sha256_hash.hexdigest()
31
+    sha256_hash = hashlib.sha256()
32
+    with open(filepath,"rb") as f:
33
+        # Read and update hash string value in blocks of 4K
34
+        for byte_block in iter(lambda: f.read(4096),b""):
35
+            sha256_hash.update(byte_block)
36
+    return sha256_hash.hexdigest()
37 37
 
38 38
 #
39 39
 # Compress a JSON file into a zip file
40 40
 #
41 41
 import zipfile
42 42
 def compress_file(filepath, outpath):
43
-	with zipfile.ZipFile(outpath, 'w', compression=zipfile.ZIP_BZIP2, compresslevel=9) as zipf:
44
-		zipf.write(filepath, compress_type=zipfile.ZIP_BZIP2, compresslevel=9)
43
+    with zipfile.ZipFile(outpath, 'w', compression=zipfile.ZIP_BZIP2, compresslevel=9) as zipf:
44
+        zipf.write(filepath, compress_type=zipfile.ZIP_BZIP2, compresslevel=9)
45 45
 
46 46
 #
47 47
 # Compute the build signature. The idea is to extract all defines in the configuration headers
@@ -49,228 +49,228 @@ def compress_file(filepath, outpath):
49 49
 # We can reverse the signature to get a 1:1 equivalent configuration file
50 50
 #
51 51
 def compute_build_signature(env):
52
-	if 'BUILD_SIGNATURE' in env:
53
-		return
54
-
55
-	# Definitions from these files will be kept
56
-	files_to_keep = [ 'Marlin/Configuration.h', 'Marlin/Configuration_adv.h' ]
57
-
58
-	build_path = Path(env['PROJECT_BUILD_DIR'], env['PIOENV'])
59
-
60
-	# Check if we can skip processing
61
-	hashes = ''
62
-	for header in files_to_keep:
63
-		hashes += get_file_sha256sum(header)[0:10]
64
-
65
-	marlin_json = build_path / 'marlin_config.json'
66
-	marlin_zip = build_path / 'mc.zip'
67
-
68
-	# Read existing config file
69
-	try:
70
-		with marlin_json.open() as infile:
71
-			conf = json.load(infile)
72
-			if conf['__INITIAL_HASH'] == hashes:
73
-				# Same configuration, skip recomputing the building signature
74
-				compress_file(marlin_json, marlin_zip)
75
-				return
76
-	except:
77
-		pass
78
-
79
-	# Get enabled config options based on preprocessor
80
-	from preprocessor import run_preprocessor
81
-	complete_cfg = run_preprocessor(env)
82
-
83
-	# Dumb #define extraction from the configuration files
84
-	conf_defines = {}
85
-	all_defines = []
86
-	for header in files_to_keep:
87
-		defines = extract_defines(header)
88
-		# To filter only the define we want
89
-		all_defines += defines
90
-		# To remember from which file it cames from
91
-		conf_defines[header.split('/')[-1]] = defines
92
-
93
-	r = re.compile(r"\(+(\s*-*\s*_.*)\)+")
94
-
95
-	# First step is to collect all valid macros
96
-	defines = {}
97
-	for line in complete_cfg:
98
-
99
-		# Split the define from the value
100
-		key_val = line[8:].strip().decode().split(' ')
101
-		key, value = key_val[0], ' '.join(key_val[1:])
102
-
103
-		# Ignore values starting with two underscore, since it's low level
104
-		if len(key) > 2 and key[0:2] == "__" :
105
-			continue
106
-		# Ignore values containing a parenthesis (likely a function macro)
107
-		if '(' in key and ')' in key:
108
-			continue
109
-
110
-		# Then filter dumb values
111
-		if r.match(value):
112
-			continue
113
-
114
-		defines[key] = value if len(value) else ""
115
-
116
-	#
117
-	# Continue to gather data for CONFIGURATION_EMBEDDING or CONFIG_EXPORT
118
-	#
119
-	if not ('CONFIGURATION_EMBEDDING' in defines or 'CONFIG_EXPORT' in defines):
120
-		return
121
-
122
-	# Second step is to filter useless macro
123
-	resolved_defines = {}
124
-	for key in defines:
125
-		# Remove all boards now
126
-		if key.startswith("BOARD_") and key != "BOARD_INFO_NAME":
127
-			continue
128
-		# Remove all keys ending by "_NAME" as it does not make a difference to the configuration
129
-		if key.endswith("_NAME") and key != "CUSTOM_MACHINE_NAME":
130
-			continue
131
-		# Remove all keys ending by "_T_DECLARED" as it's a copy of extraneous system stuff
132
-		if key.endswith("_T_DECLARED"):
133
-			continue
134
-		# Remove keys that are not in the #define list in the Configuration list
135
-		if key not in all_defines + [ 'DETAILED_BUILD_VERSION', 'STRING_DISTRIBUTION_DATE' ]:
136
-			continue
137
-
138
-		# Don't be that smart guy here
139
-		resolved_defines[key] = defines[key]
140
-
141
-	# Generate a build signature now
142
-	# We are making an object that's a bit more complex than a basic dictionary here
143
-	data = {}
144
-	data['__INITIAL_HASH'] = hashes
145
-	# First create a key for each header here
146
-	for header in conf_defines:
147
-		data[header] = {}
148
-
149
-	# Then populate the object where each key is going to (that's a O(N^2) algorithm here...)
150
-	for key in resolved_defines:
151
-		for header in conf_defines:
152
-			if key in conf_defines[header]:
153
-				data[header][key] = resolved_defines[key]
154
-
155
-	# Every python needs this toy
156
-	def tryint(key):
157
-		try:
158
-			return int(defines[key])
159
-		except:
160
-			return 0
161
-
162
-	config_dump = tryint('CONFIG_EXPORT')
163
-
164
-	#
165
-	# Produce an INI file if CONFIG_EXPORT == 2
166
-	#
167
-	if config_dump == 2:
168
-		print("Generating config.ini ...")
169
-		config_ini = build_path / 'config.ini'
170
-		with config_ini.open('w') as outfile:
171
-			ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXPORT')
172
-			filegrp = { 'Configuration.h':'config:basic', 'Configuration_adv.h':'config:advanced' }
173
-			vers = defines["CONFIGURATION_H_VERSION"]
174
-			dt_string = datetime.now().strftime("%Y-%m-%d at %H:%M:%S")
175
-			ini_fmt = '{0:40}{1}\n'
176
-			outfile.write(
177
-				'#\n'
178
-				+ '# Marlin Firmware\n'
179
-				+ '# config.ini - Options to apply before the build\n'
180
-				+ '#\n'
181
-				+ f'# Generated by Marlin build on {dt_string}\n'
182
-				+ '#\n'
183
-				+ '\n'
184
-				+ '[config:base]\n'
185
-				+ ini_fmt.format('ini_use_config', ' = all')
186
-				+ ini_fmt.format('ini_config_vers', f' = {vers}')
187
-			)
188
-			# Loop through the data array of arrays
189
-			for header in data:
190
-				if header.startswith('__'):
191
-					continue
192
-				outfile.write('\n[' + filegrp[header] + ']\n')
193
-				for key in sorted(data[header]):
194
-					if key not in ignore:
195
-						val = 'on' if data[header][key] == '' else data[header][key]
196
-						outfile.write(ini_fmt.format(key.lower(), ' = ' + val))
197
-
198
-	#
199
-	# Produce a schema.json file if CONFIG_EXPORT == 3
200
-	#
201
-	if config_dump >= 3:
202
-		try:
203
-			conf_schema = schema.extract()
204
-		except Exception as exc:
205
-			print("Error: " + str(exc))
206
-			conf_schema = None
207
-
208
-		if conf_schema:
209
-			#
210
-			# Produce a schema.json file if CONFIG_EXPORT == 3
211
-			#
212
-			if config_dump in (3, 13):
213
-				print("Generating schema.json ...")
214
-				schema.dump_json(conf_schema, build_path / 'schema.json')
215
-				if config_dump == 13:
216
-					schema.group_options(conf_schema)
217
-					schema.dump_json(conf_schema, build_path / 'schema_grouped.json')
218
-
219
-			#
220
-			# Produce a schema.yml file if CONFIG_EXPORT == 4
221
-			#
222
-			elif config_dump == 4:
223
-				print("Generating schema.yml ...")
224
-				try:
225
-					import yaml
226
-				except ImportError:
227
-					env.Execute(env.VerboseAction(
228
-						'$PYTHONEXE -m pip install "pyyaml"',
229
-						"Installing YAML for schema.yml export",
230
-					))
231
-					import yaml
232
-				schema.dump_yaml(conf_schema, build_path / 'schema.yml')
233
-
234
-	# Append the source code version and date
235
-	data['VERSION'] = {}
236
-	data['VERSION']['DETAILED_BUILD_VERSION'] = resolved_defines['DETAILED_BUILD_VERSION']
237
-	data['VERSION']['STRING_DISTRIBUTION_DATE'] = resolved_defines['STRING_DISTRIBUTION_DATE']
238
-	try:
239
-		curver = subprocess.check_output(["git", "describe", "--match=NeVeRmAtCh", "--always"]).strip()
240
-		data['VERSION']['GIT_REF'] = curver.decode()
241
-	except:
242
-		pass
243
-
244
-	#
245
-	# Produce a JSON file for CONFIGURATION_EMBEDDING or CONFIG_EXPORT == 1
246
-	#
247
-	if config_dump == 1 or 'CONFIGURATION_EMBEDDING' in defines:
248
-		with marlin_json.open('w') as outfile:
249
-			json.dump(data, outfile, separators=(',', ':'))
250
-
251
-	#
252
-	# The rest only applies to CONFIGURATION_EMBEDDING
253
-	#
254
-	if not 'CONFIGURATION_EMBEDDING' in defines:
255
-		return
256
-
257
-	# Compress the JSON file as much as we can
258
-	compress_file(marlin_json, marlin_zip)
259
-
260
-	# Generate a C source file for storing this array
261
-	with open('Marlin/src/mczip.h','wb') as result_file:
262
-		result_file.write(
263
-				b'#ifndef NO_CONFIGURATION_EMBEDDING_WARNING\n'
264
-			+ b'  #warning "Generated file \'mc.zip\' is embedded (Define NO_CONFIGURATION_EMBEDDING_WARNING to suppress this warning.)"\n'
265
-			+ b'#endif\n'
266
-			+ b'const unsigned char mc_zip[] PROGMEM = {\n '
267
-		)
268
-		count = 0
269
-		for b in (build_path / 'mc.zip').open('rb').read():
270
-			result_file.write(b' 0x%02X,' % b)
271
-			count += 1
272
-			if count % 16 == 0:
273
-				result_file.write(b'\n ')
274
-		if count % 16:
275
-			result_file.write(b'\n')
276
-		result_file.write(b'};\n')
52
+    if 'BUILD_SIGNATURE' in env:
53
+        return
54
+
55
+    # Definitions from these files will be kept
56
+    files_to_keep = [ 'Marlin/Configuration.h', 'Marlin/Configuration_adv.h' ]
57
+
58
+    build_path = Path(env['PROJECT_BUILD_DIR'], env['PIOENV'])
59
+
60
+    # Check if we can skip processing
61
+    hashes = ''
62
+    for header in files_to_keep:
63
+        hashes += get_file_sha256sum(header)[0:10]
64
+
65
+    marlin_json = build_path / 'marlin_config.json'
66
+    marlin_zip = build_path / 'mc.zip'
67
+
68
+    # Read existing config file
69
+    try:
70
+        with marlin_json.open() as infile:
71
+            conf = json.load(infile)
72
+            if conf['__INITIAL_HASH'] == hashes:
73
+                # Same configuration, skip recomputing the building signature
74
+                compress_file(marlin_json, marlin_zip)
75
+                return
76
+    except:
77
+        pass
78
+
79
+    # Get enabled config options based on preprocessor
80
+    from preprocessor import run_preprocessor
81
+    complete_cfg = run_preprocessor(env)
82
+
83
+    # Dumb #define extraction from the configuration files
84
+    conf_defines = {}
85
+    all_defines = []
86
+    for header in files_to_keep:
87
+        defines = extract_defines(header)
88
+        # To filter only the define we want
89
+        all_defines += defines
90
+        # To remember from which file it cames from
91
+        conf_defines[header.split('/')[-1]] = defines
92
+
93
+    r = re.compile(r"\(+(\s*-*\s*_.*)\)+")
94
+
95
+    # First step is to collect all valid macros
96
+    defines = {}
97
+    for line in complete_cfg:
98
+
99
+        # Split the define from the value
100
+        key_val = line[8:].strip().decode().split(' ')
101
+        key, value = key_val[0], ' '.join(key_val[1:])
102
+
103
+        # Ignore values starting with two underscore, since it's low level
104
+        if len(key) > 2 and key[0:2] == "__" :
105
+            continue
106
+        # Ignore values containing a parenthesis (likely a function macro)
107
+        if '(' in key and ')' in key:
108
+            continue
109
+
110
+        # Then filter dumb values
111
+        if r.match(value):
112
+            continue
113
+
114
+        defines[key] = value if len(value) else ""
115
+
116
+    #
117
+    # Continue to gather data for CONFIGURATION_EMBEDDING or CONFIG_EXPORT
118
+    #
119
+    if not ('CONFIGURATION_EMBEDDING' in defines or 'CONFIG_EXPORT' in defines):
120
+        return
121
+
122
+    # Second step is to filter useless macro
123
+    resolved_defines = {}
124
+    for key in defines:
125
+        # Remove all boards now
126
+        if key.startswith("BOARD_") and key != "BOARD_INFO_NAME":
127
+            continue
128
+        # Remove all keys ending by "_NAME" as it does not make a difference to the configuration
129
+        if key.endswith("_NAME") and key != "CUSTOM_MACHINE_NAME":
130
+            continue
131
+        # Remove all keys ending by "_T_DECLARED" as it's a copy of extraneous system stuff
132
+        if key.endswith("_T_DECLARED"):
133
+            continue
134
+        # Remove keys that are not in the #define list in the Configuration list
135
+        if key not in all_defines + [ 'DETAILED_BUILD_VERSION', 'STRING_DISTRIBUTION_DATE' ]:
136
+            continue
137
+
138
+        # Don't be that smart guy here
139
+        resolved_defines[key] = defines[key]
140
+
141
+    # Generate a build signature now
142
+    # We are making an object that's a bit more complex than a basic dictionary here
143
+    data = {}
144
+    data['__INITIAL_HASH'] = hashes
145
+    # First create a key for each header here
146
+    for header in conf_defines:
147
+        data[header] = {}
148
+
149
+    # Then populate the object where each key is going to (that's a O(N^2) algorithm here...)
150
+    for key in resolved_defines:
151
+        for header in conf_defines:
152
+            if key in conf_defines[header]:
153
+                data[header][key] = resolved_defines[key]
154
+
155
+    # Every python needs this toy
156
+    def tryint(key):
157
+        try:
158
+            return int(defines[key])
159
+        except:
160
+            return 0
161
+
162
+    config_dump = tryint('CONFIG_EXPORT')
163
+
164
+    #
165
+    # Produce an INI file if CONFIG_EXPORT == 2
166
+    #
167
+    if config_dump == 2:
168
+        print("Generating config.ini ...")
169
+        config_ini = build_path / 'config.ini'
170
+        with config_ini.open('w') as outfile:
171
+            ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXPORT')
172
+            filegrp = { 'Configuration.h':'config:basic', 'Configuration_adv.h':'config:advanced' }
173
+            vers = defines["CONFIGURATION_H_VERSION"]
174
+            dt_string = datetime.now().strftime("%Y-%m-%d at %H:%M:%S")
175
+            ini_fmt = '{0:40}{1}\n'
176
+            outfile.write(
177
+                '#\n'
178
+                + '# Marlin Firmware\n'
179
+                + '# config.ini - Options to apply before the build\n'
180
+                + '#\n'
181
+                + f'# Generated by Marlin build on {dt_string}\n'
182
+                + '#\n'
183
+                + '\n'
184
+                + '[config:base]\n'
185
+                + ini_fmt.format('ini_use_config', ' = all')
186
+                + ini_fmt.format('ini_config_vers', f' = {vers}')
187
+            )
188
+            # Loop through the data array of arrays
189
+            for header in data:
190
+                if header.startswith('__'):
191
+                    continue
192
+                outfile.write('\n[' + filegrp[header] + ']\n')
193
+                for key in sorted(data[header]):
194
+                    if key not in ignore:
195
+                        val = 'on' if data[header][key] == '' else data[header][key]
196
+                        outfile.write(ini_fmt.format(key.lower(), ' = ' + val))
197
+
198
+    #
199
+    # Produce a schema.json file if CONFIG_EXPORT == 3
200
+    #
201
+    if config_dump >= 3:
202
+        try:
203
+            conf_schema = schema.extract()
204
+        except Exception as exc:
205
+            print("Error: " + str(exc))
206
+            conf_schema = None
207
+
208
+        if conf_schema:
209
+            #
210
+            # Produce a schema.json file if CONFIG_EXPORT == 3
211
+            #
212
+            if config_dump in (3, 13):
213
+                print("Generating schema.json ...")
214
+                schema.dump_json(conf_schema, build_path / 'schema.json')
215
+                if config_dump == 13:
216
+                    schema.group_options(conf_schema)
217
+                    schema.dump_json(conf_schema, build_path / 'schema_grouped.json')
218
+
219
+            #
220
+            # Produce a schema.yml file if CONFIG_EXPORT == 4
221
+            #
222
+            elif config_dump == 4:
223
+                print("Generating schema.yml ...")
224
+                try:
225
+                    import yaml
226
+                except ImportError:
227
+                    env.Execute(env.VerboseAction(
228
+                        '$PYTHONEXE -m pip install "pyyaml"',
229
+                        "Installing YAML for schema.yml export",
230
+                    ))
231
+                    import yaml
232
+                schema.dump_yaml(conf_schema, build_path / 'schema.yml')
233
+
234
+    # Append the source code version and date
235
+    data['VERSION'] = {}
236
+    data['VERSION']['DETAILED_BUILD_VERSION'] = resolved_defines['DETAILED_BUILD_VERSION']
237
+    data['VERSION']['STRING_DISTRIBUTION_DATE'] = resolved_defines['STRING_DISTRIBUTION_DATE']
238
+    try:
239
+        curver = subprocess.check_output(["git", "describe", "--match=NeVeRmAtCh", "--always"]).strip()
240
+        data['VERSION']['GIT_REF'] = curver.decode()
241
+    except:
242
+        pass
243
+
244
+    #
245
+    # Produce a JSON file for CONFIGURATION_EMBEDDING or CONFIG_EXPORT == 1
246
+    #
247
+    if config_dump == 1 or 'CONFIGURATION_EMBEDDING' in defines:
248
+        with marlin_json.open('w') as outfile:
249
+            json.dump(data, outfile, separators=(',', ':'))
250
+
251
+    #
252
+    # The rest only applies to CONFIGURATION_EMBEDDING
253
+    #
254
+    if not 'CONFIGURATION_EMBEDDING' in defines:
255
+        return
256
+
257
+    # Compress the JSON file as much as we can
258
+    compress_file(marlin_json, marlin_zip)
259
+
260
+    # Generate a C source file for storing this array
261
+    with open('Marlin/src/mczip.h','wb') as result_file:
262
+        result_file.write(
263
+                b'#ifndef NO_CONFIGURATION_EMBEDDING_WARNING\n'
264
+            + b'  #warning "Generated file \'mc.zip\' is embedded (Define NO_CONFIGURATION_EMBEDDING_WARNING to suppress this warning.)"\n'
265
+            + b'#endif\n'
266
+            + b'const unsigned char mc_zip[] PROGMEM = {\n '
267
+        )
268
+        count = 0
269
+        for b in (build_path / 'mc.zip').open('rb').read():
270
+            result_file.write(b' 0x%02X,' % b)
271
+            count += 1
272
+            if count % 16 == 0:
273
+                result_file.write(b'\n ')
274
+        if count % 16:
275
+            result_file.write(b'\n')
276
+        result_file.write(b'};\n')

+ 31
- 31
buildroot/share/PlatformIO/scripts/simulator.py Parādīt failu

@@ -5,49 +5,49 @@
5 5
 
6 6
 import pioutil
7 7
 if pioutil.is_pio_build():
8
-	# Get the environment thus far for the build
9
-	Import("env")
8
+    # Get the environment thus far for the build
9
+    Import("env")
10 10
 
11
-	#print(env.Dump())
11
+    #print(env.Dump())
12 12
 
13
-	#
14
-	# Give the binary a distinctive name
15
-	#
13
+    #
14
+    # Give the binary a distinctive name
15
+    #
16 16
 
17
-	env['PROGNAME'] = "MarlinSimulator"
17
+    env['PROGNAME'] = "MarlinSimulator"
18 18
 
19
-	#
20
-	# If Xcode is installed add the path to its Frameworks folder,
21
-	# or if Mesa is installed try to use its GL/gl.h.
22
-	#
19
+    #
20
+    # If Xcode is installed add the path to its Frameworks folder,
21
+    # or if Mesa is installed try to use its GL/gl.h.
22
+    #
23 23
 
24
-	import sys
25
-	if sys.platform == 'darwin':
24
+    import sys
25
+    if sys.platform == 'darwin':
26 26
 
27
-		#
28
-		# Silence half of the ranlib warnings. (No equivalent for 'ARFLAGS')
29
-		#
30
-		env['RANLIBFLAGS'] += [ "-no_warning_for_no_symbols" ]
27
+        #
28
+        # Silence half of the ranlib warnings. (No equivalent for 'ARFLAGS')
29
+        #
30
+        env['RANLIBFLAGS'] += [ "-no_warning_for_no_symbols" ]
31 31
 
32
-		# Default paths for Xcode and a lucky GL/gl.h dropped by Mesa
33
-		xcode_path = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks"
34
-		mesa_path = "/opt/local/include/GL/gl.h"
32
+        # Default paths for Xcode and a lucky GL/gl.h dropped by Mesa
33
+        xcode_path = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks"
34
+        mesa_path = "/opt/local/include/GL/gl.h"
35 35
 
36
-		import os.path
36
+        import os.path
37 37
 
38
-		if os.path.exists(xcode_path):
38
+        if os.path.exists(xcode_path):
39 39
 
40
-			env['BUILD_FLAGS'] += [ "-F" + xcode_path ]
41
-			print("Using OpenGL framework headers from Xcode.app")
40
+            env['BUILD_FLAGS'] += [ "-F" + xcode_path ]
41
+            print("Using OpenGL framework headers from Xcode.app")
42 42
 
43
-		elif os.path.exists(mesa_path):
43
+        elif os.path.exists(mesa_path):
44 44
 
45
-			env['BUILD_FLAGS'] += [ '-D__MESA__' ]
46
-			print("Using OpenGL header from", mesa_path)
45
+            env['BUILD_FLAGS'] += [ '-D__MESA__' ]
46
+            print("Using OpenGL header from", mesa_path)
47 47
 
48
-		else:
48
+        else:
49 49
 
50
-			print("\n\nNo OpenGL headers found. Install Xcode for matching headers, or use 'sudo port install mesa' to get a GL/gl.h.\n\n")
50
+            print("\n\nNo OpenGL headers found. Install Xcode for matching headers, or use 'sudo port install mesa' to get a GL/gl.h.\n\n")
51 51
 
52
-			# Break out of the PIO build immediately
53
-			sys.exit(1)
52
+            # Break out of the PIO build immediately
53
+            sys.exit(1)

+ 56
- 56
buildroot/share/PlatformIO/scripts/stm32_serialbuffer.py Parādīt failu

@@ -3,59 +3,59 @@
3 3
 #
4 4
 import pioutil
5 5
 if pioutil.is_pio_build():
6
-	Import("env")
7
-
8
-	# Get a build flag's value or None
9
-	def getBuildFlagValue(name):
10
-		for flag in build_flags:
11
-			if isinstance(flag, list) and flag[0] == name:
12
-				return flag[1]
13
-
14
-		return None
15
-
16
-	# Get an overriding buffer size for RX or TX from the build flags
17
-	def getInternalSize(side):
18
-		return	getBuildFlagValue(f"MF_{side}_BUFFER_SIZE") or \
19
-				getBuildFlagValue(f"SERIAL_{side}_BUFFER_SIZE") or \
20
-				getBuildFlagValue(f"USART_{side}_BUF_SIZE")
21
-
22
-	# Get the largest defined buffer size for RX or TX
23
-	def getBufferSize(side, default):
24
-		# Get a build flag value or fall back to the given default
25
-		internal = int(getInternalSize(side) or default)
26
-		flag = side + "_BUFFER_SIZE"
27
-		# Return the largest value
28
-		return max(int(mf[flag]), internal) if flag in mf else internal
29
-
30
-	# Add a build flag if it's not already defined
31
-	def tryAddFlag(name, value):
32
-		if getBuildFlagValue(name) is None:
33
-			env.Append(BUILD_FLAGS=[f"-D{name}={value}"])
34
-
35
-	# Marlin uses the `RX_BUFFER_SIZE` \ `TX_BUFFER_SIZE` options to
36
-	# configure buffer sizes for receiving \ transmitting serial data.
37
-	# Stm32duino uses another set of defines for the same purpose, so this
38
-	# script gets the values from the configuration and uses them to define
39
-	# `SERIAL_RX_BUFFER_SIZE` and `SERIAL_TX_BUFFER_SIZE` as global build
40
-	# flags so they are available for use by the platform.
41
-	#
42
-	# The script will set the value as the default one (64 bytes)
43
-	# or the user-configured one, whichever is higher.
44
-	#
45
-	# Marlin's default buffer sizes are 128 for RX and 32 for TX.
46
-	# The highest value is taken (128/64).
47
-	#
48
-	# If MF_*_BUFFER_SIZE, SERIAL_*_BUFFER_SIZE, USART_*_BUF_SIZE, are
49
-	# defined, the first of these values will be used as the minimum.
50
-	build_flags = env.ParseFlags(env.get('BUILD_FLAGS'))["CPPDEFINES"]
51
-	mf = env["MARLIN_FEATURES"]
52
-
53
-	# Get the largest defined buffer sizes for RX or TX, using defaults for undefined
54
-	rxBuf = getBufferSize("RX", 128)
55
-	txBuf = getBufferSize("TX",  64)
56
-
57
-	# Provide serial buffer sizes to the stm32duino platform
58
-	tryAddFlag("SERIAL_RX_BUFFER_SIZE", rxBuf)
59
-	tryAddFlag("SERIAL_TX_BUFFER_SIZE", txBuf)
60
-	tryAddFlag("USART_RX_BUF_SIZE", rxBuf)
61
-	tryAddFlag("USART_TX_BUF_SIZE", txBuf)
6
+    Import("env")
7
+
8
+    # Get a build flag's value or None
9
+    def getBuildFlagValue(name):
10
+        for flag in build_flags:
11
+            if isinstance(flag, list) and flag[0] == name:
12
+                return flag[1]
13
+
14
+        return None
15
+
16
+    # Get an overriding buffer size for RX or TX from the build flags
17
+    def getInternalSize(side):
18
+        return  getBuildFlagValue(f"MF_{side}_BUFFER_SIZE") or \
19
+                getBuildFlagValue(f"SERIAL_{side}_BUFFER_SIZE") or \
20
+                getBuildFlagValue(f"USART_{side}_BUF_SIZE")
21
+
22
+    # Get the largest defined buffer size for RX or TX
23
+    def getBufferSize(side, default):
24
+        # Get a build flag value or fall back to the given default
25
+        internal = int(getInternalSize(side) or default)
26
+        flag = side + "_BUFFER_SIZE"
27
+        # Return the largest value
28
+        return max(int(mf[flag]), internal) if flag in mf else internal
29
+
30
+    # Add a build flag if it's not already defined
31
+    def tryAddFlag(name, value):
32
+        if getBuildFlagValue(name) is None:
33
+            env.Append(BUILD_FLAGS=[f"-D{name}={value}"])
34
+
35
+    # Marlin uses the `RX_BUFFER_SIZE` \ `TX_BUFFER_SIZE` options to
36
+    # configure buffer sizes for receiving \ transmitting serial data.
37
+    # Stm32duino uses another set of defines for the same purpose, so this
38
+    # script gets the values from the configuration and uses them to define
39
+    # `SERIAL_RX_BUFFER_SIZE` and `SERIAL_TX_BUFFER_SIZE` as global build
40
+    # flags so they are available for use by the platform.
41
+    #
42
+    # The script will set the value as the default one (64 bytes)
43
+    # or the user-configured one, whichever is higher.
44
+    #
45
+    # Marlin's default buffer sizes are 128 for RX and 32 for TX.
46
+    # The highest value is taken (128/64).
47
+    #
48
+    # If MF_*_BUFFER_SIZE, SERIAL_*_BUFFER_SIZE, USART_*_BUF_SIZE, are
49
+    # defined, the first of these values will be used as the minimum.
50
+    build_flags = env.ParseFlags(env.get('BUILD_FLAGS'))["CPPDEFINES"]
51
+    mf = env["MARLIN_FEATURES"]
52
+
53
+    # Get the largest defined buffer sizes for RX or TX, using defaults for undefined
54
+    rxBuf = getBufferSize("RX", 128)
55
+    txBuf = getBufferSize("TX",  64)
56
+
57
+    # Provide serial buffer sizes to the stm32duino platform
58
+    tryAddFlag("SERIAL_RX_BUFFER_SIZE", rxBuf)
59
+    tryAddFlag("SERIAL_TX_BUFFER_SIZE", txBuf)
60
+    tryAddFlag("USART_RX_BUF_SIZE", rxBuf)
61
+    tryAddFlag("USART_TX_BUF_SIZE", txBuf)

+ 314
- 314
buildroot/share/scripts/upload.py Parādīt failu

@@ -25,320 +25,320 @@ import MarlinBinaryProtocol
25 25
 #-----------------#
26 26
 def Upload(source, target, env):
27 27
 
28
-    #-------#
29
-    # Debug #
30
-    #-------#
31
-    Debug = False                # Set to True to enable script debug
32
-    def debugPrint(data):
33
-        if Debug: print(f"[Debug]: {data}")
34
-
35
-    #------------------#
36
-    # Marlin functions #
37
-    #------------------#
38
-    def _GetMarlinEnv(marlinEnv, feature):
39
-        if not marlinEnv: return None
40
-        return marlinEnv[feature] if feature in marlinEnv else None
41
-
42
-    #----------------#
43
-    # Port functions #
44
-    #----------------#
45
-    def _GetUploadPort(env):
46
-        debugPrint('Autodetecting upload port...')
47
-        env.AutodetectUploadPort(env)
48
-        portName = env.subst('$UPLOAD_PORT')
49
-        if not portName:
50
-            raise Exception('Error detecting the upload port.')
51
-        debugPrint('OK')
52
-        return portName
53
-
54
-    #-------------------------#
55
-    # Simple serial functions #
56
-    #-------------------------#
57
-    def _OpenPort():
58
-        # Open serial port
59
-        if port.is_open: return
60
-        debugPrint('Opening upload port...')
61
-        port.open()
62
-        port.reset_input_buffer()
63
-        debugPrint('OK')
64
-
65
-    def _ClosePort():
66
-        # Open serial port
67
-        if port is None: return
68
-        if not port.is_open: return
69
-        debugPrint('Closing upload port...')
70
-        port.close()
71
-        debugPrint('OK')
72
-
73
-    def _Send(data):
74
-        debugPrint(f'>> {data}')
75
-        strdata = bytearray(data, 'utf8') + b'\n'
76
-        port.write(strdata)
77
-        time.sleep(0.010)
78
-
79
-    def _Recv():
80
-        clean_responses = []
81
-        responses = port.readlines()
82
-        for Resp in responses:
83
-            # Suppress invalid chars (coming from debug info)
84
-            try:
85
-                clean_response = Resp.decode('utf8').rstrip().lstrip()
86
-                clean_responses.append(clean_response)
87
-                debugPrint(f'<< {clean_response}')
88
-            except:
89
-                pass
90
-        return clean_responses
91
-
92
-    #------------------#
93
-    # SDCard functions #
94
-    #------------------#
95
-    def _CheckSDCard():
96
-        debugPrint('Checking SD card...')
97
-        _Send('M21')
98
-        Responses = _Recv()
99
-        if len(Responses) < 1 or not any('SD card ok' in r for r in Responses):
100
-            raise Exception('Error accessing SD card')
101
-        debugPrint('SD Card OK')
102
-        return True
103
-
104
-    #----------------#
105
-    # File functions #
106
-    #----------------#
107
-    def _GetFirmwareFiles(UseLongFilenames):
108
-        debugPrint('Get firmware files...')
109
-        _Send(f"M20 F{'L' if UseLongFilenames else ''}")
110
-        Responses = _Recv()
111
-        if len(Responses) < 3 or not any('file list' in r for r in Responses):
112
-            raise Exception('Error getting firmware files')
113
-        debugPrint('OK')
114
-        return Responses
115
-
116
-    def _FilterFirmwareFiles(FirmwareList, UseLongFilenames):
117
-        Firmwares = []
118
-        for FWFile in FirmwareList:
119
-            # For long filenames take the 3rd column of the firmwares list
120
-            if UseLongFilenames:
121
-                Space = 0
122
-                Space = FWFile.find(' ')
123
-                if Space >= 0: Space = FWFile.find(' ', Space + 1)
124
-                if Space >= 0: FWFile = FWFile[Space + 1:]
125
-            if not '/' in FWFile and '.BIN' in FWFile.upper():
126
-                Firmwares.append(FWFile[:FWFile.upper().index('.BIN') + 4])
127
-        return Firmwares
128
-
129
-    def _RemoveFirmwareFile(FirmwareFile):
130
-        _Send(f'M30 /{FirmwareFile}')
131
-        Responses = _Recv()
132
-        Removed = len(Responses) >= 1 and any('File deleted' in r for r in Responses)
133
-        if not Removed:
134
-            raise Exception(f"Firmware file '{FirmwareFile}' not removed")
135
-        return Removed
136
-
137
-    def _RollbackUpload(FirmwareFile):
138
-        if not rollback: return
139
-        print(f"Rollback: trying to delete firmware '{FirmwareFile}'...")
140
-        _OpenPort()
141
-        # Wait for SD card release
142
-        time.sleep(1)
143
-        # Remount SD card
144
-        _CheckSDCard()
145
-        print(' OK' if _RemoveFirmwareFile(FirmwareFile) else ' Error!')
146
-        _ClosePort()
147
-
148
-
149
-    #---------------------#
150
-    # Callback Entrypoint #
151
-    #---------------------#
152
-    port = None
153
-    protocol = None
154
-    filetransfer = None
155
-    rollback = False
156
-
157
-    # Get Marlin evironment vars
158
-    MarlinEnv = env['MARLIN_FEATURES']
159
-    marlin_pioenv = _GetMarlinEnv(MarlinEnv, 'PIOENV')
160
-    marlin_motherboard = _GetMarlinEnv(MarlinEnv, 'MOTHERBOARD')
161
-    marlin_board_info_name = _GetMarlinEnv(MarlinEnv, 'BOARD_INFO_NAME')
162
-    marlin_board_custom_build_flags = _GetMarlinEnv(MarlinEnv, 'BOARD_CUSTOM_BUILD_FLAGS')
163
-    marlin_firmware_bin = _GetMarlinEnv(MarlinEnv, 'FIRMWARE_BIN')
164
-    marlin_long_filename_host_support = _GetMarlinEnv(MarlinEnv, 'LONG_FILENAME_HOST_SUPPORT') is not None
165
-    marlin_longname_write = _GetMarlinEnv(MarlinEnv, 'LONG_FILENAME_WRITE_SUPPORT') is not None
166
-    marlin_custom_firmware_upload = _GetMarlinEnv(MarlinEnv, 'CUSTOM_FIRMWARE_UPLOAD') is not None
167
-    marlin_short_build_version = _GetMarlinEnv(MarlinEnv, 'SHORT_BUILD_VERSION')
168
-    marlin_string_config_h_author = _GetMarlinEnv(MarlinEnv, 'STRING_CONFIG_H_AUTHOR')
169
-
170
-    # Get firmware upload params
171
-    upload_firmware_source_name = str(source[0])    # Source firmware filename
172
-    upload_speed = env['UPLOAD_SPEED'] if 'UPLOAD_SPEED' in env else 115200
173
-                                                    # baud rate of serial connection
174
-    upload_port = _GetUploadPort(env)               # Serial port to use
175
-
176
-    # Set local upload params
177
-    upload_firmware_target_name = os.path.basename(upload_firmware_source_name)
178
-                                                    # Target firmware filename
179
-    upload_timeout = 1000                           # Communication timout, lossy/slow connections need higher values
180
-    upload_blocksize = 512                          # Transfer block size. 512 = Autodetect
181
-    upload_compression = True                       # Enable compression
182
-    upload_error_ratio = 0                          # Simulated corruption ratio
183
-    upload_test = False                             # Benchmark the serial link without storing the file
184
-    upload_reset = True                             # Trigger a soft reset for firmware update after the upload
185
-
186
-    # Set local upload params based on board type to change script behavior
187
-    # "upload_delete_old_bins": delete all *.bin files in the root of SD Card
188
-    upload_delete_old_bins = marlin_motherboard in ['BOARD_CREALITY_V4',   'BOARD_CREALITY_V4210', 'BOARD_CREALITY_V422', 'BOARD_CREALITY_V423',
189
-                                                    'BOARD_CREALITY_V427', 'BOARD_CREALITY_V431',  'BOARD_CREALITY_V452', 'BOARD_CREALITY_V453',
190
-                                                    'BOARD_CREALITY_V24S1']
191
-    # "upload_random_name": generate a random 8.3 firmware filename to upload
192
-    upload_random_filename = marlin_motherboard in ['BOARD_CREALITY_V4',   'BOARD_CREALITY_V4210', 'BOARD_CREALITY_V422', 'BOARD_CREALITY_V423',
193
-                                                    'BOARD_CREALITY_V427', 'BOARD_CREALITY_V431',  'BOARD_CREALITY_V452', 'BOARD_CREALITY_V453',
194
-                                                    'BOARD_CREALITY_V24S1'] and not marlin_long_filename_host_support
195
-
196
-    try:
197
-
198
-        # Start upload job
199
-        print(f"Uploading firmware '{os.path.basename(upload_firmware_target_name)}' to '{marlin_motherboard}' via '{upload_port}'")
200
-
201
-        # Dump some debug info
202
-        if Debug:
203
-            print('Upload using:')
204
-            print('---- Marlin -----------------------------------')
205
-            print(f' PIOENV                      : {marlin_pioenv}')
206
-            print(f' SHORT_BUILD_VERSION         : {marlin_short_build_version}')
207
-            print(f' STRING_CONFIG_H_AUTHOR      : {marlin_string_config_h_author}')
208
-            print(f' MOTHERBOARD                 : {marlin_motherboard}')
209
-            print(f' BOARD_INFO_NAME             : {marlin_board_info_name}')
210
-            print(f' CUSTOM_BUILD_FLAGS          : {marlin_board_custom_build_flags}')
211
-            print(f' FIRMWARE_BIN                : {marlin_firmware_bin}')
212
-            print(f' LONG_FILENAME_HOST_SUPPORT  : {marlin_long_filename_host_support}')
213
-            print(f' LONG_FILENAME_WRITE_SUPPORT : {marlin_longname_write}')
214
-            print(f' CUSTOM_FIRMWARE_UPLOAD      : {marlin_custom_firmware_upload}')
215
-            print('---- Upload parameters ------------------------')
216
-            print(f' Source                      : {upload_firmware_source_name}')
217
-            print(f' Target                      : {upload_firmware_target_name}')
218
-            print(f' Port                        : {upload_port} @ {upload_speed} baudrate')
219
-            print(f' Timeout                     : {upload_timeout}')
220
-            print(f' Block size                  : {upload_blocksize}')
221
-            print(f' Compression                 : {upload_compression}')
222
-            print(f' Error ratio                 : {upload_error_ratio}')
223
-            print(f' Test                        : {upload_test}')
224
-            print(f' Reset                       : {upload_reset}')
225
-            print('-----------------------------------------------')
226
-
227
-        # Custom implementations based on board parameters
228
-        # Generate a new 8.3 random filename
229
-        if upload_random_filename:
230
-            upload_firmware_target_name = f"fw-{''.join(random.choices('ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', k=5))}.BIN"
231
-            print(f"Board {marlin_motherboard}: Overriding firmware filename to '{upload_firmware_target_name}'")
232
-
233
-        # Delete all *.bin files on the root of SD Card (if flagged)
234
-        if upload_delete_old_bins:
235
-            # CUSTOM_FIRMWARE_UPLOAD is needed for this feature
236
-            if not marlin_custom_firmware_upload:
237
-                raise Exception(f"CUSTOM_FIRMWARE_UPLOAD must be enabled in 'Configuration_adv.h' for '{marlin_motherboard}'")
238
-
239
-            # Init & Open serial port
240
-            port = serial.Serial(upload_port, baudrate = upload_speed, write_timeout = 0, timeout = 0.1)
241
-            _OpenPort()
242
-
243
-            # Check SD card status
244
-            _CheckSDCard()
245
-
246
-            # Get firmware files
247
-            FirmwareFiles = _GetFirmwareFiles(marlin_long_filename_host_support)
248
-            if Debug:
249
-                for FirmwareFile in FirmwareFiles:
250
-                    print(f'Found: {FirmwareFile}')
251
-
252
-            # Get all 1st level firmware files (to remove)
253
-            OldFirmwareFiles = _FilterFirmwareFiles(FirmwareFiles[1:len(FirmwareFiles)-2], marlin_long_filename_host_support)   # Skip header and footers of list
254
-            if len(OldFirmwareFiles) == 0:
255
-                print('No old firmware files to delete')
256
-            else:
257
-                print(f"Remove {len(OldFirmwareFiles)} old firmware file{'s' if len(OldFirmwareFiles) != 1 else ''}:")
258
-                for OldFirmwareFile in OldFirmwareFiles:
259
-                    print(f" -Removing- '{OldFirmwareFile}'...")
260
-                    print(' OK' if _RemoveFirmwareFile(OldFirmwareFile) else ' Error!')
261
-
262
-            # Close serial
263
-            _ClosePort()
264
-
265
-            # Cleanup completed
266
-            debugPrint('Cleanup completed')
267
-
268
-        # WARNING! The serial port must be closed here because the serial transfer that follow needs it!
269
-
270
-        # Upload firmware file
271
-        debugPrint(f"Copy '{upload_firmware_source_name}' --> '{upload_firmware_target_name}'")
272
-        protocol = MarlinBinaryProtocol.Protocol(upload_port, upload_speed, upload_blocksize, float(upload_error_ratio), int(upload_timeout))
273
-        #echologger = MarlinBinaryProtocol.EchoProtocol(protocol)
274
-        protocol.connect()
275
-        # Mark the rollback (delete broken transfer) from this point on
276
-        rollback = True
277
-        filetransfer = MarlinBinaryProtocol.FileTransferProtocol(protocol)
278
-        transferOK = filetransfer.copy(upload_firmware_source_name, upload_firmware_target_name, upload_compression, upload_test)
279
-        protocol.disconnect()
280
-
281
-        # Notify upload completed
282
-        protocol.send_ascii('M117 Firmware uploaded' if transferOK else 'M117 Firmware upload failed')
283
-
284
-        # Remount SD card
285
-        print('Wait for SD card release...')
286
-        time.sleep(1)
287
-        print('Remount SD card')
288
-        protocol.send_ascii('M21')
289
-
290
-        # Transfer failed?
291
-        if not transferOK:
292
-            protocol.shutdown()
293
-            _RollbackUpload(upload_firmware_target_name)
294
-        else:
295
-            # Trigger firmware update
296
-            if upload_reset:
297
-                print('Trigger firmware update...')
298
-                protocol.send_ascii('M997', True)
299
-            protocol.shutdown()
300
-
301
-        print('Firmware update completed' if transferOK else 'Firmware update failed')
302
-        return 0 if transferOK else -1
303
-
304
-    except KeyboardInterrupt:
305
-        print('Aborted by user')
306
-        if filetransfer: filetransfer.abort()
307
-        if protocol: 
308
-            protocol.disconnect()
309
-            protocol.shutdown()
310
-        _RollbackUpload(upload_firmware_target_name)
311
-        _ClosePort()
312
-        raise
313
-
314
-    except serial.SerialException as se:
315
-        # This exception is raised only for send_ascii data (not for binary transfer)
316
-        print(f'Serial excepion: {se}, transfer aborted')
317
-        if protocol: 
318
-            protocol.disconnect()
319
-            protocol.shutdown()
320
-        _RollbackUpload(upload_firmware_target_name)
321
-        _ClosePort()
322
-        raise Exception(se)
323
-
324
-    except MarlinBinaryProtocol.FatalError:
325
-        print('Too many retries, transfer aborted')
326
-        if protocol: 
327
-            protocol.disconnect()
328
-            protocol.shutdown()
329
-        _RollbackUpload(upload_firmware_target_name)
330
-        _ClosePort()
331
-        raise
332
-
333
-    except Exception as ex:
334
-        print(f"\nException: {ex}, transfer aborted")
335
-        if protocol: 
336
-            protocol.disconnect()
337
-            protocol.shutdown()
338
-        _RollbackUpload(upload_firmware_target_name)
339
-        _ClosePort()
340
-        print('Firmware not updated')
341
-        raise
28
+	#-------#
29
+	# Debug #
30
+	#-------#
31
+	Debug = False                # Set to True to enable script debug
32
+	def debugPrint(data):
33
+		if Debug: print(f"[Debug]: {data}")
34
+
35
+	#------------------#
36
+	# Marlin functions #
37
+	#------------------#
38
+	def _GetMarlinEnv(marlinEnv, feature):
39
+		if not marlinEnv: return None
40
+		return marlinEnv[feature] if feature in marlinEnv else None
41
+
42
+	#----------------#
43
+	# Port functions #
44
+	#----------------#
45
+	def _GetUploadPort(env):
46
+		debugPrint('Autodetecting upload port...')
47
+		env.AutodetectUploadPort(env)
48
+		portName = env.subst('$UPLOAD_PORT')
49
+		if not portName:
50
+			raise Exception('Error detecting the upload port.')
51
+		debugPrint('OK')
52
+		return portName
53
+
54
+	#-------------------------#
55
+	# Simple serial functions #
56
+	#-------------------------#
57
+	def _OpenPort():
58
+		# Open serial port
59
+		if port.is_open: return
60
+		debugPrint('Opening upload port...')
61
+		port.open()
62
+		port.reset_input_buffer()
63
+		debugPrint('OK')
64
+
65
+	def _ClosePort():
66
+		# Open serial port
67
+		if port is None: return
68
+		if not port.is_open: return
69
+		debugPrint('Closing upload port...')
70
+		port.close()
71
+		debugPrint('OK')
72
+
73
+	def _Send(data):
74
+		debugPrint(f'>> {data}')
75
+		strdata = bytearray(data, 'utf8') + b'\n'
76
+		port.write(strdata)
77
+		time.sleep(0.010)
78
+
79
+	def _Recv():
80
+		clean_responses = []
81
+		responses = port.readlines()
82
+		for Resp in responses:
83
+			# Suppress invalid chars (coming from debug info)
84
+			try:
85
+				clean_response = Resp.decode('utf8').rstrip().lstrip()
86
+				clean_responses.append(clean_response)
87
+				debugPrint(f'<< {clean_response}')
88
+			except:
89
+				pass
90
+		return clean_responses
91
+
92
+	#------------------#
93
+	# SDCard functions #
94
+	#------------------#
95
+	def _CheckSDCard():
96
+		debugPrint('Checking SD card...')
97
+		_Send('M21')
98
+		Responses = _Recv()
99
+		if len(Responses) < 1 or not any('SD card ok' in r for r in Responses):
100
+			raise Exception('Error accessing SD card')
101
+		debugPrint('SD Card OK')
102
+		return True
103
+
104
+	#----------------#
105
+	# File functions #
106
+	#----------------#
107
+	def _GetFirmwareFiles(UseLongFilenames):
108
+		debugPrint('Get firmware files...')
109
+		_Send(f"M20 F{'L' if UseLongFilenames else ''}")
110
+		Responses = _Recv()
111
+		if len(Responses) < 3 or not any('file list' in r for r in Responses):
112
+			raise Exception('Error getting firmware files')
113
+		debugPrint('OK')
114
+		return Responses
115
+
116
+	def _FilterFirmwareFiles(FirmwareList, UseLongFilenames):
117
+		Firmwares = []
118
+		for FWFile in FirmwareList:
119
+			# For long filenames take the 3rd column of the firmwares list
120
+			if UseLongFilenames:
121
+				Space = 0
122
+				Space = FWFile.find(' ')
123
+				if Space >= 0: Space = FWFile.find(' ', Space + 1)
124
+				if Space >= 0: FWFile = FWFile[Space + 1:]
125
+			if not '/' in FWFile and '.BIN' in FWFile.upper():
126
+				Firmwares.append(FWFile[:FWFile.upper().index('.BIN') + 4])
127
+		return Firmwares
128
+
129
+	def _RemoveFirmwareFile(FirmwareFile):
130
+		_Send(f'M30 /{FirmwareFile}')
131
+		Responses = _Recv()
132
+		Removed = len(Responses) >= 1 and any('File deleted' in r for r in Responses)
133
+		if not Removed:
134
+			raise Exception(f"Firmware file '{FirmwareFile}' not removed")
135
+		return Removed
136
+
137
+	def _RollbackUpload(FirmwareFile):
138
+		if not rollback: return
139
+		print(f"Rollback: trying to delete firmware '{FirmwareFile}'...")
140
+		_OpenPort()
141
+		# Wait for SD card release
142
+		time.sleep(1)
143
+		# Remount SD card
144
+		_CheckSDCard()
145
+		print(' OK' if _RemoveFirmwareFile(FirmwareFile) else ' Error!')
146
+		_ClosePort()
147
+
148
+
149
+	#---------------------#
150
+	# Callback Entrypoint #
151
+	#---------------------#
152
+	port = None
153
+	protocol = None
154
+	filetransfer = None
155
+	rollback = False
156
+
157
+	# Get Marlin evironment vars
158
+	MarlinEnv = env['MARLIN_FEATURES']
159
+	marlin_pioenv = _GetMarlinEnv(MarlinEnv, 'PIOENV')
160
+	marlin_motherboard = _GetMarlinEnv(MarlinEnv, 'MOTHERBOARD')
161
+	marlin_board_info_name = _GetMarlinEnv(MarlinEnv, 'BOARD_INFO_NAME')
162
+	marlin_board_custom_build_flags = _GetMarlinEnv(MarlinEnv, 'BOARD_CUSTOM_BUILD_FLAGS')
163
+	marlin_firmware_bin = _GetMarlinEnv(MarlinEnv, 'FIRMWARE_BIN')
164
+	marlin_long_filename_host_support = _GetMarlinEnv(MarlinEnv, 'LONG_FILENAME_HOST_SUPPORT') is not None
165
+	marlin_longname_write = _GetMarlinEnv(MarlinEnv, 'LONG_FILENAME_WRITE_SUPPORT') is not None
166
+	marlin_custom_firmware_upload = _GetMarlinEnv(MarlinEnv, 'CUSTOM_FIRMWARE_UPLOAD') is not None
167
+	marlin_short_build_version = _GetMarlinEnv(MarlinEnv, 'SHORT_BUILD_VERSION')
168
+	marlin_string_config_h_author = _GetMarlinEnv(MarlinEnv, 'STRING_CONFIG_H_AUTHOR')
169
+
170
+	# Get firmware upload params
171
+	upload_firmware_source_name = str(source[0])    # Source firmware filename
172
+	upload_speed = env['UPLOAD_SPEED'] if 'UPLOAD_SPEED' in env else 115200
173
+													# baud rate of serial connection
174
+	upload_port = _GetUploadPort(env)               # Serial port to use
175
+
176
+	# Set local upload params
177
+	upload_firmware_target_name = os.path.basename(upload_firmware_source_name)
178
+													# Target firmware filename
179
+	upload_timeout = 1000                           # Communication timout, lossy/slow connections need higher values
180
+	upload_blocksize = 512                          # Transfer block size. 512 = Autodetect
181
+	upload_compression = True                       # Enable compression
182
+	upload_error_ratio = 0                          # Simulated corruption ratio
183
+	upload_test = False                             # Benchmark the serial link without storing the file
184
+	upload_reset = True                             # Trigger a soft reset for firmware update after the upload
185
+
186
+	# Set local upload params based on board type to change script behavior
187
+	# "upload_delete_old_bins": delete all *.bin files in the root of SD Card
188
+	upload_delete_old_bins = marlin_motherboard in ['BOARD_CREALITY_V4',   'BOARD_CREALITY_V4210', 'BOARD_CREALITY_V422', 'BOARD_CREALITY_V423',
189
+													'BOARD_CREALITY_V427', 'BOARD_CREALITY_V431',  'BOARD_CREALITY_V452', 'BOARD_CREALITY_V453',
190
+													'BOARD_CREALITY_V24S1']
191
+	# "upload_random_name": generate a random 8.3 firmware filename to upload
192
+	upload_random_filename = marlin_motherboard in ['BOARD_CREALITY_V4',   'BOARD_CREALITY_V4210', 'BOARD_CREALITY_V422', 'BOARD_CREALITY_V423',
193
+													'BOARD_CREALITY_V427', 'BOARD_CREALITY_V431',  'BOARD_CREALITY_V452', 'BOARD_CREALITY_V453',
194
+													'BOARD_CREALITY_V24S1'] and not marlin_long_filename_host_support
195
+
196
+	try:
197
+
198
+		# Start upload job
199
+		print(f"Uploading firmware '{os.path.basename(upload_firmware_target_name)}' to '{marlin_motherboard}' via '{upload_port}'")
200
+
201
+		# Dump some debug info
202
+		if Debug:
203
+			print('Upload using:')
204
+			print('---- Marlin -----------------------------------')
205
+			print(f' PIOENV                      : {marlin_pioenv}')
206
+			print(f' SHORT_BUILD_VERSION         : {marlin_short_build_version}')
207
+			print(f' STRING_CONFIG_H_AUTHOR      : {marlin_string_config_h_author}')
208
+			print(f' MOTHERBOARD                 : {marlin_motherboard}')
209
+			print(f' BOARD_INFO_NAME             : {marlin_board_info_name}')
210
+			print(f' CUSTOM_BUILD_FLAGS          : {marlin_board_custom_build_flags}')
211
+			print(f' FIRMWARE_BIN                : {marlin_firmware_bin}')
212
+			print(f' LONG_FILENAME_HOST_SUPPORT  : {marlin_long_filename_host_support}')
213
+			print(f' LONG_FILENAME_WRITE_SUPPORT : {marlin_longname_write}')
214
+			print(f' CUSTOM_FIRMWARE_UPLOAD      : {marlin_custom_firmware_upload}')
215
+			print('---- Upload parameters ------------------------')
216
+			print(f' Source                      : {upload_firmware_source_name}')
217
+			print(f' Target                      : {upload_firmware_target_name}')
218
+			print(f' Port                        : {upload_port} @ {upload_speed} baudrate')
219
+			print(f' Timeout                     : {upload_timeout}')
220
+			print(f' Block size                  : {upload_blocksize}')
221
+			print(f' Compression                 : {upload_compression}')
222
+			print(f' Error ratio                 : {upload_error_ratio}')
223
+			print(f' Test                        : {upload_test}')
224
+			print(f' Reset                       : {upload_reset}')
225
+			print('-----------------------------------------------')
226
+
227
+		# Custom implementations based on board parameters
228
+		# Generate a new 8.3 random filename
229
+		if upload_random_filename:
230
+			upload_firmware_target_name = f"fw-{''.join(random.choices('ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', k=5))}.BIN"
231
+			print(f"Board {marlin_motherboard}: Overriding firmware filename to '{upload_firmware_target_name}'")
232
+
233
+		# Delete all *.bin files on the root of SD Card (if flagged)
234
+		if upload_delete_old_bins:
235
+			# CUSTOM_FIRMWARE_UPLOAD is needed for this feature
236
+			if not marlin_custom_firmware_upload:
237
+				raise Exception(f"CUSTOM_FIRMWARE_UPLOAD must be enabled in 'Configuration_adv.h' for '{marlin_motherboard}'")
238
+
239
+			# Init & Open serial port
240
+			port = serial.Serial(upload_port, baudrate = upload_speed, write_timeout = 0, timeout = 0.1)
241
+			_OpenPort()
242
+
243
+			# Check SD card status
244
+			_CheckSDCard()
245
+
246
+			# Get firmware files
247
+			FirmwareFiles = _GetFirmwareFiles(marlin_long_filename_host_support)
248
+			if Debug:
249
+				for FirmwareFile in FirmwareFiles:
250
+					print(f'Found: {FirmwareFile}')
251
+
252
+			# Get all 1st level firmware files (to remove)
253
+			OldFirmwareFiles = _FilterFirmwareFiles(FirmwareFiles[1:len(FirmwareFiles)-2], marlin_long_filename_host_support)   # Skip header and footers of list
254
+			if len(OldFirmwareFiles) == 0:
255
+				print('No old firmware files to delete')
256
+			else:
257
+				print(f"Remove {len(OldFirmwareFiles)} old firmware file{'s' if len(OldFirmwareFiles) != 1 else ''}:")
258
+				for OldFirmwareFile in OldFirmwareFiles:
259
+					print(f" -Removing- '{OldFirmwareFile}'...")
260
+					print(' OK' if _RemoveFirmwareFile(OldFirmwareFile) else ' Error!')
261
+
262
+			# Close serial
263
+			_ClosePort()
264
+
265
+			# Cleanup completed
266
+			debugPrint('Cleanup completed')
267
+
268
+		# WARNING! The serial port must be closed here because the serial transfer that follow needs it!
269
+
270
+		# Upload firmware file
271
+		debugPrint(f"Copy '{upload_firmware_source_name}' --> '{upload_firmware_target_name}'")
272
+		protocol = MarlinBinaryProtocol.Protocol(upload_port, upload_speed, upload_blocksize, float(upload_error_ratio), int(upload_timeout))
273
+		#echologger = MarlinBinaryProtocol.EchoProtocol(protocol)
274
+		protocol.connect()
275
+		# Mark the rollback (delete broken transfer) from this point on
276
+		rollback = True
277
+		filetransfer = MarlinBinaryProtocol.FileTransferProtocol(protocol)
278
+		transferOK = filetransfer.copy(upload_firmware_source_name, upload_firmware_target_name, upload_compression, upload_test)
279
+		protocol.disconnect()
280
+
281
+		# Notify upload completed
282
+		protocol.send_ascii('M117 Firmware uploaded' if transferOK else 'M117 Firmware upload failed')
283
+
284
+		# Remount SD card
285
+		print('Wait for SD card release...')
286
+		time.sleep(1)
287
+		print('Remount SD card')
288
+		protocol.send_ascii('M21')
289
+
290
+		# Transfer failed?
291
+		if not transferOK:
292
+			protocol.shutdown()
293
+			_RollbackUpload(upload_firmware_target_name)
294
+		else:
295
+			# Trigger firmware update
296
+			if upload_reset:
297
+				print('Trigger firmware update...')
298
+				protocol.send_ascii('M997', True)
299
+			protocol.shutdown()
300
+
301
+		print('Firmware update completed' if transferOK else 'Firmware update failed')
302
+		return 0 if transferOK else -1
303
+
304
+	except KeyboardInterrupt:
305
+		print('Aborted by user')
306
+		if filetransfer: filetransfer.abort()
307
+		if protocol:
308
+			protocol.disconnect()
309
+			protocol.shutdown()
310
+		_RollbackUpload(upload_firmware_target_name)
311
+		_ClosePort()
312
+		raise
313
+
314
+	except serial.SerialException as se:
315
+		# This exception is raised only for send_ascii data (not for binary transfer)
316
+		print(f'Serial excepion: {se}, transfer aborted')
317
+		if protocol:
318
+			protocol.disconnect()
319
+			protocol.shutdown()
320
+		_RollbackUpload(upload_firmware_target_name)
321
+		_ClosePort()
322
+		raise Exception(se)
323
+
324
+	except MarlinBinaryProtocol.FatalError:
325
+		print('Too many retries, transfer aborted')
326
+		if protocol:
327
+			protocol.disconnect()
328
+			protocol.shutdown()
329
+		_RollbackUpload(upload_firmware_target_name)
330
+		_ClosePort()
331
+		raise
332
+
333
+	except Exception as ex:
334
+		print(f"\nException: {ex}, transfer aborted")
335
+		if protocol:
336
+			protocol.disconnect()
337
+			protocol.shutdown()
338
+		_RollbackUpload(upload_firmware_target_name)
339
+		_ClosePort()
340
+		print('Firmware not updated')
341
+		raise
342 342
 
343 343
 # Attach custom upload callback
344 344
 env.Replace(UPLOADCMD=Upload)

+ 2
- 2
get_test_targets.py Parādīt failu

@@ -6,7 +6,7 @@ import yaml
6 6
 
7 7
 
8 8
 with open('.github/workflows/test-builds.yml') as f:
9
-	github_configuration = yaml.safe_load(f)
9
+    github_configuration = yaml.safe_load(f)
10 10
 test_platforms = github_configuration\
11
-	['jobs']['test_builds']['strategy']['matrix']['test-platform']
11
+    ['jobs']['test_builds']['strategy']['matrix']['test-platform']
12 12
 print(' '.join(test_platforms))

Notiek ielāde…
Atcelt
Saglabāt