Squashed commit of the following:
commit 9a5e1af9b969e3cbacdab1ece7ef25190194b3d5
Author: Joppe Blondel <joppe@blondel.nl>
Date:   Sun Sep 4 19:32:35 2022 +0200

    Cleaned up tree

    Signed-off-by: Joppe Blondel <joppe@blondel.nl>

commit 5f5556409a71afc904bb9df0915cd236d87fccb1
Author: Joppe Blondel <joppe@blondel.nl>
Date:   Sun Sep 4 19:31:20 2022 +0200

    Split up different scripts

    Signed-off-by: Joppe Blondel <joppe@blondel.nl>

commit 6855e9a1e808a99c4a326be7ef49b9b545eaf4bd
Author: Jojojoppe <joppe@blondel.nl>
Date:   Sun Sep 4 14:21:35 2022 +0200

    Client server structure done

    Signed-off-by: Jojojoppe <joppe@blondel.nl>

commit 44923b8b3407adb1f8f1c0d24c016613da68a726
Author: Jojojoppe <joppe@blondel.nl>
Date:   Sat Sep 3 22:35:00 2022 +0200

    Moved basic stuff to exec_class

    Signed-off-by: Jojojoppe <joppe@blondel.nl>
    Signed-off-by: Joppe Blondel <joppe@blondel.nl>
2  .gitignore  (vendored)
@@ -1,3 +1 @@
-OUT
-BUILD
 *__pycache__

@@ -1,23 +0,0 @@
library IEEE;
use IEEE.STD_LOGIC_1164.all;
use IEEE.NUMERIC_STD.all;
entity toplevel is
    port (
        ACLK : in std_logic;
        LED : out std_logic_vector(7 downto 0);
        SW : in std_logic_vector(3 downto 0)
    );
end toplevel;
architecture structural of toplevel is
    signal ARESETN : std_logic;
begin
    ARESETN <= SW(3);
    process(ACLK, ARESETN)
    begin
        if ARESETN='0' then
            LED <= "11111111";
        elsif rising_edge(ACLK) then
            LED <= SW & SW;
        end if;
    end process;
end architecture;

19  project.cfg
@@ -1,19 +0,0 @@
[project]
name = testproject
version = 0.1
out_dir = OUT
build_dir = BUILD

[target:default]
family = spartan6
device = xc6slx9
package = tqg144
speedgrade = -2
toolchain = ISE

[sources:default]
target = default
toplevel = toplevel
src_vhdl = RTL/toplevel.vhd
src_verilog =
src_sysverilog =

42  rbuild.py
@@ -1,42 +0,0 @@
#!/usr/bin/env python

import remotesyn
import remotesyn.ISE as ISE

import configparser
import signal
import time

threads = []

def sighandler(sig, frame):
    print("\nCRTL-C: stopping threads")

    for t in threads:
        t.stop();

    exit(0);

if __name__=="__main__":
    signal.signal(signal.SIGINT, sighandler)

    config = configparser.ConfigParser()
    config.read("project.cfg")

    # Test local build
    copy = remotesyn.copy_local(config)
    synth = ISE.synth(config, copy, 'default')
    threads.append(synth)

    needed_files = synth.needed_files()
    print(needed_files)

    synth.start()

    looping = True
    while looping:
        time.sleep(1)
        looping = False
        for t in threads:
            if t.running:
                looping = True

13  remotesyn.egg-info/PKG-INFO  (Normal file)
@@ -0,0 +1,13 @@
Metadata-Version: 2.1
Name: remotesyn
Version: 0.2
Summary: Remote FPGA synthesis abstraction tool
Home-page: https://git.joppeb.nl/joppe/remotesyn
Download-URL:
Author: Joppe Blondel
Author-email: joppe@blondel.nl
License: BSD Licence
Keywords: FPGA,Synthesis,Xilinx,ISE,Vivado
Classifier: Development Status :: 3 - Alpha
Classifier: License :: OSI Approved :: BSD License
Classifier: Programming Language :: Python :: 3

19  remotesyn.egg-info/SOURCES.txt  (Normal file)
@@ -0,0 +1,19 @@
.gitignore
README.md
setup.py
remotesyn/__init__.py
remotesyn.egg-info/PKG-INFO
remotesyn.egg-info/SOURCES.txt
remotesyn.egg-info/dependency_links.txt
remotesyn.egg-info/requires.txt
remotesyn.egg-info/top_level.txt
remotesyn/ISE/__init__.py
scripts/rbuild
scripts/rmbuild
scripts/rmserver
server/authorized_hosts
server/id_rsa
server/id_rsa.pub
test/.gitignore
test/project.cfg
test/RTL/toplevel.vhd

1  remotesyn.egg-info/dependency_links.txt  (Normal file)
@@ -0,0 +1 @@

1  remotesyn.egg-info/requires.txt  (Normal file)
@@ -0,0 +1 @@
paramiko

1  remotesyn.egg-info/top_level.txt  (Normal file)
@@ -0,0 +1 @@
remotesyn

@@ -1 +0,0 @@
from .synth import synth

@@ -1,14 +0,0 @@
import threading
import shutil
import os
import time
import subprocess

def runner(threads, process, name):
    print(f" - executing {name}: ", end='', flush=True)
    p = subprocess.Popen(process, shell=True, stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    threads.append(p)
    while p.poll() is None:
        print('.', end='', flush=True)
        time.sleep(2)
    res = p.returncode

79  remotesyn/ISE/syn.py  (Normal file)
@@ -0,0 +1,79 @@
import threading
import shutil
import os
import time
import subprocess
import signal
import random

def needed_files(config, target) -> list:
    if not config.has_section(f'build:{target}'):
        print("ERROR: config file has no build section for target")
        return None
    needed_files = []
    for s in config.get(f'build:{target}', 'src_vhdl', fallback="").split():
        needed_files.append(s)
    for s in config.get(f'build:{target}', 'src_verilog', fallback="").split():
        needed_files.append(s)
    for s in config.get(f'build:{target}', 'src_sysverilog', fallback="").split():
        needed_files.append(s)
    return needed_files

def generated_files(config, target) -> list:
    outdir = f"{config.get('project', 'out_dir', fallback='out')}"
    return [
        f'{outdir}/{target}/synth.log',
        f'{outdir}/{target}/synth.ngc',
    ]

def do(config, target, log, subprocesses, prefix='.') -> int:
    log("Synthesize:")

    if not config.has_section(f'build:{target}'):
        log("ERROR: config file has no build section for target")
        return 1

    devtarget = f'target:{config.get(f"build:{target}", "target", fallback="")}'
    if not config.has_section(devtarget):
        log("ERROR: config file has no section for device target")
        return 1

    device = f"{config.get(devtarget, 'device', fallback='')}{config.get(devtarget, 'speedgrade', fallback='')}-{config.get(devtarget, 'package', fallback='')}"
    builddir = f"{prefix}/{config.get('project', 'build_dir', fallback='.build')}"
    outdir = f"{prefix}/{config.get('project', 'out_dir', fallback='out')}"

    os.makedirs(builddir, exist_ok=True)
    curdir = f"{os.getcwd()}/{prefix}"

    log(" - writing project file")
    with open(f'{builddir}/syn.prj', 'w') as f:
        for s in config.get(f'build:{target}', 'src_vhdl', fallback="").split():
            f.write(f"vhdl work {curdir}/{s}\n")
        for s in config.get(f'build:{target}', 'src_verilog', fallback="").split():
            f.write(f"verilog work {curdir}/{s}\n")
        for s in config.get(f'build:{target}', 'src_sysverilog', fallback="").split():
            f.write(f"verilog work {curdir}/{s}\n")

    log(" - writing project generation file")
    with open(f'{builddir}/prj.scr', 'w') as f:
        f.write(f'run\n-ifn syn.prj\n-ofn syn.ngc\n-ifmt mixed\n')
        f.write(f'-top {config.get(f"sources:{target}", "toplevel", fallback="toplevel")}\n')
        f.write(f'-p {device}\n-glob_opt max_delay -opt_mode speed')

    p = subprocess.Popen("xst -intstyle xflow -ifn prj.scr", shell=True, cwd=builddir, stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    subprocesses.append(p)
    while p.poll() is None:
        time.sleep(1)
    res = p.returncode
    if res:
        log(" - ERROR: return code is", res)
        log(" - copy log")
        os.makedirs(f'{outdir}/{target}', exist_ok=True)
        shutil.copy(f'{builddir}/prj.srp', f'{outdir}/{target}/synth.log')
        return res

    log(" - copy output files")
    os.makedirs(f'{outdir}/{target}', exist_ok=True)
    shutil.copy(f'{builddir}/syn.ngc', f'{outdir}/{target}/synth.ngc')
    shutil.copy(f'{builddir}/prj.srp', f'{outdir}/{target}/synth.log')
    return 0

@@ -1,107 +0,0 @@
import threading
import shutil
import os
import time
import subprocess
import signal
import random

from .runner import runner

class synth(threading.Thread):
    def __init__(self, config, copy, target):
        threading.Thread.__init__(self)
        self.config = config
        self.copy = copy
        self.target = target

        self.threads = []
        self.running = True
        self.name = config.get('project', 'name', fallback=f'{random.random():.4f}')
        self.version = config.get('project', 'version', fallback=f'{random.random():.4f}')
        self.builddir = f"{config.get('project', 'build_dir', fallback='.build')}/synth_{target}_{self.name}_{self.version}"
        self.outdir = config.get('project', 'out_dir', fallback='out')

    # Returns the list of needed files to execute operation. Caller need
    # to check for existance of the files and if in remote execution these
    # files must be synched
    def needed_files(self):
        if not self.config.has_section(f'sources:{self.target}'):
            print("ERROR: config file has no sources section for target")
            return None
        needed_files = []
        for s in self.config.get(f'sources:{self.target}', 'src_vhdl', fallback="").split():
            needed_files.append(s)
        for s in self.config.get(f'sources:{self.target}', 'src_verilog', fallback="").split():
            needed_files.append(s)
        for s in self.config.get(f'sources:{self.target}', 'src_sysverilog', fallback="").split():
            needed_files.append(s)
        return needed_files

    def stop(self):
        print("Stopping synth...")
        for t in self.threads:
            print(" <> kill", t)
            t.send_signal(signal.SIGINT)
            ti = 0
            while t.poll() is None:
                time.sleep(1)
                ti += 1
                if ti>2:
                    print(" <> force kill", t)
                    t.send_signal(signal.SIGKILL)
        self.running = False

    def run(self):
        print("Synthesize:")
        if not self.config.has_section(f'sources:{self.target}'):
            print("ERROR: config file has no sources section for target")
            self.running = False
            return None

        devtarget = f'target:{self.config.get(f"sources:{self.target}", "target", fallback="")}'
        if not self.config.has_section(devtarget):
            print("ERROR: config file has no section for device target")
            self.running = False
            return None

        device = f"{self.config.get(devtarget, 'device', fallback='')}{self.config.get(devtarget, 'speedgrade', fallback='')}-{self.config.get(devtarget, 'package', fallback='')}"

        os.makedirs(self.builddir, exist_ok=True)
        curdir = os.getcwd()
        os.chdir(self.builddir)

        try:

            print(" - writing project file")
            with open('syn.prj', 'w') as f:
                for s in self.config.get(f'sources:{self.target}', 'src_vhdl', fallback="").split():
                    f.write(f"vhdl work {curdir}/{s}\n")
                for s in self.config.get(f'sources:{self.target}', 'src_verilog', fallback="").split():
                    f.write(f"verilog work {curdir}/{s}\n")
                for s in self.config.get(f'sources:{self.target}', 'src_sysverilog', fallback="").split():
                    f.write(f"verilog work {curdir}/{s}\n")

            print(" - writing project generation file")
            with open('prj.scr', 'w') as f:
                f.write(f'run\n-ifn syn.prj\n-ofn syn.ngc\n-ifmt mixed\n')
                f.write(f'-top {self.config.get(f"sources:{self.target}", "toplevel", fallback="toplevel")}\n')
                f.write(f'-p {device}\n-glob_opt max_delay -opt_mode speed')

            runner(self.threads, "xst -intstyle xflow -ifn prj.scr", "xst")
            if not self.running:
                os.chdir(curdir)
                return
            print('DONE')

            print(" - copy output files")
            os.makedirs(f'{curdir}/{self.outdir}/{self.target}', exist_ok=True)
            self.copy.copy_to_dir('syn.ngc', f'{curdir}/{self.outdir}/{self.target}/synth.ngc')
            self.copy.copy_to_dir('prj.srp', f'{curdir}/{self.outdir}/{self.target}/synth.log')

        except Exception as e:
            print(e)

        finally:
            os.chdir(curdir)
            self.running = False

@@ -1 +0,0 @@
from .copy import copy_local, copy_remote

@@ -1,22 +0,0 @@
import shutil

class copy_local:
    def __init__(self, config):
        self.config = config

    def copy_from_dir(self, src, dst):
        pass
        # Nothing to do here since we are working in local build

    def copy_to_dir(self, src, dst):
        shutil.copy(src, dst)

class copy_remote:
    def __init__(self, config):
        self.config = config

    def copy_from_dir(self, src, dst):
        print("ERROR: Not yet implemented")

    def copy_to_dir(self, src, dst):
        print("ERROR: Not yet implemented")

86  scripts/rbuild  (Executable file)
@@ -0,0 +1,86 @@
#!/usr/bin/env python3

import configparser
import sys

def print_help():
    print("Unified FPGA synthesizer frontend\r\n(c) Joppe Blondel - 2022\r\n")
    print(f"Usage: {sys.argv[0]} [ OPTIONS ] action [ target ] ...")
    print("")
    print("Options:")
    print(" -h Show this help message")
    print(" -c <file> Configuration file, defaults to project.cfg")
    print("")
    print("Actions:")
    print("ip <target> Generate IP files from vendor provided libraries")
    print("syn <target> Synthesize design for target")
    print("impl <target> Route and place design for target")
    print("bit <target> Generate output files and run analysis for target")
    print("all <target> Generate IP, synthesize, route and place design for target")
    print("floorplan <target> Run floorplan editor, currently only for local execution")
    print("sim <simtarget> Run simulation target")

if __name__=="__main__":
    # Parse arguments
    i = 1
    nextarg = None
    configpath = 'project.cfg'
    actions = []
    while i<len(sys.argv):
        if nextarg is not None:
            if nextarg=='config':
                configpath = sys.argv[i]
                nextarg = None
            else:
                actions.append((nextarg, sys.argv[i]))
                nextarg = None
        elif sys.argv[i]=='-h':
            print_help()
            exit(0)
        elif sys.argv[i]=='-c':
            nextarg = 'config'
        else:
            nextarg = sys.argv[i]
        i += 1
    if nextarg is not None:
        print("ERROR: expected more arguments")
        exit(1)

    config = configparser.ConfigParser()
    config.read(configpath)

    subprocesses = []

    try:
        for action in actions:
            target = action[1]
            action = action[0]

            if not config.has_section(f'build:{target}'):
                print("ERROR: config file has no build section for target")
                exit(1)
            devtarget = f'target:{config.get(f"build:{target}", "target", fallback="")}'
            if not config.has_section(devtarget):
                print("ERROR: config file has no section for device target")
                exit(1)
            toolchain = config.get(devtarget, 'toolchain', fallback="NONE")
            if toolchain=="NONE":
                print("ERROR: no toolchain specified for device target")
                exit(1)

            try:
                exec(f"from remotesyn.{toolchain}.{action} import do")
            except ImportError:
                print(f"ERROR: Unknown action '{action}' for toolchain '{toolchain}'")
                exit(1)

            ret = do(config, target, print, subprocesses)

            if ret!=0:
                exit(ret)

    except KeyboardInterrupt:
        print("\rStopping rbuild")
        for p in subprocesses:
            p.kill()
        exit(0)

186  scripts/rmbuild  (Normal file)
@@ -0,0 +1,186 @@
#!/usr/bin/env python3

import configparser
import sys
import paramiko
import base64
import struct
import os
import json

def cmd(cmd, channel):
    channel.exec_command(base64.encodebytes(cmd))

def sstr(s):
    return struct.pack('>I', len(s)) + s.encode('utf-8')

def rstr(channel):
    l = struct.unpack('>I', channel.recv(4))[0]
    return bytes.decode(channel.recv(l), 'utf-8')

def send_file(channel, file, othername=None):
    print(f"> {file}")
    if not os.path.exists(file):
        print(f"Error: {file} does not exists")
    with open(file, 'rb') as f:
        stat = os.fstat(f.fileno())
        print(' -> fsize', stat.st_size)
        if othername is None:
            othername = file
        fsize = struct.pack('>q', stat.st_size)
        cmd(b'sf'+sstr(othername)+fsize, channel)
        status = channel.recv(3)
        if status!=b'OK\n':
            print('Something went wrong...')
            exit(1)
        i = stat.st_size
        while i>0:
            fdata = f.read(1024)
            i -= 1024
            channel.sendall(fdata)

def recv_file(channel, file):
    print(f"< {file}")
    if os.path.dirname(file) != '':
        os.makedirs(os.path.dirname(file), exist_ok=True)
    with open(file, 'wb') as f:
        cmd(b'rf'+sstr(file), channel)
        while True:
            status = channel.recv(2)
            if status != b'\x00\x00':
                break
        if status!=b'OK':
            msg = channel.recv(1024)
            print("Error:", bytes.decode(msg, 'ascii'))
            exit(1)
        fsize = channel.recv(8)
        fsize = struct.unpack('>q', fsize)[0]
        print(' -> fsize', fsize)
        while fsize>0:
            f.write(channel.recv(1024))
            fsize -= 1024

def print_help():
    print("Unified FPGA synthesizer frontend - remote execution\r\n(c) Joppe Blondel - 2022\r\n")
    print(f"Usage: {sys.argv[0]} [ OPTIONS ] action [ target ] ...")
    print("")
    print("Options:")
    print(" -h Show this help message")
    print(" -c <file> Configuration file, defaults to project.cfg")
    print("")
    print("Actions:")
    print("ip <target> Generate IP files from vendor provided libraries")
    print("syn <target> Synthesize design for target")
    print("impl <target> Route and place design for target")
    print("bit <target> Generate output files and run analysis for target")
    print("all <target> Generate IP, synthesize, route and place design for target")
    print("floorplan <target> Run floorplan editor, currently only for local execution")
    print("sim <simtarget> Run simulation target")

if __name__=="__main__":
    # Parse arguments
    i = 1
    nextarg = None
    configpath = 'project.cfg'
    actions = []
    while i<len(sys.argv):
        if nextarg is not None:
            if nextarg=='config':
                configpath = sys.argv[i]
                nextarg = None
            else:
                actions.append((nextarg, sys.argv[i]))
                nextarg = None
        elif sys.argv[i]=='-h':
            print_help()
            exit(0)
        elif sys.argv[i]=='-c':
            nextarg = 'config'
        else:
            nextarg = sys.argv[i]
        i += 1
    if nextarg is not None:
        print("ERROR: expected more arguments")
        exit(1)

    config = configparser.ConfigParser()
    config.read(configpath)

    # Get SSH configuration
    privkey = config.get('server', 'privkey', fallback='__privkey__')
    pubkey = config.get('server', 'pubkey', fallback='__pubkey__')
    hostname = config.get('server', 'hostname', fallback='__hostname__')
    port = config.get('server', 'port', fallback='__port__')
    if privkey=='__privkey__' or pubkey=='__pubkey__' or hostname=='__hostname__' or port=='__port__':
        print("ERROR: Not enough server information in the config file")
        exit(1)

    # Connect to SSH and create channel
    try:
        host_key = paramiko.RSAKey(filename=privkey)
        client = paramiko.SSHClient()
        client.load_system_host_keys()
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        trans = paramiko.Transport((hostname, int(port)))
        trans.connect(None, pkey=host_key)
        channel = trans.open_channel('session')
    except paramiko.ssh_exception.SSHException as e:
        print("ERROR: Could not connect to server")
        exit(1)

    # Send project identification
    cmd(b'id' + struct.pack('>q', hash(host_key.get_base64())), channel)
    # Send config
    cmd(b'cf' + sstr(json.dumps({s:dict(config.items(s)) for s in config.sections()})), channel)

    subprocesses = []

    try:
        for action in actions:
            target = action[1]
            action = action[0]

            if not config.has_section(f'build:{target}'):
                print("ERROR: config file has no build section for target")
                exit(1)
            devtarget = f'target:{config.get(f"build:{target}", "target", fallback="")}'
            if not config.has_section(devtarget):
                print("ERROR: config file has no section for device target")
                exit(1)
            toolchain = config.get(devtarget, 'toolchain', fallback="NONE")
            if toolchain=="NONE":
                print("ERROR: no toolchain specified for device target")
                exit(1)

            try:
                exec(f"from remotesyn.{toolchain}.{action} import do, needed_files, generated_files")
            except ImportError:
                print(f"ERROR: Unknown action '{action}' for toolchain '{toolchain}'")
                exit(1)

            # Send needed files
            for f in needed_files(config, target):
                send_file(channel, f)

            # ret = do(config, target, print, subprocesses)
            cmd(b'do'+sstr(f"{action} {target}"), channel)
            ret = 0

            # Get generated files
            for f in generated_files(config, target):
                recv_file(channel, f)

            if ret!=0:
                exit(ret)

    except paramiko.ssh_exception.SSHException as e:
        print("ERROR: Connection error...")
        for p in subprocesses:
            p.kill()
        exit(0)

    except KeyboardInterrupt:
        print("\rStopping rmbuild")
        for p in subprocesses:
            p.kill()
        exit(0)

287  scripts/rmserver  (Normal file)
@@ -0,0 +1,287 @@
#!/usr/bin/env python3

import configparser
import signal
import sys
import time
import subprocess
from types import NoneType
import paramiko
import base64
import struct
import os
import json
import threading
import socket
import shutil

# List of running threads
threads = []
running = False

def sighandler(sig, frame):
    global threads
    global running

    print("\rStopping server")
    running = False
    for t in threads:
        t.stop();

class FileTransferSF(threading.Thread):
    def __init__(self, channel, fname, identifier, fsize):
        threading.Thread.__init__(self)
        self.channel = channel
        self.fname = fname
        self.identifier = identifier
        self.fsize = fsize
        self.running = True
    def stop(self):
        self.running = False
    def run(self):
        with open(f"{self.identifier}/{self.fname}", 'wb') as f:
            fsize = self.fsize
            while fsize>0 and self.running:
                fdata = self.channel.recv(1024)
                f.write(fdata)
                fsize -= 1024

class FileTransferRF(threading.Thread):
    def __init__(self, channel, fname, identifier):
        threading.Thread.__init__(self)
        self.channel = channel
        self.fname = fname
        self.identifier = identifier
        self.running = True
    def stop(self):
        self.running = False
    def run(self):
        with open(f"{self.identifier}/{self.fname}", 'rb') as f:
            stat = os.fstat(f.fileno())
            print(' -> fsize', stat.st_size)
            fsize = struct.pack('>q', stat.st_size)
            i = stat.st_size
            self.channel.sendall(b'OK'+fsize)
            while i>0 and self.running:
                fdata = f.read(1024)
                self.channel.sendall(fdata)
                i -= 1024

class SSHServer(paramiko.ServerInterface):
    def __init__(self, authorized):
        self.event = threading.Event()
        self.authorized = authorized
        self.identifier = ''
        self.subprocesses = []
        self.threads = []

    def stop(self):
        self.event.set()
        for s in self.subprocesses:
            s.kill()
        for t in self.threads:
            if type(t) is not NoneType:
                t.stop()
                t.join()

    def check_channel_request(self, kind, chanid):
        if kind == 'session':
            return paramiko.OPEN_SUCCEEDED

    def check_auth_publickey(self, username, key):
        keyascii = key.get_base64()
        for auth in self.authorized:
            authascii = auth.split(' ')[1]
            if authascii==keyascii:
                return paramiko.AUTH_SUCCESSFUL
        return paramiko.AUTH_FAILED

    def get_allowed_auths(self, username):
        return 'publickey'

    def rstr(self, b):
        l = struct.unpack('>I', b[:4])[0]
        return (bytes.decode(b[4:4+l], 'utf-8'), b[4+l:])

    def sstr(self, s):
        return struct.pack('>I', len(s)) + s.encode('utf-8')

    def check_channel_exec_request(self, channel, command):
        try:
            command = base64.decodebytes(command)
            cmd = command[:2]
            data = command[2:]

            # Identifier
            if cmd==b'id':
                identifier = struct.unpack('>q', data[:8])[0]
                self.identifier = str(identifier)
                print('>', identifier)
                # Create directory
                if os.path.exists(str(identifier)):
                    shutil.rmtree(str(identifier))
                os.mkdir(str(identifier))

            # Exit
            elif cmd==b'ex':
                print('<', self.identifier)
                self.event.set()

            # Config
            elif cmd==b'cf':
                cnf, data = self.rstr(data)
                self.config = configparser.ConfigParser()
                self.config.read_dict(json.loads(cnf))

            # List files
            elif cmd==b'ls':
                dr, data = self.rstr(data)
                print('ls', dr)
                if not os.path.exists(f"{self.identifier}/{dr}"):
                    channel.sendall(b'ERFile not found')
                es = []
                for f in os.listdir(f'{self.identifier}/{dr}'):
                    if os.path.isfile(f'{self.identifier}/{dr}/{f}'):
                        df = 'f'
                    else:
                        df = 'd'
                    es.append(f'{df}{f}')
                channel.sendall(b'OK' + self.sstr('\n'.join(es)))

            # Send file
            elif cmd==b'sf':
                fname, data = self.rstr(data)
                fsize = struct.unpack('>q', data)[0]
                print('>>', fname, fsize)
                os.makedirs(os.path.dirname(f"{self.identifier}/{fname}"), exist_ok=True)
                channel.sendall(b'OK\n')
                t = FileTransferSF(channel, fname, self.identifier, fsize)
                self.threads.append(t)
                t.start()

            # Receive file
            elif cmd==b'rf':
                fname, data = self.rstr(data)
                print('<<', fname, self.identifier)
                if not os.path.exists(f"{self.identifier}/{fname}"):
                    channel.sendall(b'ERFile not found')
                else:
                    t = FileTransferRF(channel, fname, self.identifier)
                    self.threads.append(t)
                    t.start()

            # Execute rbuild
            elif cmd==b'do':
                args, data = self.rstr(data)
                print('[]', args)
                with open(f"{self.identifier}/project.cfg", "w") as f:
                    self.config.write(f)
                p = subprocess.Popen(f"rbuild -c project.cfg {args}", shell=True, cwd=f'{self.identifier}', stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
                self.subprocesses.append(p)
                res = p.wait()
                channel.sendall(struct.pack('>I', res))

            return True

        except Exception:
            global running
            if running:
                print("ERROR: Unknown error")
            return False

class Connection(threading.Thread):
    def __init__(self, sock, addr, host_key, authorized):
        threading.Thread.__init__(self)
        self.sock = sock
        self.addr = addr
        self.host_key = host_key
        self.authorized = authorized
        self.running = False
        print("Connection from", addr)

    def stop(self):
        self.server.event.set()
        self.server.stop()

    def clean(self):
        pass

    def run(self):
        transport = paramiko.Transport(self.sock)
        transport.set_gss_host(socket.getfqdn(""))
        transport.load_server_moduli()
        transport.add_server_key(self.host_key)
        server = SSHServer(self.authorized)
        transport.start_server(server=server)
        self.server = server
        while not server.event.is_set():
            if not transport.is_alive():
                print("Connection", self.addr, "is broken from other end")
                server.stop()
                break
            time.sleep(0.2)
        else:
            print("Connection", self.addr, "closed")
        if server.identifier!='':
            pass
            shutil.rmtree(server.identifier, True)
        transport.close()

def print_help():
    print("Unified FPGA synthesizer frontend - remote execution server\r\n(c) Joppe Blondel - 2022\r\n")
    print(f"Usage: {sys.argv[0]} [ OPTIONS ] host port privkey pubkey authorized_hosts")
    print("")
    print("Options:")
    print(" -h Show this help message")

if __name__=="__main__":
    # Parse arguments
    i = 1
    host = ''
    port = ''
    pubkey = ''
    privkey = ''
    authorized_f = ''
    while i<len(sys.argv):
        if sys.argv[i]=='-h':
            print_help()
            exit(0)
        else:
            if host=='':
                host = sys.argv[i]
            elif port=='':
                port = sys.argv[i]
            elif privkey=='':
                privkey = sys.argv[i]
            elif pubkey=='':
                pubkey = sys.argv[i]
            elif authorized_f=='':
                authorized_f = sys.argv[i]
        i += 1

    signal.signal(signal.SIGINT, sighandler)

    # Load SSH settings
    host_key = paramiko.RSAKey(filename=privkey)
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind((host, int(port)))
    sock.settimeout(0.2)
    sock.listen(100)
    # Get authorized hosts
    with open(authorized_f, 'r') as f:
        authorized = f.read().split('\n')

    running = True
    while running:
        try:
            client, addr = sock.accept()
            conn = Connection(client, addr, host_key, authorized)
            conn.start()
            threads.append(conn)
        except TimeoutError as e:
            pass

    for t in threads:
        t.join()
        t.clean()

25  setup.py  (Normal file)
@@ -0,0 +1,25 @@
from setuptools import setup

with open("README.md", 'r') as f:
    long_description = f.read()

setup(
    name='remotesyn',
    version='0.2',
    description='Remote FPGA synthesis abstraction tool',
    long_description=long_description,
    author='Joppe Blondel',
    author_email='joppe@blondel.nl',
    download_url='',
    url='https://git.joppeb.nl/joppe/remotesyn',
    keywords = ['FPGA', 'Synthesis', 'Xilinx', 'ISE', 'Vivado',],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python :: 3',
    ],
    packages=['remotesyn'],
    licence='BSD Licence',
    install_requires=['paramiko'],
    scripts=['scripts/rbuild', 'scripts/rmbuild', 'scripts/rmserver']
)