-
Notifications
You must be signed in to change notification settings - Fork 1
/
tsort-modules
executable file
·127 lines (94 loc) · 3.45 KB
/
tsort-modules
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
#!/usr/bin/env python3
#
# Translated from the Bash script `tsort-modules.orig`.
from glob import glob, escape as glob_escape
import json
import os.path
import sys
from typing import *
import click
import pysh
from pysh import cmd, check_cmd, check_cmd_f, slurp_cmd
# The Bash original resolved paths relative to the script's own location:
#   this_file = os.path.realpath(__file__)
#   rootdir = os.path.join(this_file, '../..')
# For easier experimentation we instead anchor everything at the
# current working directory.
rootdir = os.getcwd()
# Locally-installed npm executables (e.g. `flow`) live here.
bindir = rootdir + '/node_modules/.bin'
# Top-level command group; subcommands attach themselves below via
# @main.command().  No docstring on purpose: click would surface it
# as the CLI help text, and the original had none.
@click.group()
def main() -> None:
    pass
def ltrimstr(prefix: AnyStr, s: AnyStr) -> AnyStr:
    '''Strip a leading `prefix` from `s`, if present; inspired by jq's `ltrimstr`.'''
    if not s.startswith(prefix):
        return s
    return s[len(prefix):]
def import_pairs() -> Iterator[Tuple[str, str]]:
    """Yield each (import, path) pair reported by Flow for our JS sources.

    Runs `flow get-imports --json` over every file matching
    `src/**/*.js` under `rootdir`, then flattens the JSON output,
    trimming a leading `rootdir/` from both the `import` and `path`
    fields so results are repo-relative.
    """
    # TODO glob+format similar to shwords's split+format
    sources = glob('{}/src/**/*.js'.format(glob_escape(rootdir)),
                   recursive=True)
    prefix = f'{rootdir}/'
    imports = json.loads(pysh.slurp(
        # TODO hand json.load a file, without slurp
        cmd.run('{}/flow get-imports --json {!@}',  # TODO f-style global
                bindir, sources)))
    # A little more typing than jq's `.[]`, and much more explicit.
    # In particular, this disambiguates that we're expecting an object,
    # not an array.
    for item in imports.values():
        for req in item['requirements']:
            yield (ltrimstr(prefix, req['import']),
                   ltrimstr(prefix, req['path']))
@pysh.filter
@pysh.output(type='tstream')
def _print_import_pairs(output):
    # Emit each (import, path) pair as one space-separated line.
    for pair in import_pairs():
        print(*pair, file=output)
# CLI `pairs` subcommand: print one `import path` pair per line.
# (Comment, not docstring, so click's help output stays unchanged.)
@main.command(name='pairs')
def print_import_pairs() -> None:
    # TODO make this wrapper unneeded (or automatic); slogan says
    # a Pysh filter is a "shell function", so it should both get
    # a command-line interface and be ready to use in pipelines.
    pysh.to_stdout(_print_import_pairs())
def sorted_files() -> Iterator[str]:
    """Yield filenames in `tsort` (dependency) order, one str per line.

    NOTE(review): the original annotation said Iterator[Tuple[str, str]],
    but each yielded value is a single decoded line; fixed to
    Iterator[str].  Failures from tsort are deliberately swallowed
    (_check=False, stderr to DEVNULL) -- presumably to tolerate cycles
    in the import graph; confirm.
    """
    for line in (
            _print_import_pairs()
            | cmd.encode()
            | cmd.run('tsort', _check=False, _stderr=pysh.DEVNULL)
            | cmd.splitlines()):
        # TODO move decoding inside pipeline
        yield os.fsdecode(line)
# CLI `list` subcommand: print our @flow-marked src/ files in
# dependency order.
@main.command(name='list')
def sorted_ourfiles():
    # Replace `grep` with pure Python where it's on a stream.
    # Fix: require the full `.js` extension (matching the src/**/*.js
    # glob in import_pairs), not merely a trailing "js".
    files = [f for f in sorted_files()
             if f.startswith('src/') and f.endswith('.js')]
    # But on a bunch of files, real `grep` is much cleaner.
    # TODO don't dump traceback on SIGPIPE, e.g. `tsort-modules list | head`
    check_cmd_f('grep -l @flow -- {files!@}')
def number_lines():
    # Pipeline stage: number every line (`nl -ba`), left-justified with
    # no leading zeros (-nln) in a 3-wide field (-w3), separated from
    # the text by a single space (-s' ').
    # TODO some syntax to escape/quote spaces
    return cmd.run('nl -ba -nln -w3 -s{}', ' ')
def with_flow():
    # Return a pipeline stage that greps the @flow line out of each of
    # our src/ files, in dependency order.
    # Fix: require the full `.js` extension (matching the src/**/*.js
    # glob in import_pairs), not merely a trailing "js".
    files = [f for f in sorted_files()
             if f.startswith('src/') and f.endswith('.js')]
    return cmd.run('grep @flow -- {!@}', files)
# CLI `flow` subcommand: print each file's @flow line, numbered by its
# position in dependency order.
@main.command(name='flow')
def with_flow_numbered():
    pysh.to_stdout(with_flow() | number_lines())
# CLI `todo` subcommand: like `flow`, but filter out files already at
# `@flow strict` -- i.e. show the remaining upgrade work.
@main.command()
def todo():
    pysh.to_stdout(with_flow() | number_lines()
                   | cmd.run('grep -Ev {}', r'@flow strict(\s|$)'))
@main.command()
@click.argument('filename')
def depends(filename):
    # Print every import recorded with `filename` as its path.
    matches = (imp for imp, path in import_pairs() if path == filename)
    for match in matches:
        print(match)
@main.command()
@click.argument('filename')
def rdepends(filename):
    # Print the path of every pair whose import equals `filename`.
    for imported, importer_path in import_pairs():
        if imported != filename:
            continue
        print(importer_path)
# Run the CLI only when executed as a script, not on import.
if __name__ == '__main__':
    main()