fileders load via get_index rather than list_facets/list_fileders
s-ol
3 years ago
59 | 59 | local browser = require 'mmm.mmmfs.browser' |
60 | 60 | local fileder = require 'mmm.mmmfs.fileder' |
61 | 61 | local web = require 'mmm.mmmfs.stores.web' |
62 | local root = fileder.Fileder(web.WebStore({ verbose = true }), path) | |
62 | ||
63 | local store = web.WebStore({ verbose = true }) | |
64 | local index = store:get_index(path, -1) | |
65 | local root = fileder.Fileder(store, index) | |
63 | 66 | |
64 | 67 | BROWSER = browser.Browser(root, path, true) |
65 | 68 | end) |
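
For reference, the index table that get_index hands back (and that Fileder now accepts in place of a bare path) has roughly this shape; the exact structure is pinned down by Store.get_index and the spec further down, and the concrete paths and facets here are only illustrative:

    -- sketch of a recursive index, i.e. the result of store\get_index path, -1
    {
      path: '/hello'
      facets: {
        { name: 'name', type: 'alpha' }
      }
      children: {
        {
          path: '/hello/world'
          facets: { { name: '', type: 'text/markdown' } }
          children: {}
        }
      }
    }

At depth 1 each child collapses to its plain path string, since get_index with depth 0 returns just the path.
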
69 | 72 | -- serve fileder index |
70 | 73 | -- '?index': one level deep |
71 | 74 | -- '?tree': recursively |
72 | index = fileder\get_index facet.name == '?tree' | |
75 | depth = if facet.name == '?tree' then -1 else 1 | |
76 | index = @store\get_index path, depth | |
73 | 77 | convert 'table', facet.type, index, fileder, facet.name |
74 | 78 | else |
75 | 79 | -- fileder and facet given |
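
Seen from the client, the two pseudo-facets correspond to the depth argument of WebStore\get_index further down; a minimal sketch, with the host value made up:

    store = WebStore host: 'https://example.org'
    shallow = store\get_index '/hello'       -- requests '/hello/?index: application/json'
    full    = store\get_index '/hello', -1   -- requests '/hello/?tree: application/json'
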
39 | 39 | base = base\match '^(.*)%.%w+$' |
40 | 40 | |
41 | 41 | (name, x) -> |
42 | name = base .. name if '.' == name\sub 1, 1 | |
42 | if name == '.' | |
43 | name = base | |
44 | else if '.' == name\sub 1, 1 | |
45 | name = base .. name | |
46 | ||
43 | 47 | _require name |
44 | 48 | |
45 | 49 | if on_load |
34 | 34 | base = base\match '^(.*)%.%w+$' |
35 | 35 | |
36 | 36 | (name, x) -> |
37 | name = base .. name if '.' == name\sub 1, 1 | |
37 | if name == '.' | |
38 | name = base | |
39 | else if '.' == name\sub 1, 1 | |
40 | name = base .. name | |
41 | ||
38 | 42 | _require name |
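
The new '.' case is what lets a module pull in its own package root, as the stores below do with "import Store from require '.'". A short sketch of how names resolve, using a literal module name where the real code passes `...`, and assuming the fs store lives at mmm.mmmfs.stores.fs:

    require = relative 'mmm.mmmfs.stores.fs', 1  -- strip one segment: base is 'mmm.mmmfs.stores'
    import Store from require '.'                -- resolves to 'mmm.mmmfs.stores'
    web = require '.web'                         -- resolves to 'mmm.mmmfs.stores.web'
    lfs = require 'lfs'                          -- absolute names pass through unchanged
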
133 | 133 | @facet_keys[k] = v |
134 | 134 | } |
135 | 135 | |
136 | load: => | |
136 | -- this fails with JS objects from JSON.parse | |
137 | if 'table' == type @path | |
138 | index = @path | |
139 | @path = index.path | |
140 | @load index | |
141 | ||
142 | assert ('string' == type @path), "invalid path: '#{@path}'" | |
143 | ||
144 | load: (index) => | |
145 | assert not @loaded, "already loaded!" | |
137 | 146 | @loaded = true |
138 | 147 | |
139 | for path in @store\list_fileders_in @path | |
140 | table.insert @children, Fileder @store, path | |
141 | ||
142 | for name, type in @store\list_facets @path | |
143 | key = Key name, type | |
148 | if not index | |
149 | index = @store\get_index @path | |
150 | ||
151 | for path_or_index in *index.children | |
152 | table.insert @children, Fileder @store, path_or_index | |
153 | ||
154 | for key in *index.facets | |
155 | key = Key key | |
144 | 156 | @facet_keys[key] = key |
145 | 157 | |
146 | 158 | _, name = dir_base @path |
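
With this change a Fileder can be built either lazily from a path or eagerly from an index that was already fetched, which is what the browser entry point above does; a sketch, assuming a store instance is at hand:

    -- lazy: the index is only fetched from the store once load! runs
    root_lazy = Fileder store, '/hello'

    -- eager: one recursive get_index call covers the whole subtree, and the
    -- children are constructed straight from the nested index tables
    root_eager = Fileder store, store\get_index '/hello', -1
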
194 | 206 | |
195 | 207 | [name for name in pairs names] |
196 | 208 | |
197 | -- get an index table, listing path, facets and children | |
198 | -- optionally get recursive index | |
199 | get_index: (recursive=false) => | |
200 | { | |
201 | path: @path | |
202 | facets: [key for str, key in pairs @facet_keys] | |
203 | children: if recursive | |
204 | [child\get_index true for child in *@children] | |
205 | else | |
206 | [{ :path } for { :path } in *@children] | |
207 | } | |
208 | ||
209 | 209 | -- check whether a facet is directly available |
210 | 210 | has: (...) => |
211 | 211 | want = Key ... |
0 | require = relative ..., 1 | |
0 | 1 | lfs = require 'lfs' |
2 | import Store from require '.' | |
1 | 3 | |
2 | 4 | -- split filename into dirname + basename |
3 | 5 | dir_base = (path) -> |
7 | 9 | |
8 | 10 | dir, base |
9 | 11 | |
10 | class FSStore | |
12 | class FSStore extends Store | |
11 | 13 | new: (opts = {}) => |
14 | super opts | |
15 | ||
12 | 16 | opts.root or= 'root' |
13 | opts.verbose or= false | |
14 | ||
15 | if not opts.verbose | |
16 | @log = -> | |
17 | 17 | |
18 | 18 | -- ensure path doesn't end with a slash
19 | 19 | @root = opts.root\match '^(.-)/?$' |
20 | 20 | @log "opening '#{opts.root}'..." |
21 | ||
22 | log: (...) => | |
23 | print "[DB]", ... | |
24 | 21 | |
25 | 22 | -- fileders |
26 | 23 | list_fileders_in: (path='') => |
30 | 27 | entry_path = @root .. "#{path}/#{entry_name}" |
31 | 28 | if 'directory' == lfs.attributes entry_path, 'mode' |
32 | 29 | coroutine.yield "#{path}/#{entry_name}" |
33 | ||
34 | list_all_fileders: (path='') => | |
35 | coroutine.wrap -> | |
36 | for path in @list_fileders_in path | |
37 | coroutine.yield path | |
38 | for p in @list_all_fileders path | |
39 | coroutine.yield p | |
40 | 30 | |
41 | 31 | create_fileder: (parent, name) => |
42 | 32 | @log "creating fileder #{path}" |
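
FSStore now gets the shared pieces (verbose logging, list_all_fileders, get_index) from Store; a minimal usage sketch with the default root directory name from above:

    store = FSStore root: 'root', verbose: true
    for path in store\list_all_fileders!   -- inherited from Store
      print path
    index = store\get_index '', 1          -- also inherited; one level deep
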
0 | 0 | require = relative ..., 0 |
1 | ||
2 | class Store | |
3 | new: (opts) => | |
4 | opts.verbose or= false | |
5 | ||
6 | if not opts.verbose | |
7 | @log = -> | |
8 | ||
9 | list_fileders_in: => error "not implemented" | |
10 | ||
11 | list_all_fileders: (path='') => | |
12 | coroutine.wrap -> | |
13 | for path in @list_fileders_in path | |
14 | coroutine.yield path | |
15 | for p in @list_all_fileders path | |
16 | coroutine.yield p | |
17 | ||
18 | get_index: (path='', depth=1) => | |
19 | if depth == 0 | |
20 | return path | |
21 | ||
22 | { | |
23 | :path | |
24 | facets: [{:name, :type} for name, type in @list_facets path] | |
25 | children: [@get_index child, depth - 1 for child in @list_fileders_in path] | |
26 | } | |
27 | ||
28 | close: => | |
29 | ||
30 | log: (...) => | |
31 | print "[#{@@__name}]", ... | |
1 | 32 | |
2 | 33 | -- instantiate a store from a CLI arg |
3 | 34 | -- e.g.: sql, fs:/path/to/root, sql:MEMORY, sql:db.sqlite3 |
28 | 59 | os.exit 1 |
29 | 60 | |
30 | 61 | { |
62 | :Store | |
31 | 63 | :get_store |
32 | 64 | } |
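
The base class leaves the primitive accessors to its subclasses and derives the rest (list_all_fileders, get_index, logging) from them. A minimal read-only subclass might look like the sketch below; the TableStore name and its table-backed layout are made up for illustration, and only the accessors used here are filled in:

    class TableStore extends Store
      -- @data layout (hypothetical): { [path]: { [facet_name]: { type: ..., value: ... } } }
      new: (@data, opts = {}) => super opts

      list_fileders_in: (path='') =>
        coroutine.wrap ->
          for p in pairs @data
            -- direct children only (naive pattern match, enough for a sketch)
            coroutine.yield p if p\match "^#{path}/[^/]+$"

      list_facets: (path) =>
        coroutine.wrap ->
          for name, facet in pairs @data[path] or {}
            coroutine.yield name, facet.type

      load_facet: (path, name, type) =>
        @data[path][name].value
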
0 | require = relative ..., 1 | |
0 | 1 | sqlite = require 'sqlite3' |
1 | root = os.tmpname! | |
2 | import Store from require '.' | |
2 | 3 | |
3 | class SQLStore | |
4 | class SQLStore extends Store | |
4 | 5 | new: (opts = {}) => |
6 | super opts | |
7 | ||
5 | 8 | opts.file or= 'db.sqlite3' |
6 | opts.verbose or= false | |
7 | 9 | opts.memory or= false |
8 | ||
9 | if not opts.verbose | |
10 | @log = -> | |
11 | 10 | |
12 | 11 | if opts.memory |
13 | 12 | @log "opening in-memory DB..." |
42 | 41 | CREATE INDEX IF NOT EXISTS facet_name ON facet(name); |
43 | 42 | ]] |
44 | 43 | |
45 | log: (...) => | |
46 | print "[DB]", ... | |
47 | ||
48 | 44 | close: => |
49 | 45 | @db\close! |
50 | 46 | |
69 | 65 | for { path } in @fetch 'SELECT path |
70 | 66 | FROM fileder WHERE parent IS ?', path |
71 | 67 | coroutine.yield path |
72 | ||
73 | list_all_fileders: (path='') => | |
74 | coroutine.wrap -> | |
75 | for path in @list_fileders_in path | |
76 | coroutine.yield path | |
77 | for p in @list_all_fileders path | |
78 | coroutine.yield p | |
79 | 68 | |
80 | 69 | create_fileder: (parent, name) => |
81 | 70 | path = "#{parent}/#{name}" |
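
For completeness, the two ways the SQL store gets opened; the in-memory variant is the one the spec below exercises:

    mem  = SQLStore memory: true   -- in-memory database
    disk = SQLStore!               -- on-disk database at the default 'db.sqlite3'
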
0 | require = relative ..., 1 | |
1 | import Store from require '.' | |
2 | { :location, :XMLHttpRequest, :JSON, :Object, :Array } = js.global | |
3 | ||
0 | 4 | -- split filename into dirname + basename |
1 | 5 | dir_base = (path) -> |
2 | 6 | dir, base = path\match '(.-)([^/]-)$' |
5 | 9 | |
6 | 10 | dir, base |
7 | 11 | |
8 | { :location, :XMLHttpRequest, :JSON } = js.global | |
9 | 12 | fetch = (url) -> |
10 | 13 | request = js.new XMLHttpRequest |
11 | 14 | request\open 'GET', url, false |
14 | 17 | assert request.status == 200, "unexpected status code: #{request.status}" |
15 | 18 | request.responseText |
16 | 19 | |
17 | class WebStore | |
20 | parse_json = do | |
21 | fix = (val) -> | |
22 | switch type val | |
23 | when 'userdata' | |
24 | if Array\isArray val | |
25 | [fix x for x in js.of val] | |
26 | else | |
27 | {(fix e[0]), (fix e[1]) for e in js.of Object\entries(val)} | |
28 | else | |
29 | val | |
30 | ||
31 | (string) -> | |
32 | print fix JSON\parse string | |
33 | fix JSON\parse string | |
34 | ||
35 | class WebStore extends Store | |
18 | 36 | new: (opts = {}) => |
37 | super opts | |
38 | ||
19 | 39 | if MODE == 'CLIENT' |
20 | 40 | origin = location.origin |
21 | 41 | opts.host or= origin |
22 | opts.verbose or= false | |
23 | ||
24 | if not opts.verbose | |
25 | @log = -> | |
26 | 42 | |
27 | 43 | -- ensure host ends without a slash |
28 | 44 | @host = opts.host\match '^(.-)/?$' |
29 | 45 | @log "connecting to '#{@host}'..." |
30 | 46 | |
31 | log: (...) => | |
32 | print "[DB]", ... | |
47 | get_index: (path='', depth=1) => | |
48 | pseudo = if depth > 1 or depth < 0 then '?tree' else '?index' | |
49 | json = fetch "#{@host .. path}/#{pseudo}: application/json" | |
50 | parse_json json | |
33 | 51 | |
34 | 52 | -- fileders |
35 | 53 | list_fileders_in: (path='') => |
36 | 54 | coroutine.wrap -> |
37 | 55 | json = fetch "#{@host .. path}/?index: application/json" |
38 | index = JSON\parse json | |
56 | index = parse_json json | |
39 | 57 | for child in js.of index.children |
40 | 58 | coroutine.yield child.path |
41 | ||
42 | list_all_fileders: (path='') => | |
43 | coroutine.wrap -> | |
44 | for path in @list_fileders_in path | |
45 | coroutine.yield path | |
46 | for p in @list_all_fileders path | |
47 | coroutine.yield p | |
48 | 59 | |
49 | 60 | create_fileder: (parent, name) => |
50 | 61 | @log "creating fileder #{path}" |
69 | 80 | index = JSON\parse json |
70 | 81 | for facet in js.of index.facets |
71 | 82 | coroutine.yield facet.name, facet.type |
72 | -- @TODO: this doesn't belong here! | |
73 | if facet.type\match 'text/moonscript' | |
74 | coroutine.yield facet.name, facet.type\gsub 'text/moonscript', 'text/lua' | |
75 | 83 | |
76 | 84 | load_facet: (path, name, type) => |
77 | 85 | fetch "#{@host .. path}/#{name}: #{type}" |
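
The parse_json helper exists because JSON\parse returns JS objects, which appear as userdata rather than tables on the Lua side, so the 'table' == type check in the Fileder constructor and the comprehensions over index.children and index.facets would not work on them (the comment in the Fileder constructor above points at the same problem); parse_json recursively copies arrays and objects into plain Lua tables. A small illustration, assuming the same JS bridge:

    raw = JSON\parse '{"path": "/hello"}'
    print type raw     -- 'userdata' (a JS object)

    index = parse_json '{"path": "/hello", "facets": [], "children": []}'
    print type index   -- 'table', safe to iterate and to hand to Fileder
    print index.path   -- '/hello'
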
0 | -- relative imports | |
1 | _G.relative = do | |
2 | _require = require | |
3 | ||
4 | (base, sub) -> | |
5 | sub = sub or 0 | |
6 | ||
7 | for i=1, sub | |
8 | base = base\match '^(.*)%.%w+$' | |
9 | ||
10 | (name, x) -> | |
11 | if name == '.' | |
12 | name = base | |
13 | else if '.' == name\sub 1, 1 | |
14 | name = base .. name | |
15 | ||
16 | _require name | |
17 | ||
0 | 18 | sort2 = (a, b) -> |
1 | 19 | {ax, ay}, {bx, by} = a, b |
2 | 20 | "#{ax}//#{ay}" < "#{bx}//#{by}" |
56 | 74 | assert.are.same {{'', 'text/markdown'}, {'name', 'alpha'}}, |
57 | 75 | toseq2 ts\list_facets '/hello/world' |
58 | 76 | |
77 | describe "can get indexes", -> | |
78 | hello_index = { | |
79 | path: '/hello' | |
80 | children: { | |
81 | '/hello/world' | |
82 | } | |
83 | facets: { | |
84 | { name: 'name', type: 'alpha' } | |
85 | } | |
86 | } | |
87 | ||
88 | it "for a single level", -> | |
89 | assert.are.same hello_index, ts\get_index '/hello' | |
90 | ||
91 | ||
92 | root_index = { | |
93 | path: '' | |
94 | children: { | |
95 | hello_index | |
96 | } | |
97 | facets: {} | |
98 | } | |
99 | it "for a limited number of levels", -> | |
100 | assert.are.same root_index, ts\get_index '', 2 | |
101 | ||
102 | it "recursively", -> | |
103 | hello_index.children[1] = { | |
104 | path: '/hello/world' | |
105 | children: { | |
106 | { | |
107 | path: '/hello/world/again' | |
108 | children: {} | |
109 | facets: {} | |
110 | } | |
111 | } | |
112 | facets: { | |
113 | { name: '', type: 'text/markdown' } | |
114 | { name: 'name', type: 'alpha' } | |
115 | } | |
116 | } | |
117 | ||
118 | assert.are.same root_index, ts\get_index '', -1 | |
119 | ||
120 | it "can get indexes recursively", -> | |
121 | ||
59 | 122 | it "can load facets", -> |
60 | 123 | assert.are.equal 'hello', ts\load_facet '/hello', 'name', 'alpha' |
61 | 124 | assert.are.equal 'world', ts\load_facet '/hello/world', 'name', 'alpha' |
88 | 151 | ts\remove_fileder '/hello' |
89 | 152 | assert.are.same {}, toseq ts\list_all_fileders! |
90 | 153 | |
91 | describe "SQL spec", -> | |
154 | it "can be closed", -> | |
155 | ts\close! | |
156 | ||
157 | describe "SQL store", -> | |
92 | 158 | import SQLStore from require 'mmm.mmmfs.stores.sql' |
93 | 159 | |
94 | 160 | test_store SQLStore memory: true |
104 | 170 | assert os.remove root |
105 | 171 | assert lfs.mkdir root |
106 | 172 | |
107 | test_store LFSStore :root | |
173 | test_store FSStore :root | |
108 | 174 | |
109 | 175 | teardown -> |
110 | 176 | assert lfs.rmdir root |