fabien 2 days ago
parent fc86c60475
commit a8c0d5200c
  1. 1
      backend/assets.mapped.layout.json
  2. 20
      backend/assets.names
  3. 31
      backend/custom-mime-example.xml
  4. 16
      backend/extract_as_json.js
  5. 28
      backend/mapping
  6. 3045
      backend/package-lock.json
  7. 6
      backend/package.json
  8. 14
      backend/packing_directory_script
  9. 0
      backend/pdf_xml.js
  10. 194
      backend/pmwiki_reader.lua
  11. 2
      backend/stt/whisper_setup.txt
  12. 26
      backend/withdefault.jxrstyles.json
  13. 1
      data/console.aframe.component
  14. 1
      data/default.layout.json
  15. 28
      data/demo_q1.json
  16. 52
      data/demo_q2.json
  17. 8
      data/demos_editor_example.html
  18. 52
      data/demos_example.html
  19. 195
      data/demos_feedback_example.html
  20. 101
      data/demos_feedback_viewer_example.html
  21. 75
      data/fabien_corneish_zen.keymap
  22. 31
      data/filters/csv.js
  23. 30
      data/filters/docx_packed.xml.js
  24. 24
      data/filters/docx_unpacked.xml.js
  25. 0
      data/filters/image_and_glb_gltf.js
  26. 41
      data/filters/json_ref_manual.js
  27. 32
      data/filters/keymap.js
  28. 36
      data/filters/layout.json.js
  29. 63
      data/filters/mapvisualmeta.json.js
  30. 82
      data/filters/mapvisualmeta.jsons.zip.js
  31. 42
      data/filters/markdown.js
  32. 84
      data/filters/odt_unpacked.xml.js
  33. 121
      data/filters/pdf_unpacked.xml.js
  34. 49
      data/filters/peertubeapi.js
  35. 50
      data/filters/pmwiki.js
  36. 40
      data/filters/q2layout.json.js
  37. 112
      data/filters/rete.bitbybit.json.js
  38. 61
      data/filters/sqlite.js
  39. 7
      data/filters/srt_to_json.js
  40. 3
      data/filters/svg.js
  41. 86
      data/filters/tapestry.json.js
  42. 61
      data/filters/template_example.js
  43. 41
      data/filters/visualmeta.json.js
  44. 337
      data/found_set_param.html
  45. 215
      data/gesture-exploration.js
  46. 5935
      data/index.html
  47. 9
      data/interactions/onreleased/color_change.js
  48. 1
      data/manuscript.txt
  49. 695
      data/references_manual_v04.json
  50. 838
      data/references_manual_v14.json
  51. 46
      data/selfcontained_test.html
  52. 3
      data/style.css
  53. 26
      data/withdefault.jxrstyles.json
  54. 13
      jxr-core.js

@ -0,0 +1 @@
[ {"filename":"console.aframe.component","position":"0 0 0","rotation":"0 0 0"}, {"filename":"hello_world.json","position":"0 0 0","rotation":"0 0 0"}, {"filename":"hello_world.txt","position":"1 1 -1.2","rotation":"0 -50 0"}, {"filename":"index.html","position":"0 0 0","rotation":"0 0 0"}, {"filename":"index.html.png","position":"0 0 0","rotation":"0 0 0"}, {"filename":"sloan_test.txt","position":"1.4 1.5 -1","rotation":"0 -80 0"}, {"filename":"SpaSca_JSONCanvas_export.canvas","position":"0 0 0","rotation":"0 0 0"} ]

@ -0,0 +1,20 @@
kenney_supermarket_assets/bottle-return.glb
kenney_supermarket_assets/cash-register.glb
kenney_supermarket_assets/character-employee.glb
kenney_supermarket_assets/column.glb
kenney_supermarket_assets/display-bread.glb
kenney_supermarket_assets/display-fruit.glb
kenney_supermarket_assets/fence-door-rotate.glb
kenney_supermarket_assets/fence.glb
kenney_supermarket_assets/floor.glb
kenney_supermarket_assets/freezer.glb
kenney_supermarket_assets/freezers-standing.glb
kenney_supermarket_assets/shelf-bags.glb
kenney_supermarket_assets/shelf-boxes.glb
kenney_supermarket_assets/shelf-end.glb
kenney_supermarket_assets/shopping-basket.glb
kenney_supermarket_assets/shopping-cart.glb
kenney_supermarket_assets/wall-corner.glb
kenney_supermarket_assets/wall-door-rotate.glb
kenney_supermarket_assets/wall.glb
kenney_supermarket_assets/wall-window.glb

@ -0,0 +1,31 @@
from /usr/share/mime/packages/
once modified should run update-mime-database
or probably sufficient thus better, in user mode
~/.local/share/mime/ then namely update-mime-database ~/.local/share/mime/
https://unix.stackexchange.com/a/564888/486198
could be done for
.images.json
.aframe.component
.extractedpdf.json
.exported_author.json
.layout.json
.layout.json
.packeddirectory.json
.jxrstyles.json
.aframe.entity
.canvas
.2dmap.png
.webannotation.json
.pmwiki
.glb
<?xml version="1.0" encoding="UTF-8"?>
<mime-info xmlns="http://www.freedesktop.org/standards/shared-mime-info">
<mime-type type="application/x-jxr">
<comment>jxr Model</comment>
<glob pattern="*.jxr"/>
<icon name="jxr"/>
</mime-type>
</mime-info>

@ -0,0 +1,16 @@
const PDFExtract = require('pdf.js-extract').PDFExtract;
const pdfExtract = new PDFExtract();
const options = {};
const fs = require('fs');
var args = process.argv.slice(2)
//console.log(args)
if (args.length < 1) return console.log('missing input pdf filename');
const filename = args[0]
const output = filename+'_text.json'
pdfExtract.extract(filename, options, (err, data) => {
if (err) return console.log(err);
fs.writeFileSync(output, JSON.stringify(data) )
});

@ -0,0 +1,28 @@
7 7 2 # 7 width, 7 high, 2 layers
layer 0
19 19 19 19 18 19 19
19 09 09 09 09 09 20
20 09 09 09 09 09 19
19 09 09 09 09 09 19
19 09 09 09 09 09 20
20 09 09 09 09 09 19
20 09 09 09 09 09 19
19 19 20 19 19 20 19
layer 0.1
00 00 00 00 00 00 00
00 10 10 10 10 10 00
00 00 00 00 00 00 00
00 00 00 00 00 00 00
00 13 13 00 13 13 00
00 00 00 00 00 00 00
00 00 00 00 00 00 00
00 10 10 10 10 10 00
00 00 00 00 00 00 00
rotations layer 0
00 00 00 00 00 00 00
00 00 00 00 00 00 00
00 00 00 00 00 00 00
00 00 00 00 00 00 00
00 00 00 00 00 00 00
00 00 00 00 00 00 00
00 00 00 00 00 00 00

File diff suppressed because it is too large Load Diff

@ -0,0 +1,6 @@
{
"dependencies": {
"express": "^4.21.1",
"pdf.js-extract": ""
}
}

@ -0,0 +1,14 @@
#!/bin/bash
# Pack every file under public/ into ../pack.json as a JSON array of
# {"filename":..., "content":<base64>, "contenttype":...} objects.
# The content type is obtained per file from the local dev server.
cd public || exit 1
echo -n '[' > ../pack.json
for X in *;
do
  # Quote "$X" everywhere so filenames containing spaces survive.
  echo -n "{\"filename\":\"$X\", \"content\":\"$(base64 -w0 "$X")\"" >> ../pack.json;
  echo -n ",\"contenttype\":\"" >> ../pack.json;
  # HEAD request; emit only the Content-Type value, minus any ";charset=..." suffix.
  curl --insecure -I -s -o /dev/null -w '%header{Content-Type}' "https://192.168.0.129:3000/$X" | sed 's/;.*//' >> ../pack.json;
  echo -n "\"}," >> ../pack.json;
done;
# Drop the trailing comma left after the last element, then close the array.
sed -i "s/.$//" ../pack.json;
echo -n "]" >> ../pack.json

@ -0,0 +1,194 @@
-- Pandoc reader for PMWiki format: https://www.pmwiki.org/wiki/PmWiki/MarkupMasterIndex
-- Using LPeg: https://www.inf.puc-rio.br/~roberto/lpeg/
-- Inspired by https://pandoc.org/custom-readers.html

-- Shorthand aliases for the LPeg constructors used throughout the grammar.
local P, S, R, Cf, Cc, Ct, V, Cs, Cg, Cb, B, C, Cmt =
lpeg.P, lpeg.S, lpeg.R, lpeg.Cf, lpeg.Cc, lpeg.Ct, lpeg.V,
lpeg.Cs, lpeg.Cg, lpeg.Cb, lpeg.B, lpeg.C, lpeg.Cmt

-- Basic character classes and line-structure patterns.
local whitespacechar = S(" \t\r\n")
local specialchar = S("/*~[]\\{}|")  -- characters with PMWiki markup meaning
local wordchar = (1 - (whitespacechar + specialchar))
local spacechar = S(" \t")
local newline = P"\r"^-1 * P"\n"  -- accepts both LF and CRLF endings
local blankline = spacechar^0 * newline
local endline = newline * #-blankline
local endequals = spacechar^0 * P"="^0 * spacechar^0 * newline
local cellsep = spacechar^0 * P"|"  -- NOTE(review): dead — shadowed by the "||" redefinition below
local apostrophe = string.char(39)
local doubleApo = P(apostrophe) * P(apostrophe)  -- NOTE(review): unused; Emph/Bold match P"''" directly
local fenced = '```\n%s\n```\n'  -- markdown fence template for raw code blocks
local cellsep = spacechar^0 * P"||"  -- PMWiki table cell separator ("||")
-- Strip leading and trailing whitespace from s.
-- gsub's second return value (the match count) is discarded by only
-- returning the first result.
local function trim(s)
  local stripped = s:gsub("^%s*(.-)%s*$", "%1")
  return stripped
end
-- Build a parser for one list item at nesting level `lev`.
-- `ch` is the bullet character ('*' or '#'); when nil, either is accepted.
-- Produces { pandoc.Plain(inlines), sublist-or-nil } for each item.
local function ListItem(lev, ch)
  local start
  if ch == nil then
    start = S"*#"
  else
    start = P(ch)
  end
  -- Parser for a sub-list one level deeper; nesting is capped at 6 levels.
  local subitem = function(c)
    if lev < 6 then
      return ListItem(lev + 1, c)
    else
      return (1 - 1) -- fails
    end
  end
  local parser = spacechar^0
  * start^lev            -- at least `lev` bullet chars...
  * #(- start)           -- ...but not more (deeper items must not match here)
  * spacechar^0
  * Ct((V"Inline" - (newline * spacechar^0 * S"*#"))^0)
  * newline
  -- Optional nested list: bullets, then numbered, else capture nil.
  * (Ct(subitem("*")^1) / pandoc.BulletList
  +
  Ct(subitem("#")^1) / pandoc.OrderedList
  +
  Cc(nil))
  / function (ils, sublist)
  return { pandoc.Plain(ils), sublist }
  end
  return parser
end
-- Grammar
-- Full PMWiki grammar. Entry rule is "Doc"; V"X" references rule X.
-- Alternation order matters throughout: the first alternative that matches wins.
G = P{ "Doc",
  Doc = Ct(V"Block"^0)
  / pandoc.Pandoc ;
  Block = blankline^0
  * ( V"IndentedBlock"
  + V"Header"
  + V"HorizontalRule"
  + V"CodeBlock"
  + V"List"
  + V"Table"
  + V"Para"
  ) ;
  -- One or more space-indented lines become a fenced markdown raw block.
  IndentedBlock = C((spacechar^1
  * (1 - newline)^1
  * newline)^1
  )
  / function(text)
  -- NOTE(review): `block` is an implicit global (missing `local`)
  block = pandoc.RawBlock('markdown', fenced:format(text))
  return block
  end;
  -- PMWiki preformatted block: [@ ... @]
  CodeBlock = P"[@"
  * blankline
  * C((1 - (newline * P"@]"))^0)
  * newline
  * P"@]"
  / function(text)
  -- NOTE(review): implicit global `block` again
  block = pandoc.RawBlock('markdown', fenced:format(text))
  return block
  end;
  List = V"BulletList"
  + V"OrderedList" ;
  BulletList = Ct(ListItem(1,'*')^1)
  / pandoc.BulletList ;
  OrderedList = Ct(ListItem(1,'#')^1)
  / pandoc.OrderedList ;
  -- A "||border=..." property line, an optional header row, then body rows.
  Table = V"TableProperties"
  * (V"TableHeader" + Cc{})
  * Ct(V"TableRow"^1)
  / function(headrow, bodyrows)
  -- Column count comes from the first body row; default alignment and
  -- width 0 let pandoc auto-size each column.
  local numcolumns = #(bodyrows[1])
  local aligns = {}
  local widths = {}
  for i = 1,numcolumns do
  aligns[i] = pandoc.AlignDefault
  widths[i] = 0
  end
  return pandoc.utils.from_simple_table(
  pandoc.SimpleTable({}, aligns, widths, headrow, bodyrows))
  end ;
  -- The table attribute line itself is consumed and discarded.
  TableProperties = cellsep
  * spacechar^0
  * P("border=")
  * (1 - newline)^1
  * newline;
  TableHeader = Ct(V"HeaderCell"^1)
  * cellsep^-1
  * spacechar^0
  * newline ;
  TableRow = Ct(V"BodyCell"^1)
  * cellsep^-1
  * spacechar^0
  * newline ;
  -- Header cells are "||!..."; the "!" marker is optional here.
  HeaderCell = cellsep
  * P"!"^-1
  * spacechar^0
  * Ct((V"Inline" - (newline + cellsep))^0)
  / function(ils) return { pandoc.Plain(ils) } end ;
  BodyCell = cellsep
  * spacechar^0
  * Ct((V"Inline" - (newline + cellsep))^0)
  / function(ils) return { pandoc.Plain(ils) } end ;
  -- Fallback block: a plain line of inlines.
  Para = Ct(V"Inline"^1)
  * newline
  / pandoc.Para ;
  HorizontalRule = spacechar^0
  * P"----"
  * spacechar^0
  * newline
  / pandoc.HorizontalRule;
  -- "!" repeated N times yields a level-N header (string.len on the match).
  Header = (P("!")^1 / string.len)
  * spacechar^0
  * Ct((V"Inline" - endequals)^1)
  * endequals
  / pandoc.Header;
  -- Inline alternation; Bold (''') is tried before Emph ('') on purpose.
  Inline = V"Link"
  + V"Url"
  + V"Code"
  + V"Bold"
  + V"Emph"
  + V"Strikeout"
  + V"Str"
  + V"Space"
  + V"Special";
  -- [[url]] or [[url|description]]
  Link = P"[["
  * C((1 - (P"]]" + P"|"))^0)
  * (P"|" * Ct((V"Inline" - P"]]")^1))^-1
  * P"]]"
  / function(url, desc)
  local txt = desc or {pandoc.Str(url)}
  return pandoc.Link(txt, url)
  end;
  -- Bare http(s) URLs become self-links.
  Url = C(
  P"http"
  * P"s"^-1
  * P"://"
  * (1 - whitespacechar)^1
  )
  / function(url)
  return pandoc.Link(url, url)
  end;
  -- @@inline code@@ (trimmed before wrapping)
  Code = P'@@'
  * C((1 - P'@@')^0)
  * P'@@'
  / trim / pandoc.Code;
  Emph = P"''"
  * C(((wordchar + whitespacechar) - P"''")^1)
  * P"''"
  / pandoc.Emph;
  Bold = P"'''"
  * C(((wordchar + whitespacechar) - P"'''")^1)
  * P"'''"
  / pandoc.Strong;
  Strikeout = P"{-"
  * C(((wordchar + whitespacechar) - P"-}")^1)
  * P"-}"
  / pandoc.Strikeout;
  Str = wordchar^1
  / pandoc.Str;
  -- Lone special characters fall through as literal text.
  Special = specialchar
  / pandoc.Str;
  Space = spacechar^1
  / pandoc.Space ;
}
-- Pandoc custom-reader entry point: parse the input sources with the
-- PMWiki grammar. `reader_options` is accepted for API compatibility
-- but not used.
function Reader(input, reader_options)
  local source = tostring(input)
  return lpeg.match(G, source)
end

@ -0,0 +1,2 @@
ffmpeg -i public/poweruser_audiofile.ogg -ar 16000 -y terst.wav
LD_LIBRARY_PATH=/usr/app/whisper.cpp/build/bin/ /usr/app/whisper.cpp/build/bin/whisper-cli -f terst.wav -m /usr/app/whisper.cpp/models/ggml-base.en.bin

@ -0,0 +1,26 @@
{
"default" : [
{"selector":"#start_file_sloan_testtxt_end_file_hello_worldtxt", "attribute":"line", "value": "color:blue"},
{"selector":"a-sky", "attribute":"color", "value": "lightblue"},
{"selector":".notes", "attribute":"color", "value": "purple"},
{"selector":".notes", "attribute":"outline-color", "value": "darkblue"},
{"selector":"a-troika-text a-plane", "attribute":"color", "value": "white"},
{"selector":"a-troika-text a-triangle", "attribute":"color", "value": "gray"}
],
"light" : [
{"selector":"#start_file_sloan_testtxt_end_file_hello_worldtxt", "attribute":"line", "value": "color:blue"},
{"selector":"a-sky", "attribute":"color", "value": "gray"},
{"selector":".notes", "attribute":"color", "value": "black"},
{"selector":".notes", "attribute":"outline-color", "value": "white"},
{"selector":"a-troika-text a-plane", "attribute":"color", "value": "red"},
{"selector":"a-troika-text a-triangle", "attribute":"color", "value": "darkred"}
],
"print" : [
{"selector":"#start_file_sloan_testtxt_end_file_hello_worldtxt", "attribute":"line", "value": "color:brown"},
{"selector":"a-sky", "attribute":"color", "value": "#EEE"},
{"selector":".notes", "attribute":"color", "value": "black"},
{"selector":".notes", "attribute":"outline-color", "value": "white"},
{"selector":"a-troika-text a-plane", "attribute":"color", "value": "lightyellow"},
{"selector":"a-troika-text a-triangle", "attribute":"color", "value": "orange"}
]
}

@ -0,0 +1 @@
// Minimal placeholder component file — logs to the console so that loading
// of data/console.aframe.component can be verified.
console.log('test')

@ -7,23 +7,25 @@
"content":[
{"name":"Curated demos for Q1", "old_description":"use the left wrist to show commands, look behind and press nextDemo()",
"alt":"You have a sphere on your left wrist, touch it to view code snippets",
"description":"You have a sphere on your left wrist.\n You can tap it to reveal snippets of code with your pointing finger.\n Touching these on the left side allows you to:\n - Move them with your right hand\n - Execute with your left hand\n\nTo go to the next step in the demo, look behind you and tap 'jxr demoNext()' (edited)",
"description":"You have a sphere on your left wrist.\n You can tap it to reveal snippets of code with your pointing finger.\n Touching these on the left side allows you to:\n - Move them with your right hand\n - Execute with your left hand\n\nTo go to the next step in the demo, look behind you and tap 'jxr nextDemo()'",
"screenshot":"demoqueueq1.png",
"code":["https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L79","https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L1912-L1922", "https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L785-L799"] ,
"usernames":["demoqueueq1"] },
{"name":"Physical Table in VR (alignment)", "video":"https://youtu.be/A_vH3wRVX_4?t=3336", "description":"move the yellow table from center and release it by your desk height", "screenshot":"tabletest.png", "usernames":["tabletest"]},
{"name":"Tap wrist as shortcut", "description":"left wrist to show hide/code snippets","usernames":["q1_step_wrist"] },
{"name":"Shortcut binding", "description":"drag and drop onto wrist to make new command\ntry with nextDemo() then tap it to move on", "usernames":["q1_step_shortcutset"] },
{"name":"Highlight Text", "video":"https://youtu.be/A_vH3wRVX_4?t=5446", "description": "Puck a line from a PDF to change its coloor.\nThe result becomes available in 2D for yourself and others.\n\nUse the highlighters to freely draw over the document, under its text.\n\nSee https://companion.benetou.fr/highlights_example.html", "screenshot":"q1_step_highlights.png", "usernames":["q1_step_highlights"] },
{"name":"References cards", "video":"https://youtu.be/A_vH3wRVX_4?t=6541", "description": "Load a bibliography and manipulate reference as cards.\nSee https://companion.benetou.fr/references_manual_v04.json", "screenshot":"q1_step_refcards.png", "usernames":["q1_step_refcards"] },
{"name":"Manuscript stick to closest panels", "description":"Use the right wrist to show commands, show panels,\npick then release the manuscript from its center to drop it on the closest panel.", "usernames":["q1_step_snappanels"] },
{"name":"Unfolding Cube", "screenshot":"demo_cube_screenshot.jpg", "description":"Unfold and fold the cube, scale it to room scale\nthen back to the size of your hand to mive.", "screenshot":"refoncubetester.png", "usernames":["refoncubetester"] },
{"name":"Screenshot in VR", "description": "Document your process by taking screenshots\nthat become instantly available on the Web for yourself\nand to collaborators.\nSee https://companion.benetou.fr/audio_notes_example.html", "usernames":["q1_step_screenshot"] },
{"name":"Audio recording ", "screenshot":"poweruser_screenshot_1739174489566.jpg", "description": "Document the screenshots by talking over them.\nThey also become available to share.\n\nTranscriptions are used to make the documentation searchable.\nSee https://companion.benetou.fr/audio_notes_example.html", "usernames":["q1_step_audio"] },
{"name":"Physical Table in VR (alignment)", "video":"https://youtu.be/A_vH3wRVX_4?t=3336", "description":"move the yellow table from center and release it by your desk height", "screenshot":"tabletest.png", "usernames":["tabletest"], "code":["https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L675-L684"]},
{"name":"Tap wrist as shortcut", "description":"left wrist to show hide/code snippets","usernames":["q1_step_wrist"], "code":["https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/jxr-core.js#L586-L652", "https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/jxr-core.js#L674-L729"] },
{"name":"Shortcut binding", "description":"drag and drop onto wrist to make new command\ntry with nextDemo() then tap it to move on", "usernames":["q1_step_shortcutset"], "code":["https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/jxr-core.js#L13", "https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/jxr-core.js#L182-L185"] },
{"name":"Highlight Text", "video":"https://youtu.be/A_vH3wRVX_4?t=5446", "description": "Puck a line from a PDF to change its coloor.\nThe result becomes available in 2D for yourself and others.\n\nUse the highlighters to freely draw over the document, under its text.\n\nSee https://companion.benetou.fr/highlights_example.html", "screenshot":"q1_step_highlights.png", "usernames":["q1_step_highlights"], "code":["https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L825-L842", "https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L966-L1027"] },
{"name":"References cards", "video":"https://youtu.be/A_vH3wRVX_4?t=6541", "description": "Load a bibliography and manipulate reference as cards.\nSee https://companion.benetou.fr/references_manual_v04.json", "screenshot":"q1_step_refcards.png", "usernames":["q1_step_refcards"], "code":["https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L806-L818","https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/filters/json_ref_manual.js"] },
{"name":"Manuscript stick to closest panels", "description":"Use the right wrist to show commands, show panels,\npick then release the manuscript from its center to drop it on the closest panel.", "usernames":["q1_step_snappanels"], "code":["https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L1668-L1693"] },
{"name":"Unfolding Cube", "screenshot":"demo_cube_screenshot.jpg", "description":"Unfold and fold the cube, scale it to room scale\nthen back to the size of your hand to mive.", "screenshot":"refoncubetester.png", "usernames":["refoncubetester"], "code":["https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L690-L712", "https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L1707-L1878"] },
{"name":"Screenshot in VR", "description": "Document your process by taking screenshots\nthat become instantly available on the Web for yourself\nand to collaborators.\nSee https://companion.benetou.fr/audio_notes_example.html", "usernames":["q1_step_screenshot"], "code":["https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L851-L877", "https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L1601-L1618"] },
{"name":"Audio recording ", "screenshot":"poweruser_screenshot_1739174489566.jpg", "description": "Document the screenshots by talking over them.\nThey also become available to share.\n\nTranscriptions are used to make the documentation searchable.\nSee https://companion.benetou.fr/audio_notes_example.html", "usernames":["q1_step_audio"], "code": ["https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L844-L849", "https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L1451-L1583", "https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/filters/srt_to_json.js", "https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/backend/converters/ogg_tts.js"] },
{"name":"Visual Background ", "description":"(non-functional) for grey, room (3D model) and ornaments (animations in background), potential for ambient info as image or semantically integrated widgets",
"usernames":[ "backgroundexploration", "backgroundexplorationlowopacity", "backgroundexplorationlowwhitestatic", "backgroundexplorationlowwhite", "backgroundexplorationlowwhitegrids" ]
"usernames":[ "backgroundexploration", "backgroundexplorationlowopacity", "backgroundexplorationlowwhitestatic", "backgroundexplorationlowwhite", "backgroundexplorationlowwhitegrids" ],
"code": ["https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L78", "https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L938-L948", "https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L2277-L2301"]
},
{"name":"Customization via URL set", "description":"Modify the URL to customize the experience and share that with others, e.g. https://companion.benetou.fr/index.html?set_IDmanuscript_color=lightyellow", "usernames":["q1_step_urlcustom"] },
{"name":"Upload Document via desktop", "description":"Using a desktop or laptop, drag and drop an image in the top right corner, see the result live in XR.", "usernames":["q1_step_showfile"] },
{"name":"Customization via URL set", "description":"Modify the URL to customize the experience and share that with others, e.g. https://companion.benetou.fr/index.html?set_IDmanuscript_color=lightyellow", "usernames":["q1_step_urlcustom"], "code": ["https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/filters/modifications_via_url.js"] },
{"name":"Upload Document via desktop", "description":"Using a desktop or laptop, drag and drop an image in the top right corner, see the result live in XR.", "usernames":["q1_step_showfile"], "code": ["https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L88-L138"] },
{"name":"End of currated demos for Q1", "description":"Thank you for testing, please feel free to share idea on how to open this work more.", "usernames":["demoqueueq1end"] },
{"unassigned-usernames":[
"poweruser",

@ -0,0 +1,52 @@
{
"configuration": {
"description":"update the URL sequentially because they are hosted on the same domain",
"clarification":"usernames are used as identifiers and thus must be unique, even if leading to no behavior change. Example would q1step4 then q1step5.",
"prefixurl":"https://companion.benetou.fr/index.html?username="
},
"content":[
{"name":"automated hand testing", "testingurl": ["https://companion.benetou.fr/index.html?username=q2_step_refcards_filtering&emulatexr=true"], "description":"left wrist to show hide/code snippets","usernames":["q1_step_wrist"], "code":["https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/jxr-core.js#L586-L652", "https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/jxr-core.js#L674-L729"] },
{"name":"refcards filtering", "testingurl": ["https://companion.benetou.fr/index.html?username=q2_step_refcards_filtering&emulatexr=true"], "description":"left wrist to show hide/code snippets","usernames":["q1_step_wrist"], "code":["https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/jxr-core.js#L586-L652", "https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/jxr-core.js#L674-L729"] },
{"name":"volumetric frames", "video":"https://youtu.be/A_vH3wRVX_4?t=3336", "description":"move the yellow table from center and release it by your desk height", "screenshot":"tabletest.png", "usernames":["tabletest"], "testingurl": ["https://companion.benetou.fr/index.html?username=q2_step_volumetric_frames&emulatexr=true"], "code":["https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L675-L684"]},
{"name": "icon_tags","usernames":[ "icon_tags"]},
{"name": "q2_annotated_bibliography","usernames":[ "q2_annotated_bibliography"]},
{"name": "q2_annotated_bibliography_week2","usernames":[ "q2_annotated_bibliography_week2"]},
{"name": "q2_drop_for_graph","usernames":[ "q2_drop_for_graph"]},
{"name": "q2_immersive_console","usernames":[ "q2_immersive_console"]},
{"name": "q2_json_collaborations","usernames":[ "q2_json_collaborations"]},
{"name": "q2_lense","usernames":[ "q2_lense"]},
{"name": "q2_os_keyboard","usernames":[ "q2_os_keyboard"]},
{"name": "q2_pasting","usernames":[ "q2_pasting"]},
{"name": "q2_picker","usernames":[ "q2_picker"]},
{"name": "q2_remote_ntfy_keyboard","usernames":[ "q2_remote_ntfy_keyboard"]},
{"name": "q2_ring_keyboard","usernames":[ "q2_ring_keyboard"]},
{"name": "q2_step_contextuallayouts","usernames":[ "q2_step_contextuallayouts"]},
{"name": "q2_step_end","usernames":[ "q2_step_end"]},
{"name": "q2_step_highlight","usernames":[ "q2_step_highlight"]},
{"name": "q2_step_jsonedit","usernames":[ "q2_step_jsonedit"]},
{"name": "q2_step_layout_animationtests","usernames":[ "q2_step_layout_animationtests"]},
{"name": "q2_step_refcards_filtering","usernames":[ "q2_step_refcards_filtering"]},
{"name": "q2_step_start","usernames":[ "q2_step_start"]},
{"name": "q2_step_volumetric_frames","usernames":[ "q2_step_volumetric_frames"]},
{"name": "q2_visualmetaexport","usernames":[ "q2_visualmetaexport"]},
{"name": "q2_visualmetaexport_map","usernames":[ "q2_visualmetaexport_map"]},
{"name": "q2_visualmetaexport_map_via_wordpress","usernames":[ "q2_visualmetaexport_map_via_wordpress"]},
{"name": "q2_wrist_rotations","usernames":[ "q2_wrist_rotations"]},
{"name": "ring_discovery","usernames":[ "ring_discovery"]},
{"name": "ring_discovery_with_keyboard","usernames":[ "ring_discovery_with_keyboard"]},
{"name": "ring_highlights","usernames":[ "ring_highlights"]},
{"name": "temple_test","usernames":[ "temple_test"]},
{"name": "q2_most_recent_file","usernames":[ "q2_most_recent_file"]},
{"name": "q2_onrelease_lookat","usernames":[ "q2_onrelease_lookat"]}
],
"withemulation":{
"keyword":"emulatexr=true",
"content":[
"q2_lense",
"q2_step_refcards_filtering",
"q2_step_volumetric_frames",
"q2_step_layout_animationtests"
]
}
}

@ -4,6 +4,7 @@
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<script src="https://fabien.benetou.fr/pub/home/future_of_text_demo/engine/dependencies/webdav.js"></script>
<link rel="stylesheet" href="style.css">
</head>
<body>
<button onclick="save()">save</button>
@ -34,7 +35,8 @@ function save(){
if ( uu ) newContent.content.push( uu )
let filename = "demos_saved_"+Date.now()+".json"
saveJSONToWebDAV( filename, newContent )
let url = webdavURL+subdirWebDAV+filename
let url = "https://companion.benetou.fr/demos_example.html?filename="+filename
// let url = webdavURL+subdirWebDAV+filename
window.open(url, '_blank')
}
@ -102,7 +104,8 @@ ideas :
</h3>
<ul>
<li>code links, e.g. "code":["https://git.benetou.fr/utopiah/text-code-xr-engine/src/branch/master/index.html#L129", "https://git.benetou.fr/utopiah/text-code-xr-engine/src/branch/master/index.html#L1375-L1408"]
<li>remove items (to split into N demos)
<li>update code link to proper repository, i.e. https://git.benetou.fr/utopiah/spasca-fot-sloan-q1
<li>better integration with viewer (e.g. side by side view)
<li>in XR mode
<li>listing of past saved demo queues
@ -117,6 +120,7 @@ done :
<li>linked from viewer
<li>re-order
<li>test output https://companion.benetou.fr/demos_saved_1742539103750.json
<li>open directly in viewer, e.g. https://companion.benetou.fr/demos_example.html?filename=demos_saved_1742882525475.json
</ul>
</body>

@ -4,6 +4,7 @@
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<script src="https://fabien.benetou.fr/pub/home/future_of_text_demo/engine/dependencies/webdav.js"></script>
<link rel="stylesheet" href="style.css">
</head>
<body>
@ -17,19 +18,31 @@ const subdirWebDAV = "/fotsave/fot_sloan_companion_public/"
var webdavClient = window.WebDAV.createClient(webdavURL)
const hmdURL = "https://hmd.link/?https://companion.benetou.fr"
const urlParams = new URLSearchParams(window.location.search);
let filename = urlParams.get('filename');
if (!filename) filename = 'demo_q1.json'
// should update on query parameter
// should then use that in demo editor page
// const eventSourceConverted = new EventSource( `https://ntfy.benetou.fr/convertedwebdav/sse` )
// to use for live updates
async function getContent(){
let rootEl = document.getElementById("content")
/*
const contents = await webdavClient.getDirectoryContents(subdirWebDAV);
// consider instead search https://github.com/perry-mitchell/webdav-client#search
contents.filter(f => f.basename.endsWith('demo_q1.json'))
contents.filter(f => f.basename.endsWith( filename ))
.map(a => {
fetch(a.basename).then( r => r.json() ).then( r => {
*/
// removed direct WebDAV as it seems to create CORS issues (which... wasn't the case until now?)
fetch(filename).then( r => r.json() ).then( r => {
r.content.filter( c => c.name ).map( c => {
let h2 = document.createElement("h2")
h2.innerText = c.name
h2.id = c.name.replaceAll(' ', '_') // is added after the page has loaded... should position back
rootEl.appendChild(h2)
if ( c.screenshot ){
let img = document.createElement("img")
@ -77,12 +90,43 @@ async function getContent(){
ul.appendChild(li)
})
}
if (c.testingurl) {
let h3 = document.createElement("h4")
h3.innerHTML = "testing URL:"
rootEl.appendChild(h3)
let ul = document.createElement("ul")
rootEl.appendChild(ul)
c.testingurl.map( h => {
let li = document.createElement("li")
let link = document.createElement("a")
link.href = h
link.innerText = h.replace(/.*\//,'')
li.appendChild(link)
ul.appendChild(li)
})
}
if (c.code) {
let h3 = document.createElement("h4")
h3.innerHTML = "code:"
rootEl.appendChild(h3)
let ul = document.createElement("ul")
rootEl.appendChild(ul)
c.code.map( h => {
let li = document.createElement("li")
let link = document.createElement("a")
link.href = h
link.innerText = h.replace(/.*\//,'')
li.appendChild(link)
ul.appendChild(li)
})
}
let hr = document.createElement("hr")
rootEl.appendChild(hr)
})
if (location.hash) document.getElementById(location.hash.replace('#','')).scrollIntoView()
})
})
//})
}
getContent()
@ -102,8 +146,9 @@ ideas :
<li>integrate better live messages (via ?allowNtfyFeedbackHUD=true , e.g. https://companion.benetou.fr/index.htm?allowNtfyFeedbackHUD=true )
<li>JSON editing, either as-is or via PmWiki (including raw text within JSON) or with CodeMirror as editor (a plain-text textarea should be enough)
<li>feedback intertwined per demo (based on screenshot/audio recording demos)
<li>richer text rendering
<li>richer text rendering (beyond just clickable link)
<li>couple live messages with inView(targetSelector)
<li>showcase sequentialFiltersInteractionOnReleased
</ul>
<h3>
@ -117,6 +162,7 @@ done :
<li>alternative descriptions https://futuretextlab.info/1st-quarter/ (more verbose but static)
<li>mobile view
<li>linked to editor : https://companion.benetou.fr/demos_editor_example.html
<li>edited list https://companion.benetou.fr/demos_example.html?filename=demos_saved_1742834331999.json
</ul>
</body>

@ -0,0 +1,195 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<script src="https://fabien.benetou.fr/pub/home/future_of_text_demo/engine/dependencies/webdav.js"></script>
</head>
<body>
<h1>Feedback on XR for academic authoring demo</h1>
<div>Please fill in the form freely. You can add a comment per demo, all optional, and at the end global feedback and your email. Every word is appreciated. Write as much or as little as you like. Press send at the bottom to save.</div>
<div id=content></div>
<script>
// insert screenshots (could probably do the same way, i.e. filereader then webdav upload
const webdavURL = "https://webdav.benetou.fr";
const subdirWebDAV = "/fotsave/fot_sloan_companion_public/"
var webdavClient = window.WebDAV.createClient(webdavURL)
const hmdURL = "https://hmd.link/?https://companion.benetou.fr"
const urlParams = new URLSearchParams(window.location.search);
let filename = urlParams.get('filename');
if (!filename) filename = 'demo_q1.json'
// Collects every non-empty '.tosave' field into an object keyed by element id,
// uploads it as a timestamped demos_feedback_*.json next to the demo list,
// then opens the feedback viewer on the freshly saved file.
function save(){
  let newContent = {}
  let saveFilename = "demos_feedback_"+Date.now()+".json"
  // record which demo-list file the feedback refers to (list/order can change)
  newContent.filename = filename
  // forEach, not map: iterating purely for the side effect of filling newContent
  Array.from( document.querySelectorAll('.tosave')).forEach( e => {
    if ( e.value )
      newContent[e.id] = e.value
  })
  saveJSONToWebDAV( saveFilename, newContent )
  console.log( saveFilename, newContent )
  let url = "https://companion.benetou.fr/demos_feedback_viewer_example.html?filename="+saveFilename
  // let url = webdavURL+subdirWebDAV+filename
  // 1s delay gives the WebDAV upload a head start before the viewer fetches the file
  setTimeout( _ => window.open(url, '_blank'), 1000 )
}
// Serializes `content` to JSON and uploads it to the WebDAV share under `filename`.
// Returns the upload promise so callers can await it or attach error handling
// (the previous version stored the floating promise in an implicit global `written`
// through a needless inner async wrapper).
function saveJSONToWebDAV(filename, content){
  return webdavClient.putFileContents(subdirWebDAV+filename, JSON.stringify(content))
}
// Builds the feedback form: finds the demo-list JSON (matched by `filename`)
// in the WebDAV directory, renders one section per named demo (title, optional
// screenshot, description with clickable trailing link, a per-demo comment
// textarea, plus video / username / code links), then appends the global
// feedback textarea, email input and send button after a fixed delay.
async function getContent(){
let rootEl = document.getElementById("content")
const contents = await webdavClient.getDirectoryContents(subdirWebDAV);
// consider instead search https://github.com/perry-mitchell/webdav-client#search
// keep only the directory entry whose name matches the requested file
contents.filter(f => f.basename.endsWith( filename ))
.map(a => {
// NOTE(review): fetched by basename relative to this page — assumes the WebDAV
// files are also reachable from the page's own origin/path; confirm
fetch(a.basename).then( r => r.json() ).then( r => {
// only entries that carry a name become demo sections
r.content.filter( c => c.name ).map( c => {
let h2 = document.createElement("h2")
h2.innerText = c.name
rootEl.appendChild(h2)
if ( c.screenshot ){
let img = document.createElement("img")
img.src = c.screenshot
img.style.height = "200px"
rootEl.appendChild(img)
}
if ( c.description ){
let h3 = document.createElement("h3")
//h3.innerText = c.description //.replace()
// turns a trailing http(s) URL into a clickable link and keeps line breaks
// NOTE(review): inserted as HTML — assumes the demo JSON is trusted content
h3.innerHTML = c.description.replace(/(.*) (http.*)/,'$1 <a href="$2">$2</a>').replaceAll('\n','<br>')
// could be innerHTML instead
// should make link clickable
rootEl.appendChild(h3)
// per-demo comment box, harvested later by save() via the 'tosave' class
let textarea = document.createElement("textarea")
textarea.cols = 40
// wider, floated layout on desktop-sized screens
if ( window.screen.width > 1000 ) {
textarea.style = "float:right;margin-top:-100px"
textarea.cols = 80
}
textarea.rows = 5
// the id doubles as the JSON key on save, hence no spaces
textarea.id = c.name.replaceAll(' ','_')
textarea.classList.add('tosave')
rootEl.appendChild(textarea)
}
if ( c.video ){
let ul = document.createElement("ul")
rootEl.appendChild(ul)
let li = document.createElement("li")
let link = document.createElement("a")
link.href = c.video
link.innerText = "video extract"
ul.appendChild(li)
link.target = "_blank"
li.appendChild(link)
}
if (c.usernames) {
// one list item per username: a local link plus an hmd.link relay link
let ul = document.createElement("ul")
rootEl.appendChild(ul)
c.usernames.map( h => {
let li = document.createElement("li")
let link = document.createElement("a")
link.href = "/index.html?username="+h
link.innerText = "link"
li.appendChild(link)
let spanEl = document.createElement("span")
spanEl.innerText = " "
li.appendChild(spanEl)
let linkHMD = document.createElement("a")
linkHMD.href = hmdURL + "/index.html?username="+h
linkHMD.target = "_blank"
linkHMD.innerText = "(open on other device)"
li.appendChild(linkHMD)
ul.appendChild(li)
})
}
if (c.code) {
// list of source links, labeled by their path's last segment
let h3 = document.createElement("h4")
h3.innerHTML = "code:"
rootEl.appendChild(h3)
let ul = document.createElement("ul")
rootEl.appendChild(ul)
c.code.map( h => {
let li = document.createElement("li")
let link = document.createElement("a")
link.href = h
link.innerText = h.replace(/.*\//,'')
li.appendChild(link)
ul.appendChild(li)
})
}
// visual separator between demo sections
let hr = document.createElement("hr")
rootEl.appendChild(hr)
})
})
})
// appended after a fixed 1s delay so it lands below the async demo sections
// NOTE(review): a slow fetch could still finish after this — confirm acceptable
setTimeout( _ => {
let h3 = document.createElement("h4")
h3.innerHTML = "global feedback, any idea or suggestion, what hardware was used, context for the demo, etc, anything goes :"
rootEl.appendChild(h3)
let textarea = document.createElement("textarea")
//textarea.style = "float:right;margin-top:-100px"
textarea.cols = 80
textarea.rows = 5
textarea.id = 'freeform_feedback'
textarea.classList.add('tosave')
rootEl.appendChild(textarea)
let br = document.createElement("br")
rootEl.appendChild(br)
let span = document.createElement("span")
span.innerText = 'email:'
rootEl.appendChild(span)
// email travels with the rest of the form via the same 'tosave' mechanism
let input = document.createElement("input")
input.classList.add('tosave')
input.id = "email"
rootEl.appendChild(input)
rootEl.appendChild(br.cloneNode())
let button = document.createElement("button")
button.innerText = 'send feedback'
button.onclick = save
rootEl.appendChild(button)
}, 1000)
}
getContent()
</script>
<div id=comments>
<br>
<br>
</div>
<hr>
<h3>
ideas :
</h3>
<ul>
<li>save on local storage (if so, provide a clear all fields button)
<li>accept audio feedback
<li>ntfy on feedback received
</ul>
<h3>
done :
</h3>
<ul>
<li>responsive-ish design
<li>save, including which JSON source file was used (now that experiment list and order can be modified)
<li>principle
</ul>
</body>
</html>

@ -0,0 +1,101 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<script src="https://fabien.benetou.fr/pub/home/future_of_text_demo/engine/dependencies/webdav.js"></script>
<link rel="stylesheet" href="style.css">
</head>
<body>
<h1>Feedback on XR for academic authoring demo</h1>
<div id=content></div>
<script>
// TODO: insert screenshots (could probably be done the same way, i.e. FileReader then WebDAV upload)
const webdavURL = "https://webdav.benetou.fr";
const subdirWebDAV = "/fotsave/fot_sloan_companion_public/"
var webdavClient = window.WebDAV.createClient(webdavURL)
// hmd.link relay, used to open the same page on a headset from another device
const hmdURL = "https://hmd.link/?https://companion.benetou.fr"
const urlParams = new URLSearchParams(window.location.search);
// feedback JSON (written by demos_feedback_example.html) to display; overridable via ?filename=...
let filename = urlParams.get('filename');
if (!filename) filename = 'demos_feedback_1742934066973.json'
// Displays a saved feedback file: locates it in the WebDAV directory, shows
// the source demo-list link, email and freeform feedback, then fetches the
// original demo list to pair each saved comment with a link back to its demo.
async function getContent(){
let rootEl = document.getElementById("content")
const contents = await webdavClient.getDirectoryContents(subdirWebDAV);
// consider instead search https://github.com/perry-mitchell/webdav-client#search
// keep only the directory entry whose name matches the requested feedback file
contents.filter(f => f.basename.endsWith( filename ))
.map(a => {
fetch(a.basename).then( r => r.json() ).then( r => {
// r.filename is the demo-list JSON this feedback was written against
if ( r.filename ){
let spanEl = document.createElement("div")
let linkEl = document.createElement("a")
linkEl.href = "https://companion.benetou.fr/demos_example.html?filename=" + r.filename
linkEl.innerText = r.filename
spanEl.appendChild(linkEl)
//spanEl.innerText = "source file: " + r.filename
rootEl.appendChild(spanEl)
}
if ( r.email ){
let spanEl = document.createElement("div")
spanEl.innerText = "email: " + r.email
rootEl.appendChild(spanEl)
}
if ( r.freeform_feedback ){
let spanEl = document.createElement("div")
spanEl.innerText = "free feedback form: " + r.freeform_feedback
rootEl.appendChild(spanEl)
}
// fetch the original demo list to match per-demo comments with their demos
// NOTE(review): the `r` in `r => r.json()` shadows the feedback object only
// inside that arrow; `r[fixedId]` below still reads the outer feedback JSON
fetch(r.filename).then( r => r.json() ).then( original => {
original.content.map( d => {
if ( d.name ){
// same id normalization as the feedback form (spaces -> underscores)
let fixedId = d.name.replaceAll(' ','_')
if ( r[fixedId] ) {
let spanEl = document.createElement("div")
spanEl.innerText = r[fixedId] + " for "
rootEl.appendChild(spanEl)
let linkEl = document.createElement("a")
linkEl.href = "https://companion.benetou.fr/demos_example.html?filename=" + r.filename + "#"+fixedId
linkEl.innerText = d.name + " in " + r.filename
spanEl.appendChild(linkEl)
}
}
})
})
})
})
}
getContent()
</script>
<div id=comments>
<br>
<br>
</div>
<hr>
<h3>
ideas :
</h3>
<ul>
<li>link back to anchors on original demo viewer page
</ul>
<h3>
done :
</h3>
<ul>
<li>link to proper example, e.g. https://companion.benetou.fr/demos_feedback_viewer_example.html?filename=demos_feedback_1742934066973.json
<li>display feedback
</ul>
</body>
</html>

@ -0,0 +1,75 @@
// ZMK keymap (devicetree syntax) for a Corne-ish Zen style split keyboard:
// QWERTY base layer plus NUMBER (lower, &mo 1) and SYMBOL (raise, &mo 2) layers.
// The commented grids above each `bindings` block mirror the physical key layout.
#include <behaviors.dtsi>
#include <dt-bindings/zmk/keys.h>
#include <dt-bindings/zmk/bt.h>
/ {
chosen {
// full 6-column transform; five-column variant kept commented for reference
zmk,matrix_transform = &default_transform;
//zmk,matrix_transform = &five_column_transform;
};
};
/ {
behaviors {
// hold-tap behavior intended for home-row modifiers (hold = first binding,
// tap = second); NOTE(review): defined but not referenced by any layer
// bindings below — confirm whether it is still wanted
hm: homerow_mods {
compatible = "zmk,behavior-hold-tap";
label = "HOMEROW_MODS";
#binding-cells = <2>;
tapping-term-ms = <150>;
quick-tap-ms = <0>;
flavor = "tap-preferred";
bindings = <&kp>, <&kp>;
};
};
keymap {
compatible = "zmk,keymap";
default_layer {
label = "QWERTY";
// -----------------------------------------------------------------------------------------
// | TAB  |  Q  |  W  |  E  |  R  |  T  |   |  Y  |  U   |  I  |  O  |  P  | BKSP |
// | CTRL |  A  |  S  |  D  |  F  |  G  |   |  H  |  J   |  K  |  L  |  ;  |  '   |
// | SHFT |  Z  |  X  |  C  |  V  |  B  |   |  N  |  M   |  ,  |  .  |  /  | ESC  |
//                    | GUI | LWR | SPC |   | ENT | RSE  | ALT |
bindings = <
&kp TAB   &kp Q &kp W &kp E &kp R &kp T   &kp Y &kp U  &kp I     &kp O   &kp P    &kp BSPC
&kp LCTRL &kp A &kp S &kp D &kp F &kp G   &kp H &kp J  &kp K     &kp L   &kp SEMI &kp SQT
//&kp LCTRL &hm LGUI A &hm LALT S &hm LCTRL D &hm LSHFT F &kp G   &kp H &hm RSHFT J  &hm RCTRL K &hm RALT L   &hm RGUI SEMI &kp SQT
&kp LSHFT &kp Z &kp X &kp C &kp V &kp B   &kp N &kp M  &kp COMMA &kp DOT &kp FSLH &kp ESC
                 &kp LGUI &mo 1 &kp SPACE   &kp RET &mo 2 &kp RALT
>;
};
lower_layer {
label = "NUMBER";
// -----------------------------------------------------------------------------------------
// | TAB  |  1  |  2  |  3  |  4  |  5  |   |  6  |  7  |  8  |  9  |  0  | BKSP |
// |      | BT1 | BT2 | BT3 | BT4 | BT5 |   | LFT | DWN |  UP | RGT |     |      |
// | SHFT | F11 | F12 |     |     |     |   |     | PAGE_DOWN | PAGE_UP |  |     | BTCLR |
//                    | GUI |     | SPC |   | ENT |     | ALT |
bindings = <
&kp TAB    &kp N1       &kp N2       &kp N3       &kp N4       &kp N5         &kp N6   &kp N7   &kp N8 &kp N9    &kp N0 &kp BSPC
&trans     &bt BT_SEL 0 &bt BT_SEL 1 &bt BT_SEL 2 &bt BT_SEL 3 &bt BT_SEL 4   &kp LEFT &kp DOWN &kp UP &kp RIGHT &trans &trans
&kp LSHFT  &kp F11      &kp F12      &trans       &trans       &trans         &trans   &kp PAGE_DOWN &kp PAGE_UP &trans &trans &bt BT_CLR
                          &kp LGUI     &trans       &kp SPACE      &kp RET  &trans   &kp RALT
>;
};
raise_layer {
label = "SYMBOL";
// -----------------------------------------------------------------------------------------
// | TAB  |  !  |  @  |  #  |  $  |  %  |   |  ^  |  &  |  *  |  (  |  )  | BKSP |
// | CTRL |     |     |     |     |     |   |  -  |  =  |  [  |  ]  |  \  |  `   |
// | SHFT | pp  | vUP | vDO |     |     |   |  _  |  +  |  {  |  }  | "|" |  ~   |
//                    | GUI |     | SPC |   | ENT |     | ALT |
bindings = <
&kp TAB   &kp EXCL &kp AT &kp HASH &kp DLLR &kp PRCNT   &kp CARET &kp AMPS  &kp KP_MULTIPLY &kp LPAR &kp RPAR &kp BSPC
&kp LCTRL &trans   &trans &trans   &trans   &trans      &kp MINUS &kp EQUAL &kp LBKT        &kp RBKT &kp BSLH &kp GRAVE
&kp LSHFT &kp C_PLAY_PAUSE &kp K_VOLUME_UP &kp K_VOLUME_DOWN   &trans   &trans      &kp UNDER &kp PLUS  &kp LBRC        &kp RBRC &kp PIPE &kp TILDE
                    	     &kp LGUI &trans   &kp SPACE   &kp RET &trans &kp RALT
>;
};
};
};

@ -0,0 +1,31 @@
// Displays a CSV file in the scene: either the whole file as one note
// (mereology "whole") or, by default, one note per line stacked vertically.
function filterCSV( contentFilename ){
  if ( contentFilename.endsWith(".csv") ) {
    console.log('it is a CSV file', contentFilename)
    // mereology option
    // can be used via e.g. showFile("https://fabien.benetou.fr/?action=source",{ mereology:"whole"})
    // guarded: metadata entry or its options may be absent (other filters check `file` first)
    let openingOptions = (filesWithMetadata[contentFilename] || {}).openingOptions || {}
    fetch( contentFilename )
      .then( r => r.text() ).then( r => {
        console.log( "mereology", openingOptions.mereology )
        switch( openingOptions.mereology ) {
          case "whole":
            addNewNote(r, "0 1.4 -.8")
            break;
          default:
            // one note per CSV line, stacked upward by 10cm increments
            r.split('\n').map( (c,i) => addNewNote(c, "0 "+(1+i/10)+" -.8") )
        }
        AFRAME.scenes[0].emit('csvloaded', contentFilename)
        // to use the event consider :
        //AFRAME.scenes[0].addEventListener("csvloaded", e => console.log(e))
      })
  }
  applyNextFilter( contentFilename )
}
sequentialFilters.push( filterCSV )

@ -0,0 +1,30 @@
// Unpacks a .docx (a zip archive) fetched from the companion, extracts
// word/document.xml and displays its whole text content as a single note.
function filterDocx( contentFilename ){
  let file = filesWithMetadata[contentFilename]
  // NOTE(review): early return also skips applyNextFilter, halting the
  // filter chain for this file — confirm intended
  if (!file) return
  let contentType = file.contentType
  if ( contentType.includes("xml") && contentFilename.includes("_content.docx/") && contentFilename.endsWith(".docx")) {
    console.log('it is a docx packed file', contentFilename)
    fetch( contentFilename )
      .then( r => r.blob() ) // Response.blob() takes no argument
      .then( file => zip.loadAsync(file) )
      .then( f => {
        // console.log(f.files)
        f.files["word/document.xml"].async("string")
          .then( str => new window.DOMParser().parseFromString(str, "text/xml"))
          .then(data => {
            // console.log('docx via packed', data)
            // previously assigned to an implicit global `el`, never used afterwards
            addNewNote( data.childNodes[0].textContent, "-.5 1.2 -0.5" )
            AFRAME.scenes[0].emit('docxxmlloaded', contentFilename)
            // to use the event consider :
            //AFRAME.scenes[0].addEventListener("docxxmlloaded", e => console.log(e))
          })
      });
  }
  applyNextFilter( contentFilename )
}
sequentialFilters.push( filterDocx )

@ -0,0 +1,24 @@
// Displays the text content of an already-unpacked docx XML part as a note.
function filterDocxJSON( contentFilename ){
  let file = filesWithMetadata[contentFilename]
  // NOTE(review): early return also skips applyNextFilter, halting the
  // filter chain for this file — confirm intended
  if (!file) return
  let contentType = file.contentType
  if ( contentType.includes("xml") && contentFilename.includes("_content.docx/") && contentFilename.endsWith(".xml")) {
    console.log('it is a docx file', contentFilename)
    fetch( contentFilename )
      .then(response => response.text())
      .then(str => new window.DOMParser().parseFromString(str, "text/xml"))
      .then(data => {
        // console.log(data)
        // previously assigned to an implicit global `el`, never used afterwards
        addNewNote( data.childNodes[0].textContent, "0 1.2 -0.7" )
        AFRAME.scenes[0].emit('docxxmlloaded', contentFilename)
        // to use the event consider :
        //AFRAME.scenes[0].addEventListener("docxxmlloaded", e => console.log(e))
      });
  }
  applyNextFilter( contentFilename )
}
sequentialFilters.push( filterDocxJSON )

@ -4,7 +4,7 @@ function filterJSONRef( contentFilename ){
let contentType = file.contentType
if ( contentType.includes("json") && contentFilename.endsWith(".json")) {
if ( contentType.includes("json") && contentFilename.startsWith("references_manual_") && contentFilename.endsWith(".json")) {
console.log('it is a manual reference JSON file', contentFilename)
fetch( contentFilename ).then( r => r.json() ).then( json => {
let ref = json["data-objects"]
@ -29,28 +29,54 @@ function filterJSONRef( contentFilename ){
fullEl.setAttribute("rotation", "45 0 0")
fullEl.setAttribute("scale", ".01 .01 .01")
fullEl.classList.add("reference-entry-card")
fullEl.data = r
let backgroundEl = document.createElement("a-box")
backgroundEl.setAttribute("scale", "10 5 .1")
backgroundEl.setAttribute("position", "4.5 0 -.1")
fullEl.appendChild( backgroundEl )
if (r["annote"]) {
let annoteEl = addNewNoteAsPostItNote( r["annote"], "-.3 "+(1+i/20)+" -.5", ".02 .02 .02" )
annoteEl.classList.add("reference-entry-annotate")
}
if (r["note"]) {
let annoteEl = addNewNoteAsPostItNote( r["note"], "-.2 "+(1+i/20)+" -.5", ".02 .02 .02" )
annoteEl.classList.add("reference-entry-note")
}
// if ACM and OA might be available via https://dl.acm.org/doi/pdf/DOI
// could then try to pass as PDF reader argument
// cf pageAsTextViaXML() and related in index.html
// note that it'd still need to fetch then upload via WebDAV
let pdf = r["bibtex-data"]["source-pdf"]
let acmoa = r["bibtex-data"]["free-acm-access"]
if (pdf && acmoa && pdf.startsWith("https://dl.acm.org")) {
let acmoa = r["bibtex-data"]["free-acm-access"] // warning, this is a string, not a boolean
// fallback due to file structure change between v4 and v11
if (!pdf) pdf = r["source-pdf"]
if (!acmoa) acmoa = r["free-acm-access"] // warning, this is a string, not a boolean
if (pdf && acmoa == "true" && pdf.startsWith("https://dl.acm.org")) {
// could then try to fetch content then upload via WebDAV
// should skip if already available
let pdfEl = document.createElement("a-box")
//pdfEl.setAttribute("scale", ".1 .1 .1")
pdfEl.setAttribute("position", "-.9 0 0")
pdfEl.setAttribute("position", "-1.2 0 0") // trying to move away in order to be executable
fullEl.appendChild( pdfEl )
let truncated_filename = "3209542.3209570" // hardcoded example
// should instead try to fetch .xml on saved/pdfxml/ and if 200 then change color
let truncated_filename = pdf.replace(/.*pdf\//,'').replace(/.*\//,'')
// here by convention it would be in "/saved/pdfxml/"
let url = "/saved/pdfxml/"+truncated_filename+".xml"
fetch(url).then( response => {
if (response.ok) {
pdfEl.setAttribute( "color", "purple" )
// could have a showFile() on pinched here
let openCommandEl = addNewNote( "jxr showFile('"+url+"')", "-.6 "+(1+i/20)+" -.5", '.05 .05 .05' )
openCommandEl.classList.add("reference-entry-showfile")
}
})
/*
if (pdf.endsWith( truncated_filename )) {
pdfEl.setAttribute("color", "green" )
pdfEl.setAttribute("color", "purple" )
//pdfEl.setAttribute("value", "jxr console.log('"+truncated_filename+"')" )
// what should become the target then? the cube?
// problematic because it becomes movable
@ -58,15 +84,14 @@ function filterJSONRef( contentFilename ){
// then should add JXR open of target PDF
/*
window.pageastextviaxmlsrc = "https://companion.benetou.fr/saved/pdfxml/"+truncated_filename+".xml"
pageAsTextViaXML()
highlightcommands.setAttribute("visible", true)
roundedpageborders.setAttribute("visible", true)
}
*/
}
}
})
})
}

@ -0,0 +1,32 @@
// Renders the comment-grid of a ZMK .keymap file, one note per layer, and
// attaches highlight helpers to each note.
function filterKeymap( contentFilename ){
  // special filter, done via URL, no local file (for now at least)
  if ( contentFilename.endsWith(".keymap") ) { // not very reliable
    console.log('it is a .keymap file', contentFilename)
    fetch( contentFilename )
      .then( r => r.text() ).then( r => {
        // WARNING : this is NOT a normal keymap parser; it extracts the comment (!)
        // lines that contain a visual representation of each layer
        // see for example https://github.com/Utopiah/zmk-config-zen-2/blob/main/config/corneish_zen.keymap
        // (removed an unused `parsedMap` that split the same lines on '| ' and was never read)
        r.split("_layer").filter( l => l.includes('-----') ).map( l => l.split('\n').filter( l => (l.startsWith('//') && !l.includes('------'))).map(l => l.replace("// ",'') ) ).map( (layer, l) => {
          let el = addNewNote( layer.join('\n') )
          el.id = 'keymap_layer'+l
          el.classList.add( 'keymap_layer' )
          // NOTE(review): both helpers always edit keymap_layer0, not this layer's
          // own element — confirm whether `el` was intended instead
          el.highlightLetter = ( letter ) => { keymap_layer0.setAttribute("value", keymap_layer0.getAttribute("value").replace(' '+letter+' ','_'+letter+'_') ) }
          el.unhighlightLetter = ( letter ) => { keymap_layer0.setAttribute("value", keymap_layer0.getAttribute("value").replace('_'+letter+'_',' '+letter+' ') ) }
          // then available e.g. keymap_layer0.unhighlightLetter("J")
        })
        AFRAME.scenes[0].emit('keymaploaded', contentFilename)
        // to use the event consider :
        //AFRAME.scenes[0].addEventListener("keymaploaded", e => console.log(e))
      })
  }
  applyNextFilter( contentFilename )
}
sequentialFilters.push( filterKeymap )

@ -0,0 +1,36 @@
// see e.g. https://git.benetou.fr/utopiah/text-code-xr-engine/src/branch/fot-sloan-companion/public/index.html#L806
// overall newContent() from that branch of Q3/Q4 2024
// Opens each file listed in a *.layout.json; positions/rotations are not
// applied yet (would require waiting for each file to finish loading).
function filterLayoutJSON( contentFilename ){
  const metadata = filesWithMetadata[contentFilename]
  if (!metadata) return
  const isLayout = metadata.contentType.includes("json")
    && contentFilename.endsWith(".layout.json")
  if ( isLayout ) {
    console.warn('not fully implemented yet, only display file name, not their content with the right pose')
    console.log('it is a JSON layout file', contentFilename)
    fetch( contentFilename )
      .then( response => response.json() )
      .then( entries => {
        console.log( entries )
        // could here rely on showFile for each element but maybe some are NOT files
        // different format than previous save
        // must insure that moved also to the right position/rotation after loading
        for (const entry of entries) {
          showFile( entry.filename )
          // AFRAME.scenes[0].emit('layoutjsonloaded', contentFilename)
          // would have to wait for any file to be loaded...
          // el.setAttribute("position", AFRAME.utils.coordinates.stringify( entry.position ) )
          // el.setAttribute("rotation", AFRAME.utils.coordinates.stringify( entry.rotation ) )
          // el.id = entry.id
        }
        AFRAME.scenes[0].emit('layoutjsonloaded', contentFilename)
        // to use the event consider :
        //AFRAME.scenes[0].addEventListener("layoutjsonloaded", e => console.log(e))
      })
  }
  applyNextFilter( contentFilename )
}
sequentialFilters.push( filterLayoutJSON )

@ -0,0 +1,63 @@
// Renders a Visual-Meta "dynamic view" export: one note per node at its
// exported 2D position, plus lines between glossary phrases whose entries
// mention each other (read from the *_glossaryvisualmetaexport.json sidecar).
function filterMapVisualMetaJSON( contentFilename ){
  let file = filesWithMetadata[contentFilename]
  // NOTE(review): early return also skips applyNextFilter — confirm intended
  if (!file) return
  let contentType = file.contentType
  // the sidecar glossary file follows the same naming scheme as the main export
  let sidecarFile = contentFilename.replace("dynamicview", "glossary")
  const scale = 1/1000
  const xOffset = 1
  const yOffset = 1
  const idprefix = 'visualmetaexport_'
  // normalize a title/phrase into a usable element id; previously this chain was
  // duplicated four times and the prefix hardcoded once instead of using idprefix
  const slugify = s => s.toLowerCase().replaceAll(" ","_").replaceAll(".","_").replaceAll("'","_")
  if ( contentType.includes("json") && contentFilename.endsWith("_dynamicviewvisualmetaexport.json")) {
    console.log('it is a map visualmeta export file', contentFilename)
    fetch( contentFilename ).then( r => r.json() ).then( json => {
      fetch( sidecarFile ).then( r => r.json() ).then( sidecarjson => {
        // console.log( json, sidecarjson )
        json.nodes.map( n => {
          let x = json.layout.nodePositions[ n.identifier ].x * scale + xOffset
          let y = -json.layout.nodePositions[ n.identifier ].y * scale + yOffset // y flipped vs screen coords
          let z = -1/2
          let el = addNewNote( n.title, "" + x + " " + y + " " + z )
          el.id = idprefix + slugify(n.title)
          el.setAttribute("outline-width", 0)
          // optional background color "#7c7c7c"
          // could use onpicked/onreleased to show links from glossary
          // check if any word from title is in glossary
          // if so, display line or highlight visually by removing outline
          // reset onreleased
        })
        // link phrases whose glossary entries mention one another
        Object.entries( sidecarjson.entries ).map( i => i[1].phrase ).map( n => {
          Object.entries( sidecarjson.entries )
            .map( i => { return {phrase:i[1].phrase, entry:i[1].entry} } )
            .filter( i => i.entry.toLowerCase().includes(n.toLowerCase()) )
            .map( i => {
              let el = document.createElement("a-entity")
              el.setAttribute('live-selector-line',
                'start: #'+(idprefix+slugify(n))
                +'; end: #'+(idprefix+slugify(i.phrase))
                +';' )
              el.classList.add('visualmeta_export_link')
              AFRAME.scenes[0].appendChild(el)
            })
        })
        Array.from( document.querySelectorAll("[line]") ).map( el => el.setAttribute("line", "color", "black"))
        // should filter a bit better otherwise take lines from other components and elements e.g. raycaster
        AFRAME.scenes[0].emit('mapvisualmetajsonloaded', contentFilename)
        // to use the event consider :
        //AFRAME.scenes[0].addEventListener("mapvisualmetajsonloaded", e => console.log(e))
      })
    })
  }
  applyNextFilter( contentFilename )
}
sequentialFilters.push( filterMapVisualMetaJSON )

@ -0,0 +1,82 @@
// Same as filterMapVisualMetaJSON but for a zip that packs both the dynamic
// view export and its glossary sidecar (naming mangled by WordPress).
function filterPackedMapVisualMetaJSON( contentFilename ){
  let file = filesWithMetadata[contentFilename]
  // NOTE(review): early return also skips applyNextFilter — confirm intended
  if (!file) return
  let contentType = file.contentType
  // guarded: openingOptions may be absent when the file is opened without options
  let openingOptions = file.openingOptions || {}
  // could overwrite via URL
  const scale = 1/1000
  const xOffset = 0
  const yOffset = 1
  const idprefix = 'visualmetaexport_'
  // normalize a title/phrase into a usable element id (was duplicated inline)
  const slugify = s => s.toLowerCase().replaceAll(' ','_').replaceAll(".","_").replaceAll("'","_")
  if ( contentType.includes("zip") && contentFilename.includes("dynamicviewvisualmetaexport.jsons") ) {
    // naming issue due to WordPress
    console.log('it is a packed map visualmeta export file', contentFilename)
    fetch( contentFilename )
      // see for packed filters/docx_packed.xml.js
      .then( r => r.blob() ) // Response.blob() takes no argument
      .then( file => zip.loadAsync(file) )
      .then( f => {
        // check for optional export directory
        let filenameDynamicView = Object.entries( f.files ).filter( i => i[0].includes("_dynamicviewvisualmetaexport.json") ).map( i => i[0] )[0]
        let filenameGlossary = Object.entries( f.files ).filter( i => i[0].includes("_glossaryvisualmetaexport.json") ).map( i => i[0] )[0]
        f.files[filenameDynamicView].async("string").then( dyn_content => {
          let json = JSON.parse( dyn_content ) // was an implicit global
          f.files[filenameGlossary].async("string").then( gloss_content => {
            let sidecarjson = JSON.parse( gloss_content )
            console.log( json, sidecarjson )
            json.nodes.map( n => {
              let x = json.layout.nodePositions[ n.identifier ].x * scale + xOffset
              let y = -json.layout.nodePositions[ n.identifier ].y * scale + yOffset // y flipped vs screen coords
              let z = -1/2
              let el = addNewNote( n.title, "" + x + " " + y + " " + z )
              el.id = idprefix + slugify(n.title)
              el.setAttribute("outline-width", 0)
              // optional background color "#7c7c7c"
              // could use onpicked/onreleased to show links from glossary
              // check if any word from title is in glossary
              // if so, display line or highlight visually by removing outline
              // reset onreleased
              let definitionFound = Object.entries( sidecarjson.entries ).filter( i => i[1].phrase == n.title ).map( f => f[1].entry )
              // .length check: an empty array is truthy, so the previous test
              // always passed and could set the annotation content to undefined
              if (openingOptions.showDefinitions && definitionFound.length ){ el.setAttribute("annotation", "content", definitionFound[0] ) }
            })
            // link phrases whose glossary entries mention one another
            Object.entries( sidecarjson.entries ).map( i => i[1].phrase ).map( n => {
              Object.entries( sidecarjson.entries )
                .map( i => { return {phrase:i[1].phrase, entry:i[1].entry} } )
                .filter( i => i.entry.toLowerCase().includes(n.toLowerCase()) )
                .map( i => {
                  let el = document.createElement("a-entity")
                  el.setAttribute('live-selector-line',
                    'start: #'+(idprefix+slugify(n))
                    +'; end: #'+(idprefix+slugify(i.phrase))
                    +';' )
                  el.classList.add('visualmeta_export_link')
                  AFRAME.scenes[0].appendChild(el)
                })
            })
            Array.from( document.querySelectorAll("[line]") ).map( el => el.setAttribute("line", "color", "black"))
            // should filter a bit better otherwise take lines from other components and elements e.g. raycaster
            AFRAME.scenes[0].emit('mapvisualmetajsonloaded', contentFilename)
            // to use the event consider :
            //AFRAME.scenes[0].addEventListener("mapvisualmetajsonloaded", e => console.log(e))
          })
        } )
      })
  }
  applyNextFilter( contentFilename )
}
sequentialFilters.push( filterPackedMapVisualMetaJSON )

@ -0,0 +1,42 @@
// Displays a markdown file in the scene according to the `mereology` opening
// option: whole file, per section, per line, or list items only.
function filterMarkdown( contentFilename ){
  // special filter, done via URL, no local file (for now at least)
  if ( contentFilename.endsWith(".md") ) {
    console.log('it is a markdown file', contentFilename)
    // mereology option
    // can be used via e.g. showFile("https://fabien.benetou.fr/?action=source",{ mereology:"whole"})
    // guarded: metadata entry or its options may be absent
    let openingOptions = (filesWithMetadata[contentFilename] || {}).openingOptions || {}
    fetch( contentFilename )
      .then( r => r.text() ).then( r => {
        console.log('markdown', r)
        console.log( "mereology", openingOptions.mereology )
        switch( openingOptions.mereology ) {
          case "whole":
            addNewNote(r, "0 1.4 -.8")
            break;
          case "section":
            r.split(/! /).map( (c,i) => addNewNote(c, "0 "+(1+i/10)+" -.8") ) // untested
            break;
          case "line":
            r.split('\n').reverse().map( (c,i) => addNewNote(c, "0 "+(1+i/10)+" -.8") )
            break;
          case "listonly":
            r.replaceAll('* ','').split('\n').map( (c,i) => addNewNote(c, "0 "+(1+i/10)+" -.8") )
            // based on pmwiki, should also work with + or -
            // the missing break here used to fall through to default,
            // adding every line a second time
            break;
          default:
            r.split('\n').reverse().map( (c,i) => addNewNote(c, "0 "+(1+i/10)+" -.8") )
        }
        AFRAME.scenes[0].emit('markdownloaded', contentFilename)
        // to use the event consider :
        //AFRAME.scenes[0].addEventListener("markdownloaded", e => console.log(e))
      })
  }
  applyNextFilter( contentFilename )
}
sequentialFilters.push( filterMarkdown )

@ -0,0 +1,84 @@
// Displays the text content of an unpacked ODT's XML part as a single note.
function filterOdtJSON( contentFilename ){
  let file = filesWithMetadata[contentFilename]
  // NOTE(review): early return also skips applyNextFilter, halting the
  // filter chain for this file — confirm intended
  if (!file) return
  let contentType = file.contentType
  if ( contentType.includes("xml") && contentFilename.includes("_content.odt/") && contentFilename.endsWith(".xml")) {
    // log previously said "visualmeta export file", a copy-paste leftover
    console.log('it is an odt unpacked file', contentFilename)
    fetch( contentFilename )
      .then(response => response.text())
      .then(str => new window.DOMParser().parseFromString(str, "text/xml"))
      .then(data => {
        console.log(data)
        // previously assigned to an implicit global `el`, never used afterwards
        addNewNote( data.childNodes[0].textContent )
      });
    // (removed ~55 lines of commented-out code copy-pasted from the
    // tapestry.json filter — thumbnails, items, rels tubes — unrelated to ODT)
  }
  applyNextFilter( contentFilename )
}
sequentialFilters.push( filterOdtJSON )

@ -0,0 +1,121 @@
function filterPDFUnpackedXml( contentFilename ){
const path = "/saved/pdfxml/"
let file = filesWithMetadata[contentFilename]
// might have to hijack again
if (!file) return
let contentType = file.contentType
if ( contentType.includes("xml") && contentFilename.includes("pdfxml/") ) {
console.log('it is a pdf unpacked file', contentFilename)
pageAsTextViaXML(contentFilename, page=0)
}
applyNextFilter( contentFilename )
function pageAsTextViaXML(src, page=0){
fetch( src ).then( r => r.text() ).then( txt => {
const rootEl = document.createElement("a-box")
file.filteredEl = rootEl
rootEl.nextPage = (el) => {
// not tested in depth
Array.from( rootEl.querySelectorAll(".highlightabletext") ).map( el => el.remove() )
Array.from( doc.children[0].children[page++].querySelectorAll("text") ).map( (l,n) => {
let tktxt = document.createElement("a-troika-text")
let pos = ""+(l.attributes.left.value*scalingFactor+xOffset) + " " + (1-l.attributes.top.value*scalingFactor+yOffset) + " "+zPos
let scale = "0.045 0.045 0.045"
tktxt.setAttribute("position", pos)
tktxt.setAttribute("font-size", "0.009")
tktxt.setAttribute("color", "black")
tktxt.setAttribute("value", l.textContent)
tktxt.setAttribute("anchor", "left")
tktxt.classList.add("highlightabletext")
rootEl.appendChild(tktxt)
})
}
rootEl.previousPage = _ => console.log( 'test' )
const sheetEl = document.createElement("a-box")
sheetEl.setAttribute("width", "1")
sheetEl.setAttribute("height", "1.2")
sheetEl.setAttribute("depth", ".01")
sheetEl.setAttribute("position", "0.5 -.6 0")
rootEl.appendChild(sheetEl)
rootEl.setAttribute("width", ".01")
rootEl.setAttribute("height", ".01")
rootEl.setAttribute("depth", ".01")
rootEl.setAttribute("color", "brown")
rootEl.setAttribute("target", "") // "hidden" by the highlightable texts as targets
// can temporarily remove target from .highlightabletext elements
// alternatively could have a handle, a la lens demo
let disabledHighlight = true // for testing manipulation
disabledHighlight=false
// highlight ball still hidden due to thickness
rootEl.setAttribute("position", "-0.5 1.7 -.6")
rootEl.id = "page_from_"+src.replaceAll("https://","").replaceAll("/","_")
rootEl.classList.add ( "page_from_pdf" )
AFRAME.scenes[0].appendChild(rootEl)
const parser = new DOMParser();
let doc = parser.parseFromString(txt, "application/xml")
const scalingFactor = 1/1000 // used for position of text and images
// could also use x/y/z offsets
// probably easier to append to an entity, either empty or used as (white) background
const xOffset = 0.1 // changed by rootEl position
const yOffset = -1.0
const zPos = 0.01
//Array.from( doc.children[0].children[page].querySelectorAll("text") ).map( (l,n) => addNewNote(l.textContent, ""+(l.attributes.left.value*scalingFactor+xOffset) + " " + (1-l.attributes.top.value*scalingFactor+yOffset) + " "+zPos, "0.045 0.045 0.045", "highlighttextfromxml_"+n, "highlighttextfromxmlitem" ) )
Array.from( doc.children[0].children[page].querySelectorAll("text") ).map( (l,n) => {
// addNewNote(l.textContent, ""+(l.attributes.left.value*scalingFactor+xOffset) + " " + (1-l.attributes.top.value*scalingFactor+yOffset) + " "+zPos, "0.045 0.045 0.045", "highlighttextfromxml_"+n, "highlighttextfromxmlitem" ) )
let tktxt = document.createElement("a-troika-text")
let pos = ""+(l.attributes.left.value*scalingFactor+xOffset) + " " + (1-l.attributes.top.value*scalingFactor+yOffset) + " "+zPos
let scale = "0.045 0.045 0.045"
tktxt.setAttribute("position", pos)
tktxt.setAttribute("originalposition", pos)
tktxt.setAttribute("originalpage", page)
// FIXME
//tktxt.setAttribute("originalsource", fileContent.meta.metadata['dc:title'])
//tktxt.setAttribute("originalidentifier", fileContent.meta.metadata['dc:identifier'])
tktxt.setAttribute("font-size", "0.009")
tktxt.setAttribute("color", "black")
if (!disabledHighlight) tktxt.setAttribute("target", "")
tktxt.classList.add("highlightabletext")
tktxt.setAttribute("onpicked", "console.log(selectedElements.at(-1).element.getAttribute('value'))")
tktxt.setAttribute("onreleased", "let el = selectedElements.at(-1).element; if (true) el.setAttribute('color', highlightColor); el.setAttribute('rotation', ''); el.setAttribute('position', el.getAttribute('originalposition') )")
// resets back...
// change color
// only if above a certain threshold, e.g. held a long time, or released close to specific other item
// could also toggle coloring
// can be based on coloring pick with jxr
tktxt.setAttribute("value", l.textContent)
tktxt.setAttribute("anchor", "left")
rootEl.appendChild(tktxt)
})
Array.from( doc.children[0].children[page].querySelectorAll("image") ).map( (l,n) => {
let el = document.createElement("a-box")
// is position via center of element so should offset it
el.setAttribute("src", path+l.attributes.src.value); // somehow set to #transparent...
el.setAttribute("width", l.attributes.width.value*scalingFactor);
el.setAttribute("height", l.attributes.height.value*scalingFactor);
el.setAttribute("depth", .01);
el.setAttribute("target", "")
el.id = "highlightimagefromxml_"+n
el.classList.add("highlightimagefromxmlitem")
el.classList.add("highlightabletext")
let w = l.attributes.width.value*scalingFactor
let h = l.attributes.height.value*scalingFactor
el.setAttribute("position", ""+ ""+(w/2+l.attributes.left.value*scalingFactor+xOffset)+" "+ (-h/2+1-l.attributes.top.value*scalingFactor+yOffset)+ " "+zPos)
rootEl.appendChild(el)
})
AFRAME.scenes[0].emit('pdfxmlloaded', contentFilename)
// to use the event consider :
//AFRAME.scenes[0].addEventListener("visualmetajsonloaded", e => console.log(e))
} );
}
}
sequentialFilters.push( filterPDFUnpackedXml )

@ -0,0 +1,49 @@
function videoAPIPeerTube( contentFilename ){
// Filter for PeerTube search API results: fetches the JSON listing and
// displays one grabbable thumbnail image per video returned.
// API URLs are not necessarily registered in filesWithMetadata (no local
// file), so the metadata entry is dereferenced defensively — the previous
// unconditional file.contentType access threw for such URLs and broke the
// filter chain before applyNextFilter could run.
let file = filesWithMetadata[contentFilename]
// mereology option, currently unused here but kept available like in other filters
// can be used via e.g. showFile("https://fabien.benetou.fr/?action=source",{ mereology:"whole"})
let openingOptions = file?.openingOptions
// filtering, only applying what's next to URLs matching the PeerTube search endpoint
if ( contentFilename.includes("/api/v1/search/videos") ) { // peertube API, not specific enough
// could verify instead via header i.e x-powered-by: PeerTube
console.log('it is a PeerTube API call', contentFilename)
fetch( contentFilename ).then( r => r.json() ).then( json => {
const rootEl = document.createElement("a-entity")
rootEl.jsonfromapi = json // rarely so big that it makes any difference to have it twice, on here and on individual items
// having a root element to attach on makes manipulation later on easier and safer, bringing a context for later modifications
if (file) file.filteredEl = rootEl
rootEl.test = _ => console.log( 'test' )
// function that can then be called on the created element later on
// e.g. filesWithMetadata["https://companion.benetou.fr/saved/pdfxml/3209542.3209570.xml"].filteredEl.nextPage('ok')
AFRAME.scenes[0].appendChild(rootEl)
json.data.map( (d,i) => {
let el = document.createElement("a-image")
rootEl.appendChild(el)
el.setAttribute("position", ""+(-1+Math.random())+" "+(Math.random()+1)+" -0.5" )
el.setAttribute("scale", ".2 .1 .1")
// thumbnailPath is relative to the instance, so rebuild from the API origin
el.setAttribute("src", contentFilename.replace(/\/api.*/,'') + d.thumbnailPath)
el.setAttribute("target", "")
el.jsonfromapi = d
el.id = "video_"+d.name // NOTE(review): names with spaces make awkward ids — confirm nothing queries these via CSS selectors
})
AFRAME.scenes[0].emit('videoapipeertube_loaded', contentFilename)
// to use the event consider :
//AFRAME.scenes[0].addEventListener("videoapipeertube_loaded", e => console.log(e))
} );
}
applyNextFilter( contentFilename )
// can stop here or move on to the next filter that might or not be applied
}
sequentialFilters.push( videoAPIPeerTube )
// adding this to the list of filters to go through, order matters
// typically one would be generic filters first then more specific ones after

@ -0,0 +1,50 @@
function filterPmWiki( contentFilename ){
// Filter for PmWiki pages fetched via their source view (?action=source).
// special filter, done via URL, no local file (for now at least) — therefore
// there may be NO filesWithMetadata entry for this URL, so it is accessed
// defensively below instead of assuming it exists (previously this threw a
// TypeError on the unconditional .openingOptions access).
if ( contentFilename.endsWith("?action=source") ) { // not very reliable
console.log('it is a pmwiki file', contentFilename)
// mereology option, defaulting to an empty object when there is no metadata
let openingOptions = filesWithMetadata[contentFilename]?.openingOptions ?? {}
// can be used via e.g. showFile("https://fabien.benetou.fr/?action=source",{ mereology:"whole"})
fetch( contentFilename )
.then( r => r.text() ).then( r => {
// console.log('pmwiki', r)
// TODO
// diff support to replay over time, e.g. https://fabien.benetou.fr/PIMVRdata/ItemsStates?action=diff
// index as a special case https://fabien.benetou.fr/Site/AllRecentChanges?action=source
// rendered previews e.g https://vatelier.benetou.fr/MyDemo/newtooling/textures/fabien.benetou.fr_PIMVRdata_ItemsStates.png
// server side graph generation e.g. https://vatelier.benetou.fr/MyDemo/newtooling/wiki_graph.json
// very specific
let elements = []
console.log( "mereology", openingOptions.mereology )
switch( openingOptions.mereology ) {
case "whole":
elements.push( addNewNote(r, "0 1.4 -.8") )
break;
case "section":
r.split(/! /).map( (c,i) => elements.push( addNewNote(c, "0 "+(1+i/10)+" -.8") ) ) // untested
break;
case "line":
r.split('\n').map( (c,i) => elements.push( addNewNote(c, "0 "+(1+i/10)+" -.8") ) )
break;
case "listonly":
default:
r.replaceAll('* ','').split('\n').map( (c,i) => elements.push( addNewNote(c, "0 "+(1+i/10)+" -.8") ) )
// this is specific to https://fabien.benetou.fr/PersonalInformationStream/WithoutNotesMay2025 and similar
}
// tag created elements so they can be selected or cleared as a group later
elements.map( el => { el.classList.add('pmwikifilter'); el.classList.add('filterimport') } )
// for something more generic see https://www.pmwiki.org/wiki/PmWiki/PageFileFormat
AFRAME.scenes[0].emit('pmwikiloaded', contentFilename)
// to use the event consider :
//AFRAME.scenes[0].addEventListener("pmwikiloaded", e => console.log(e))
})
}
applyNextFilter( contentFilename )
}
sequentialFilters.push( filterPmWiki )

@ -0,0 +1,40 @@
function filterQ2LayoutJSON( contentFilename ){
// Restores a layout saved in a previous session: for every saved entry, find
// the matching note element (by its "value" attribute) and put back its
// saved position and rotation.
const file = filesWithMetadata[contentFilename]
if (!file) return
// can be removed for URLs as those are not with metadata
const contentType = file.contentType
// mereology option
const openingOptions = filesWithMetadata[contentFilename].openingOptions
// can be used via e.g. showFile("https://fabien.benetou.fr/?action=source",{ mereology:"whole"})
// filtering, only applying what's next to a certain content type and/or with filename filtering with .startsWith() .endsWith() .includes() or regex
if ( contentType.includes("json") && contentFilename.endsWith("_q2layout.json") ) {
console.log('it is a Q2 Layout JSON file', contentFilename)
fetch( contentFilename ).then( r => r.json() ).then( savedDataFromPreviousSession => {
// console.log( savedDataFromPreviousSession )
// candidate notes are the same for every saved entry, query them once
const candidates = Array.from( document.querySelectorAll('.'+classNameItemsToSave) )
savedDataFromPreviousSession.forEach( savedData => {
// search by value as in theory those are constant (but not necessarily unique, even though usually are)
const matches = candidates.filter( noteEl => noteEl.getAttribute("value") == savedData.value )
matches.forEach( foundNoteEl => {
foundNoteEl.setAttribute("position", AFRAME.utils.coordinates.stringify(savedData.position) )
foundNoteEl.setAttribute("rotation", AFRAME.utils.coordinates.stringify(savedData.rotation) )
} )
// should also debug the unfound ones
} )
AFRAME.scenes[0].emit('q2layoutjsonloaded', contentFilename)
// to use the event consider :
//AFRAME.scenes[0].addEventListener("templateexampleloaded", e => console.log(e))
} );
}
applyNextFilter( contentFilename )
// can stop here or move on to the next filter that might or not be applied
}
sequentialFilters.push( filterQ2LayoutJSON )
// adding this to the list of filters to go through, order matters
// typically one would be generic filters first then more specific ones after

@ -0,0 +1,112 @@
function filterReteBiByBit( contentFilename ){
// Filter for Rete/BitByBit files: a "*rete-runner.bitbybit" script is
// executed directly, otherwise the saved node graph is displayed as
// wireframe text boxes linked by live-selector lines.
let file = filesWithMetadata[contentFilename]
if (!file) return
// can be removed for URLs as those are not with metadata
let contentType = file.contentType
// mereology option
let openingOptions = filesWithMetadata[contentFilename].openingOptions
// can be used via e.g. showFile("https://fabien.benetou.fr/?action=source",{ mereology:"whole"})
// filtering on filename suffixes only; the former contentType.includes("") check was always true so it was removed
if ( contentFilename.endsWith("-rete.bitbybit") || contentFilename.endsWith("rete-runner.bitbybit") ) {
console.log('it is a Rete BitByBit file', contentFilename)
// assuming non minified code... but maybe works the same way
// TODO try with minified code, would be preferred in order to do execution after
const filenameIdPrefix = "retebitbybit_id_"
if (contentFilename.endsWith("rete-runner.bitbybit") ) {
fetch( contentFilename ).then( r => r.text() ).then( txt => {
executeBitbybit(txt)
// can keep track of generate meshes via
// AFRAME.scenes[0].object3D.children.filter( o => o.name.includes("brepMesh") )
// filtering with before the call and after (even though async)
})
} else {
fetch( contentFilename ).then( r => r.json() ).then( txt => {
// the export wraps the graph as a JSON string in its "script" property
let nodes = Object.entries( JSON.parse(txt.script).nodes )
//console.log( nodes )
const rootEl = document.createElement("a-entity")
file.filteredEl = rootEl
rootEl.execute = _ => console.warn( 'Not implemented for now, see executeBitbybit()')
// TODO see executeBitbybit() , needs runner code too
// function that can then be called on the created element later on
// e.g. filesWithMetadata["https://companion.benetou.fr/saved/pdfxml/3209542.3209570.xml"].filteredEl.nextPage('ok')
nodes.map( n => {
let el = document.createElement("a-troika-text")
let boxEl = document.createElement("a-box")
boxEl.setAttribute("wireframe", "true" )
boxEl.setAttribute("width", .3 )
boxEl.setAttribute("height", .4 )
boxEl.setAttribute("depth", .01 )
boxEl.setAttribute("position", "0 -.18 0" )
el.setAttribute("value", n[1].customName )
el.setAttribute("font-size", ".05")
el.setAttribute("target", "")
// editor coordinates scaled down and recentered in front of the user
let x = 1+n[1].position[0]/1000
let y = 2-n[1].position[1]/1000
let z = -1
el.setAttribute("position", ""+x+" "+y+" "+z)
rootEl.appendChild(el)
el.appendChild(boxEl)
el.id = filenameIdPrefix + n[1].id
})
nodes.map( p => {
let n = p[1]
let inputs = Object.entries( n.inputs ).map( o => o[1] )
if (inputs.length) {
// console.log( n.id, inputs )
inputs.map( c => {
// an input may have no connection; skip it instead of throwing on connections[0]
if (!c.connections || !c.connections.length) return
let el = document.createElement("a-entity")
el.setAttribute('live-selector-line',
'start: #'+(filenameIdPrefix+n.id)
+'; end: #'+(filenameIdPrefix+c.connections[0].node)
+';' )
el.classList.add('retebitbybit_export_link')
rootEl.appendChild(el)
})
}
})
AFRAME.scenes[0].appendChild(rootEl)
AFRAME.scenes[0].emit('retebitbybitloaded', contentFilename)
// to use the event consider :
//AFRAME.scenes[0].addEventListener("retebitbybitloaded", e => console.log(e))
} );
}
}
applyNextFilter( contentFilename )
// can stop here or move on to the next filter that might or not be applied
}
sequentialFilters.push( filterReteBiByBit )
// adding this to the list of filters to go through, order matters
// typically one would be generic filters first then more specific ones after
// Loads the BitByBit runner (lite, ThreeJS flavor) from CDN and executes the
// given BitByBit script inside the current A-Frame scene.
// NOTE(review): statement order matters — window.THREEJS must alias
// window.THREE BEFORE the dynamic import, presumably so the runner binds to
// A-Frame's Three.js instance rather than loading its own; do not reorder.
async function executeBitbybit(code){
window.THREEJS = window.THREE;
await import("https://cdn.jsdelivr.net/gh/bitbybit-dev/bitbybit-assets@0.20.4/runner/bitbybit-runner-lite-threejs.js")
// reuse the A-Frame scene graph instead of letting the runner create its own
const aframeScene = document.querySelector('a-scene').object3D;
const runnerOptions = {
canvasZoneClass: 'myCanvasZone',
enableOCCT: true, // OCCT kernel enabled (brep/CAD operations)
enableJSCAD: false,
enableManifold: false,
loadFonts: ['Roboto'],
externalThreeJSSettings: {
scene: aframeScene,
camera: AFRAME.systems.camera, // assumes the runner accepts A-Frame's camera system here — TODO confirm
}
};
const runner = window.bitbybitRunner.getRunnerInstance();
const { bitbybit, Bit, camera, scene, renderer } = await runner.run( runnerOptions);
// exposed globally for console experimentation and later calls
window.bitbybit = bitbybit;
window.Bit = Bit;
window.runner = runner
runner.executeScript(code)
}

@ -0,0 +1,61 @@
// https://cdnjs.com/libraries/sql.js
// https://github.com/sql-js/sql.js?tab=readme-ov-file#loading-a-database-from-a-server
function filterTemplateExample( contentFilename ){
// Template filter kept as a starting point for new filters: fetches the file
// as text, shows it whole or line by line depending on the mereology option,
// then attaches an empty root entity usable for later manipulations.
const file = filesWithMetadata[contentFilename]
if (!file) return
// can be removed for URLs as those are not with metadata
const contentType = file.contentType
// mereology option
const openingOptions = filesWithMetadata[contentFilename].openingOptions
// can be used via e.g. showFile("https://fabien.benetou.fr/?action=source",{ mereology:"whole"})
// filtering, only applying what's next to a certain content type and/or with filename filtering with .startsWith() .endsWith() .includes() or regex
if ( contentType.includes("xml") && contentFilename.includes("pdfxml/") ) {
console.log('it is a pdf unpacked file', contentFilename)
/*
// example of zip support
fetch( contentFilename ).then( r => r.blob(r) ).then( file => zip.loadAsync(file) ).then( f => {
console.log(f.files)
let filenameFromZip = Object.entries( f.files ).filter( i => i[0].includes("partialfilename.ext") ).map( i => i[0] )[0]
f.files[filenameFromZip].async("string").then( dyn_content => {
json = JSON.parse( dyn_content )
})
})
*/
fetch( contentFilename ).then( r => r.text() ).then( fetchedText => {
console.log( "mereology", openingOptions.mereology )
if ( openingOptions.mereology === "whole" ) {
addNewNote(fetchedText, "0 1.4 -.8")
} else {
// "listonly" and any other value: one note per line, bullets stripped
fetchedText.replaceAll('* ','').split('\n').map( (c,i) => addNewNote(c, "0 "+(1+i/10)+" -.8") )
}
const rootEl = document.createElement("a-entity")
// having a root element to attach on makes manipulation later on easier and safer, bringing a context for later modifications
file.filteredEl = rootEl
rootEl.test = _ => console.log( 'test' )
// function that can then be called on the created element later on
// e.g. filesWithMetadata["https://companion.benetou.fr/saved/pdfxml/3209542.3209570.xml"].filteredEl.nextPage('ok')
AFRAME.scenes[0].appendChild(rootEl)
AFRAME.scenes[0].emit('templateexampleloaded', contentFilename)
// to use the event consider :
//AFRAME.scenes[0].addEventListener("templateexampleloaded", e => console.log(e))
} );
}
applyNextFilter( contentFilename )
// can stop here or move on to the next filter that might or not be applied
}
sequentialFilters.push( filterTemplateExample )
// adding this to the list of filters to go through, order matters
// typically one would be generic filters first then more specific ones after

@ -8,11 +8,14 @@ function filterTextModifications( contentFilename ){
console.log('it is an modification scheme', contentFilename)
console.log('try to pass it to parametersViaURL(data)')
fetch( contentFilename ).then( r => r.text() ).then( txt => {
console.log(txt.split(/$\n/).map(l=>{
let jsonFromSRT = txt.split(/$\n/).map(l=>{
let subItem = l.split('\n')
let timings = subItem[1].split(' --> ')
return { id:Number(subItem[0]), timingStart:timings[0], timingEnd:timings[1], text:subItem[2] }
} ))
} )
console.log( jsonFromSRT )
addNewNote( jsonFromSRT.map( i => i.text).join('\n') )
// could instead delegate that later as event
})
}
applyNextFilter( contentFilename )

@ -6,6 +6,9 @@ function filterSVGImage( contentFilename ){
if ( contentType.includes("image") && contentFilename.endsWith(".svg")) {
console.log('it is an SVG image', contentFilename)
// could also try to parse the SVG itself, for now delegated potentially via event
AFRAME.scenes[0].emit('svgloaded', contentFilename)
}
applyNextFilter( contentFilename )
}

@ -0,0 +1,86 @@
function filterTapestryJSON( contentFilename ){
// Filter for Tapestry exports: shows the start-view thumbnail, each item
// (text as a post-it note, otherwise its thumbnail image) and the relations
// between items as tubes.
let file = filesWithMetadata[contentFilename]
if (!file) return
let contentType = file.contentType
// TODO support zipped version directly, cf e.g. mapvisualmeta.jsons.zip.js
/*
// example of zip support
fetch( contentFilename ).then( r => r.blob(r) ).then( file => zip.loadAsync(file) ).then( f => {
console.log(f.files)
let filenameFromZip = Object.entries( f.files ).filter( i => i[0].includes("partialfilename.ext") ).map( i => i[0] )[0]
f.files[filenameFromZip].async("string").then( dyn_content => {
json = JSON.parse( dyn_content )
})
})
*/
if ( contentType.includes("json") && contentFilename.startsWith("tapestry/") && contentFilename.endsWith("root.json")) {
console.log('it is a tapestry export file', contentFilename)
fetch( contentFilename ).then( r => r.json() ).then( json => {
// console.log( json )
let thumbnailPath = json.thumbnail.replace("file:/","/tapestry/")
let el = document.createElement("a-image")
el.setAttribute("position", "0 1.5 0.7")
el.setAttribute("rotation", "0 180 0")
el.setAttribute("src", thumbnailPath)
el.setAttribute("width", json.startView.size.width/10000)
el.setAttribute("height", json.startView.size.height/10000)
AFRAME.scenes[0].appendChild(el)
// item id -> created element (null when the item produced no element)
let positioned = {}
json.items.map( i => {
let x = 1-i.position.x/1000
let y = 2-i.position.y/1000
let el = null
if (i.type == "text") {
el = addNewNoteAsPostItNote( i.text, x + " " + y + " 1", ".1 .1 .1", "tapestry_"+i.id, "tapestry_text", true, "0 180 0")
// addNewNoteAsPostItNote( text, position=`-0.2 1.1 -0.1`, scale= "0.1 0.1 0.1", id=null, classes="notes", visible="true", rotation="0 0 0" ){
}
if (i.thumbnail) {
let thumbnailPath = i.thumbnail.source.replace("file:/","/tapestry/")
el = document.createElement("a-image")
el.setAttribute("position", x + " " + y + " 1")
el.setAttribute("rotation", "0 180 0")
el.setAttribute("src", thumbnailPath)
el.setAttribute("width", i.size.width/1000)
el.setAttribute("height", i.size.height/1000)
AFRAME.scenes[0].appendChild(el)
}
positioned[i.id] = el
if ( el ) el.setAttribute("target", "")
})
setTimeout( _ => {
// must be done after the items are positioned
json.rels.map( i => {
console.log( i.from.itemId, i.to.itemId )
let startEl = positioned[i.from.itemId]
let endEl = positioned[i.to.itemId]
// items that were neither text nor had a thumbnail have no element;
// skip their relations instead of throwing on a null getAttribute
if (!startEl || !endEl) return
let el = document.createElement("a-tube")
let start = startEl.getAttribute("position")
let end = endEl.getAttribute("position")
let mid = new THREE.Vector3()
mid.copy ( start )
mid.add ( end )
mid.divideScalar(2)
mid.z += 0.3 // slight bulge so overlapping links remain distinguishable
let path = [start, mid, end].map( p => AFRAME.utils.coordinates.stringify( p ) ).join(", ")
el.setAttribute("radius", 0.01)
el.setAttribute("path", path)
AFRAME.scenes[0].appendChild(el)
})
}, 1000 )
AFRAME.scenes[0].emit('tapestryjsonloaded', contentFilename)
// to use the event consider :
//AFRAME.scenes[0].addEventListener("visualmetajsonloaded", e => console.log(e))
})
}
applyNextFilter( contentFilename )
}
sequentialFilters.push( filterTapestryJSON )

@ -0,0 +1,61 @@
function filterTemplateExample( contentFilename ){
// Template filter kept as a starting point: fetches the file as text,
// renders it according to the requested mereology, then exposes the raw
// text on an empty root entity for later use by other elements.
const file = filesWithMetadata[contentFilename]
if (!file) return
// can be removed for URLs as those are not with metadata
const contentType = file.contentType
// mereology option
const openingOptions = filesWithMetadata[contentFilename].openingOptions
// can be used via e.g. showFile("https://fabien.benetou.fr/?action=source",{ mereology:"whole"})
// filtering, only applying what's next to a certain content type and/or with filename filtering with .startsWith() .endsWith() .includes() or regex
if ( contentType.includes("xml") && contentFilename.includes("pdfxml/") ) {
console.log('it is a pdf unpacked file', contentFilename)
/*
// example of zip support
fetch( contentFilename ).then( r => r.blob(r) ).then( file => zip.loadAsync(file) ).then( f => {
console.log(f.files)
let filenameFromZip = Object.entries( f.files ).filter( i => i[0].includes("partialfilename.ext") ).map( i => i[0] )[0]
f.files[filenameFromZip].async("string").then( dyn_content => {
json = JSON.parse( dyn_content )
})
})
*/
fetch( contentFilename ).then( r => r.text() ).then( text => {
console.log( "mereology", openingOptions.mereology )
// one renderer per supported mereology, anything unknown falls back to per-line
const showWhole = t => addNewNote(t, "0 1.4 -.8")
const showPerLine = t => t.replaceAll('* ','').split('\n').map( (c,i) => addNewNote(c, "0 "+(1+i/10)+" -.8") )
const render = ( openingOptions.mereology === "whole" ) ? showWhole : showPerLine
render( text )
const rootEl = document.createElement("a-entity")
// having a root element to attach on makes manipulation later on easier and safer, bringing a context for later modifications
rootEl.fromfile = text
// to make the data usable, better with JSON or per element but still potentially useful by other elements
file.filteredEl = rootEl
rootEl.test = _ => console.log( 'test' )
// function that can then be called on the created element later on
// e.g. filesWithMetadata["https://companion.benetou.fr/saved/pdfxml/3209542.3209570.xml"].filteredEl.nextPage('ok')
AFRAME.scenes[0].appendChild(rootEl)
AFRAME.scenes[0].emit('templateexampleloaded', contentFilename)
// to use the event consider :
//AFRAME.scenes[0].addEventListener("templateexampleloaded", e => console.log(e))
} );
}
applyNextFilter( contentFilename )
// can stop here or move on to the next filter that might or not be applied
}
sequentialFilters.push( filterTemplateExample )
// adding this to the list of filters to go through, order matters
// typically one would be generic filters first then more specific ones after

@ -0,0 +1,41 @@
function filterVisualMetaJSON( contentFilename ){
// Filter for VisualMeta exports: lays out the article title, the full
// document text (headings emphasized), the glossary entries and the heading
// outline as notes in space.
const file = filesWithMetadata[contentFilename]
if (!file) return
const contentType = file.contentType
if ( contentType.includes("json") && contentFilename.endsWith(".visualmetaexport.json")) {
console.log('it is a visualmeta export file', contentFilename)
fetch( contentFilename ).then( r => r.json() ).then( json => {
//console.log( json )
const titleEl = addNewNote( json["visual-meta-bibtex-self-citation"].article.title, "0 1.8 -.3" )
// body text, bottom-up so the first line ends on top
const bodyLines = json["visual-meta-full-document-text"].text.split("\n").reverse()
bodyLines.forEach( (e, i) => {
const el = addNewNote( e, "0 "+(1+i/40)+" -.5" )
if ( e.startsWith("#") ){
//el.addEventListener("object3dset", _ => el.setAttribute("font-size", el.getAttribute("font-size")*20 ) )
// not yet set, events loaded and object3dset do not help
el.setAttribute("font-size", 0.3)
el.setAttribute("value", e.replaceAll("#","") )
// should be a bit more subtle...
}
})
// glossary column on the left
Object.values( json["Glossary"].entries ).forEach( (e, i) => addNewNote( e.phrase + ' ' + e.entry, "-1.3 "+(1+i/10)+" -.6" ) )
// heading outline: deeper levels get a smaller font and are shifted right
Object.values( json["document-headings"] ).reverse().forEach( (e, i) => {
const el = addNewNote( e.name, "-1.1 "+(1+i/10)+" -.6" )
const level = Number(e.level.replace("level",""))
el.setAttribute("font-size", level/10 )
el.addEventListener("object3dset", _ => el.object3D.translateX( 1-level/10 ) )
})
AFRAME.scenes[0].emit('visualmetajsonloaded', contentFilename)
// to use the event consider :
//AFRAME.scenes[0].addEventListener("visualmetajsonloaded", e => console.log(e))
})
}
applyNextFilter( contentFilename )
}
sequentialFilters.push( filterVisualMetaJSON )

@ -0,0 +1,337 @@
<h1>Documentation of parameters</h1>
The experience can be configured via its URL. This allows for customization by anyone while also allowing the result to be shared back with anyone else. There is no code modification needed, no saving needed. The URL itself becomes the modified experience.
<br>
<br>
Within that context there are 3 main query parameters to understand, namely <b>username</b>, <b>set</b> and <b>showfile</b>. Specifically :
<ul>
<li><code>username</code> : set a pre-defined experiment with its dedicated interface, data, etc. Note that some do require more parameters. An example could be <a target=_blank href="https://companion.benetou.fr/index.html?username=demoqueueq1">https://companion.benetou.fr/index.html?username=demoqueueq1</a> which allowed for a sequence of experiments. It required no additional parameters. Alternatively <a target=_blank href="https://companion.benetou.fr/index.html?username=q2_step_refcards_filtering&emulatexr=true">https://companion.benetou.fr/index.html?username=q2_step_refcards_filtering&emulatexr=true</a> has both a username (here q2_step_refcards_filtering ) and another parameter (emulatexr with value true). See the <a href="#username">dedicated list</a> below.
<li><code>set</code> : modify any element of the page. Typically this is done to visually modify something, showing or hiding elements. An example could be <a target=_blank href="https://companion.benetou.fr/index.html?set_a-sky_color=purple&showfile=Fortress.glb">https://companion.benetou.fr/index.html?set_a-sky_color=purple&showfile=Fortress.glb</a> which set the color of the sky to purple and show a file that is a 3D model of a castle where the user starts their experience. See the <a href="#set">dedicated list</a> below.
<li><code>showfile</code> : display the content of a file. This is done thanks to filters (see recorded video) and allows for a relatively wide range of files. The most basic ones, e.g. 3D model, show 3D models in space but some filters allow parsing of specific file formats, e.g. PmWiki lists, docx text content, zipped exports, etc. An example could be <a target=_blank href="https://companion.benetou.fr/index.html?username=q2_visualmetaexport_map_via_wordpress&showfile=https://futuretextlab.info/wp-content/uploads/Frode-dynamicviewvisualmetaexport.jsons_.zip">a remote VisualMeta export</a> hosted on a WordPress instance. The filter unpacks the zip file and display its content while making each item interactable. Note also that some specific formats, e.g. _modifications.txt files, can contain set parameters and thus multiple modifications back. Shown files can also contain layouts of displayed content in order to resume the state of a past session, e.g. <a target=_blank href="https://companion.benetou.fr/index.html?username=q2_annotated_bibliography_week2&showfile=q2_annotated_bibliography_week2_test_q2layout.json">https://companion.benetou.fr/index.html?username=q2_annotated_bibliography_week2&showfile=q2_annotated_bibliography_week2_test_q2layout.json</a>.
</ul>
<script>
const baseURL = 'https://companion.benetou.fr/index.html'
</script>
<h2>Note on live documentation of experiments</h2>
<div>
Most links should work, yet surely some will not work, won't make sense outside of XR, or need some interactions.
</div>
<br>
<div>
A lot of the <code>?set</code> examples (see below) do not show a visual difference as we decided at the end of the 1st quarter to hide a lot of elements to facilitate demonstrations. This unfortunately had the side effect of preventing those examples from working later on, despite them working back then. For example at the end of Q2 <code>?set_IDenvironment_gltf-model=world-bake.glb</code> does not show the 3D model, despite the file being present and showfile working, because we are now hiding the element with ID environment.
From an experimentation perspective it is arguably not useful to "fix" those historical links, as doing so will most likely prevent newer ones from working, and thus rather unpredictably. It is nonetheless important to know that they did work.
</div>
<br>
<div>
Regardless of those technical limitations please do feel free to report links that seem, according to you, not to work as expected. Please do provide:
<ol>
<li>the URL (full including all parameters used),
<li>hardware used, browser used (e.g. Vision Pro with OS version 123 and browser version 456),
<li>date and hour of test (as things are live, maybe something was edited as you tried)
	<li>the current behavior (what actually happened),
	<li>the expected behavior (what you expected to see),
<li>any supporting materials you think would be useful, e.g. video recording, screenshots, etc.
	</ol>
</div>
<h2>Parameters</h2>
<div id=parameters_to_expand>
<ul>
<li>allowNtfyFeedbackHUD
<li>emulatexr
<li>forcecontrollers
<li>itemsfile
<li>partialfilename
<li>query
<li>remote_keyboard_group // should be URL encoded, assuming for now alphanum only
<li>shareerrors
<li>showdebug
<li>showdefinitions
<li>showdemoexample
<li>sourceFromNextDemo
<li>speedup_emulatexr_test
<li>username
</ul>
<script>
// could make URLs clickable, even though some won't work or show visible changes
Array.from( parameters_to_expand.querySelectorAll('li') ).map( p => {
return // none of these are useful without the right value
// NOTE: the early return above intentionally disables the link creation
// below; the code is kept for reference should per-parameter example
// values become available later
let el = document.createElement('a')
el.href = baseURL + '?' + p.innerText
el.target = '_blank'
el.innerText = p.innerText
p.innerText = ''
p.appendChild( el )
})
</script>
</div>
Note that some parameters (e.g. <code>emulatexr</code>) are only used in conjunction with other parameters. Here <code>emulatexr</code> works with <code>username</code>, itself within only a range of values; see below.
<br>
<br>
Some parameters added after the creation of this documentation (early June 2025) are not documented.
<h3>How to populate that list</h3>
Ran server side
<pre>grep urlParams.get /transition/webdav/data/fotsave/fot_sloan_companion_public/index.html | sed "s/.*get//" | sort | uniq</pre>
<a id=username></a>
<h2>Username values</h2>
for username which is itself a just a parameter
<div id=usernamevalues_to_expand>
<ul>
<li>cubetester
<li>demoqueueq1
<li>icon_tags
<li>instructionsonhands
<li>jsonrefmanualtester
<li>metatester10032025
<li>metatester13032025
<li>poweruser
<li>refoncubetester
<li>ring_discovery
<li>ring_discovery_with_keyboard
<li>ring_highlights
<li>skating_rings
<li>spreadsheetcolumns
<li>tabletest
<li>temple_test
<li>thicknesstesteruser
</ul>
<h3>Quarter 1</h3>
<ul>
<li>q1_step_audio
<li>q1_step_highlights
<li>q1_step_refcards
<li>q1_step_screenshot
<li>q1_step_showfile
<li>q1_step_urlcustom
</ul>
<h3>Quarter 2</h3>
<ul>
<li>q2_annotated_bibliography
<li>q2_annotated_bibliography_week2
<li>q2_arcade
<li>q2_bbox_per_filter_source
<li>q2_drop_for_graph
<li>q2_fingersmenu
<li>q2_handswap
<li>q2_immersive_console
<li>q2_json_collaborations
<li>q2_keydrumsticks
<li>q2_keymap
<li>q2_lego_map
<li>q2_lense
<li>q2_most_recent_file
<li>q2_noneuclidian
<li>q2_nouploadfile
<li>q2_ntfy_keyboard_with_keymap_visual_feedback
<li>q2_onrelease_lookat
<li>q2_os_keyboard
<li>q2_pasting
<li>q2_picker
<li>q2_remote_ntfy_keyboard
<li>q2_ring_keyboard
<li>q2_secondarypinch_singlehanded
<li>q2_secondarypinch_singlehanded_spatial
<li>q2_spatialknowledgeobject
<li>q2_step_contextuallayouts
<li>q2_step_end
<li>q2_step_highlight
<li>q2_step_jsonedit
<li>q2_step_layout_animationtests
<li>q2_step_refcards_filtering
<li>q2_step_start
<li>q2_step_volumetric_frames
<li>q2_visualmetaexport
<li>q2_visualmetaexport_map
<li>q2_visualmetaexport_map_via_wordpress
<li>q2_visualmetaexport_map_via_wordpress_with_keyboard
<li>q2_visualmetaexport_map_via_wordpress_with_lookat
<li>q2_wrist_rotations
<li>q2_yubikeyotp
</ul>
<script>
// make URLs clickable, even though some won't work or show visible changes
// (wraps each listed username in a link opening the experience with it)
for (const item of usernamevalues_to_expand.querySelectorAll('li')) {
const link = document.createElement('a')
link.href = baseURL + '?username=' + item.innerText
link.target = '_blank'
link.innerText = item.innerText
item.innerText = ''
item.appendChild( link )
}
</script>
</div>
<h3>How to populate that list</h3>
Ran server side
<pre>grep username /transition/webdav/data/fotsave/fot_sloan_companion_public/index.html | sed "s/.*== //" | sed "s/) {//" | grep -v dictionaryForCompletion | sort | uniq</pre>
<a id=emulatorxr></a>
<h2>emulatexr working with Username values</h2>
and for test scenarios only, which use the emulatexr parameter
<div id=emulaterxrvalues_to_expand>
<ul>
<li>q2_lense
<li>q2_step_layout_animationtests
<li>q2_step_refcards_filtering
<li>q2_step_volumetric_frames
</ul>
<script>
// make URLs clickable, even though some won't work or show visible changes
for (const item of emulaterxrvalues_to_expand.querySelectorAll('li')) {
  const link = document.createElement('a')
  link.href = baseURL + '?emulatexr=true&username=' + item.innerText
  link.target = '_blank'
  link.innerText = item.innerText
  item.innerText = ''
  item.appendChild( link )
}
</script>
</div>
<h3>How to populate that list</h3>
Ran server side
<pre>head -300 /transition/webdav/data/fotsave/fot_sloan_companion_public/index.html | grep username | sed "s/.*== //" | sed "s/) {//" | grep -v dictionaryForCompletion | sort | uniq</pre>
there are quite a few more from Q1, specifically via parametersViaURL, because that one https://git.benetou.fr/utopiah/spasca-fot-sloan-q1/src/branch/main/data/index.html#L602 can do a lot, letting anybody change what is shown or not, how, etc
<a id=set></a>
<h2>Set values</h2>
<h3>Syntax</h3>
<ul>
<li>selector: ID prefixed with the literal text "ID", e.g. IDenvironment (not #environment, as # is not valid in a URL query parameter!), or .classname
<li>attribute name (e.g. color)
<li>value, (e.g. blue)
</ul>
For example <code>?set_.pannel_color=red</code> will set to "red" the attribute named "color" for all elements of class name "pannel".
<h3>Example of values</h3>
<ul>
<li><code>color=blue</code> can be a string of HTML colors
<li><code>gltf-model=world-bake.glb</code> can be a relative or absolute URL (where CORS is supported)
<li><code>visible=false</code> to show or hide an element, a boolean string that can be either true or false (not True or False or TRUE or FALSE!)
<li><code>src=https://webdav.benetou.fr/fotsave/fot_sloan_companion_public/world-bake.glb</code> can be a full URL (where CORS is supported)
<li><code>src=pano4s.png</code> can also be the relative URL of an image, using it as texture
<li><code>scale=1%201%201</code> as a scale for width height and depth, e.g. here a uniform scale of 1 (URI encoded, where %20 becomes an empty space " ")
<li><code>position=0%201%200</code> as metric position along X, Y and Z axis, X=-1 is 1 to the left of the starting position, Y=1 is 1m up and Z=-1 is 1m ahead
<li><code>rotation=0%20-30%200</code> as angle rotation in degrees along X, Y and Z axis relative to the object center, here 0 deg pitch, -30 yaw and 0 roll (see <a href="https://aframe.io/docs/1.7.0/components/rotation.html">details</a>)
</ul>
Your browser will automatically URI encode your URL, thus one can type <code>position=0 1.4 -1</code> and get <code>position=0%201.4%20-1</code>
<br>
<br>
To get a URL to later on use as a parameter value consider using the upload function of the prototype <a target=_blank href="https://companion.benetou.fr">https://companion.benetou.fr</a> which in turn makes the resulting file available at <code>https://companion.benetou.fr/filename</code>
<h3>Values used</h3>
<div id=setvalues_to_expand>
<ul>
<li>?set_.manuscript_color=blue
<li>?set_.notes_visible=false
<li>?set_IDenvironment_visible=false&set_.pannel_color=blue&set_a-sky_color=grey&showfile=Fortress.glb
<li>?set_IDenvironment_visible=false&set_.pannel_color=blue&set_a-sky_color=grey&showfile=Fortress.glb&showfile=augmented_paper.pdf-0.jpg
<li>?set_IDenvironment_visible=false&set_.pannel_color=blue&set_a-sky_color=grey&showfile=Fortress.glb&showfile=augmented_paper.pdf-1.jpg
<li>?set_IDenvironment_visible=false&set_.pannel_color=blue&set_a-sky_color=grey&showfile=Fortress.glb&showfile=fabien_modifications_test.txt
<li>?set_IDenvironment_visible=false&showfile=Apartment.glb
<li>?set_IDmanuscript_color=blue
<li>?set_IDmanuscript_color=lightbrown
<li>?set_IDmanuscript_color=lightyellow
<li>?set_a-sky_src=Solstice-sunrise-cylinder-105-degrees.png
<li>?set_a-sky_src=Solstice-sunrise-cylinder-105-degrees.png
<li>?set_a-sky_src=Solstice-sunrise-cylinder-105-degrees.png&set_IDgroundfor360_visible=true
<li>?set_a-sky_src=Solstice-sunrise-cylinder-105-degrees.png&set_IDgroundfor360_visible=true
<li>?set_a-sky_src=faded-ground-keithm.jpg
<li>?set_a-sky_src=faded-ground-keithm.jpg&set_IDgroundfor360_visible=true
<li>?set_a-sky_src=office%20windows%20closed%20edited.jpg
<li>?set_a-sky_src=office%20windows%20closed%20edited.jpg
<li>?set_a-sky_src=office-windows-closed-edited25.png
<li>?set_a-sky_src=pano3s.png
<li>?set_a-sky_src=pano4s.png
<li>?set_a-sky_src=spacemed.jpg&set_IDgroundfor360_visible=true
<li>?set_a-sky_src=spacemed.jpg&set_IDgroundfor360_visible=true
<li>?set_a-sky_src=spacemed.jpg&set_IDgroundfor360_visible=true
<li>?set_a-sky_src=spacemed.jpg&set_IDgroundfor360_visible=true&set_IDgroundfor360_scale=2%202%202
<li>?set_a-sky_src=spacemed.jpg&set_IDgroundfor360_visible=true&set_IDgroundfor360_scale=20%202%2020
<li>?set_environmentsky_color=green
<li>?set_environmentsky_color=red
</ul>
Known problematic ones (usually not a bug, just not visible change)
<ul>
<li>?set_.notes_visible=false&set_IDpanopticonpannels_visible=false
<li>?set_IDenvironment_visible=false&set_.pannels_color=blue
<li>?set_IDenvironment_visible=false&set_IDpanopticonpannels_visible=false
<li>?set_IDenvironment_src=https://webdav.benetou.fr/fotsave/fot_sloan_companion_public/world-bake.glb
<li>?set_IDenvironment_visible=false&set_.pannel_color=blue
<li>?set_IDenvironment_visible=false&set_.pannel_color=blue&set_a-sky_color=grey
<li>?set_IDenvironment_gltf-model=world-bake.glb
<li>?set_IDenvironment_gltf-model=world-bake.glb&set_IDenvironment_scale=1%201%201
<li>?set_IDenvironment_gltf-model=world-bake.glb&set_IDenvironment_scale=1%201%201&set_IDenvironment_position=0%200%200
<li>?set_IDenvironment_gltf-model=world-bake.glb&set_IDenvironment_scale=1%201%201&set_IDenvironment_position=0%201%200&set_IDenvironment_rotation=0%20-30%200
<li>?set_IDenvironment_gltf-model=world-bake.glb&set_IDenvironment_scale=1%201%201&set_IDenvironment_position=0%201%200&set_IDenvironment_rotation=0%200%200
<li>?set_IDenvironment_gltf-model=world-bake.glb&set_IDenvironment_scale=1%201%201&set_IDenvironment_position=0%201%200&set_IDenvironment_rotation=0%2030%200
<li>?set_IDenvironment_gltf-model=world-bake.glb&set_IDenvironment_scale=1%201%201&set_IDenvironment_position=0%202%20-7&set_IDenvironment_rotation=0%20-30%200
<li>?set_IDenvironment_gltf-model=world-bake.glb&set_IDenvironment_scale=1%201%201&set_IDenvironment_position=0%203%200&set_IDenvironment_rotation=0%20-30%200
<li>?set_.pannel_color=blue
<li>?set_.pannel_color=red
<li>?set_.environment_visible=false&set_.panopticonpannels_visible=false
<li>?set_IDpanopticonpannels_position=0%200%200&set_IDpanopticonpannels_rotation=0%2045%200
<li>?set_IDpanopticonpannels_position=0%200%200&set_IDpanopticonpannels_rotation=0%2090%200
<li>?set_IDpanopticonpannels_position=0%200.5%200
<li>?set_IDpanopticonpannels_position=0%200.5%200&set_IDpanopticonpannels_rotation=0%2045%200
<li>?set_IDpanopticonpannels_position=0%202%200
<li>?set_panopticonpannels_position=0%201%200
<li>?set_panopticonpannels_scale=.1%20.1%20.1
<li>?set_panopticonpannels_scale=2%202%202
<li>?set_IDenvironment_src=url(world-bake.glb)
<li>?set_IDenvironment_src=world_bake.glb
<li>?set_environment_glb=Fortress.glb
<li>?set_environment_glb=url(Fortress.glb)
<li>?set_environment_gltf-model=url(Fortress.glb)
<li>?set_environment_gltf-model=url(Fortress.glb)&set_environment_position=0%20-1%200
<li>?set_environment_gltf-model=url(Fortress.glb)&set_environment_position=0%20-10%200
<li>?set_environment_gltf-model=url(Fortress.glb)&set_environment_position=0%200%200
<li>?set_environment_visible=false&set_panopticonpannels_visible=false
<li>?set_environment_visible=false&set_panopticonpannels_visible=false
</ul>
<script>
// make URLs clickable, even though some won't work or show visible changes
// (forEach, not map: this is pure DOM side effects, no mapped result is used)
Array.from( setvalues_to_expand.querySelectorAll('li') ).forEach( p => {
  let el = document.createElement('a')
  // here the li text already contains the full query string (starts with "?set_")
  el.href = baseURL + p.innerText
  el.target = '_blank'
  el.innerText = p.innerText
  p.innerText = ''
  p.appendChild( el )
})
</script>
</div>
<h3>How to populate that list</h3>
Ran client side
<pre>sqlite3 /home/fabien/Prototypes/places.sqlite "select url from moz_places where url like '%?set_%';" | sort | uniq</pre>
<hr>
<h2>See also</h2>
<ul>
<li>listing of Q1 demos https://companion.benetou.fr/demos_example.html?filename=demo_q1.json
<li>listing of Q2 demos (still to be edited) https://companion.benetou.fr/demos_example.html?filename=demo_q2.json
<li>to provide feedback on demo sets https://companion.benetou.fr/demos_feedback_example.html
<li>to visually edit a demo set https://companion.benetou.fr/demos_editor_example.html
</ul>

@ -8,9 +8,18 @@ generalize showGestureDebug to any joint, not just thumb-tip of right hand
*/
// consider which gestures are more inclusive, more minimalist to be more accessible
// Gesture description in the microglyph format (see https://lig-microglyph.imag.fr below):
// a thumb Extension glyph, valid both in Air and in Contact.
// NOTE(review): assigned without let/const, so these become globals — presumably
// intentional so other scripts can read them; confirm before scoping them.
targetGesture = {"microgesture":{"type":"glyph","action":"Extension","context":["Contact","Air"],"parameters":{"pressure":{"start":null,"end":null,"type":"no_end"},"amplitude":{"start":null,"end":null,"type":"no_end"},"time":{"start":null,"end":null,"leftType":"none","rightType":"bigger"}},"actuator":[["thumb"]],"phalanx":[]}}
// supports both hands
otherTargetGesture = {"microgesture":{"type":"seq","quantificators":{"x":[[["thumb"],["index"],["middle"],["ring"],["pinky"]],[["thumb"],["index"],["middle"],["ring"],["pinky"]]],"y":[]},"seq":{"type":"and","operands":[{"type":"glyph","action":"Any","context":["Air","Contact"],"parameters":{"pressure":{"start":null,"end":null,"type":"no_end"},"amplitude":{"start":null,"end":null,"type":"no_end"},"time":{"start":null,"end":null,"leftType":"none","rightType":"bigger"}},"actuator":[["x0"]],"contact":{"type":"contact","action":"Contact","parameters":{"pressure":{"start":null,"end":null,"type":"no_end"},"amplitude":{"start":null,"end":null,"type":"no_end"},"time":{"start":null,"end":null,"leftType":"none","rightType":"bigger"}},"actuator":[["x1"]]},"phalanx":[]},{"type":"glyph","action":"Any","context":["Air","Contact"],"parameters":{"pressure":{"start":null,"end":null,"type":"no_end"},"amplitude":{"start":null,"end":null,"type":"no_end"},"time":{"start":null,"end":null,"leftType":"none","rightType":"bigger"}},"actuator":[["x1"]],"contact":{"type":"contact","action":"Contact","parameters":{"pressure":{"start":null,"end":null,"type":"no_end"},"amplitude":{"start":null,"end":null,"type":"no_end"},"time":{"start":null,"end":null,"leftType":"none","rightType":"bigger"}},"actuator":[["x0"]]},"phalanx":[]}]}}}
// name = closed gap touch
// text description = Press the specified fingers together.
// different from "finger pinch" which is just between thumb and another finger, at specified locations
// from https://lig-microglyph.imag.fr
// Joint-name building blocks matching the WebXR hand-tracking joint naming scheme.
const fingersNames = ["index-finger", "middle-finger", "ring-finger", "pinky-finger","thumb"]
const tips = fingersNames.map( f => f+"-tip" )
const thumbParts = ["metacarpal", "phalanx-proximal", "phalanx-distal"] // no phalanx-intermediate for thumb
@ -114,6 +123,212 @@ function drawPoints(points){
return el
}
// gesture(s) to detect, callback on detection, by default debugging via console or new debuggraph
// should also emit event
// might need some form of hierarchy, so that composable gestures don't overlap or re-use components from other gestures
// e.g. if there is a fingertip to metacarpal gesture for the index, don't re-write the same for the little finger
// same for other hand, i.e. left vs right
// could be computationally interesting too, e.g. stop checking for a gesture if part of it failed already
AFRAME.registerComponent('cube-pull', {
  // Snaps the #cubetest box onto the right wrist whenever the palm is roughly
  // "flat", i.e. the wrist rotation on all three axes is close to 0.
  init: function () {
    // throttle: checking every 50ms is plenty for gesture detection
    this.tick = AFRAME.utils.throttleTick(this.tick, 50, this);
  },
  tick: function (t, dt) {
    const myScene = AFRAME.scenes[0].object3D
    if (!myScene.getObjectByName("l_handMeshNode") ) return
    const rightHand = myScene.getObjectByName("r_handMeshNode")
    if (!rightHand) return // right hand mesh can appear later than the left one
    const wrist = rightHand.parent.getObjectByName("wrist")
    // fixed copy-paste bug: rotation.y was summed twice and rotation.x never used
    let sum = Math.abs(wrist.rotation.x) + Math.abs(wrist.rotation.y) + Math.abs(wrist.rotation.z)
    console.log( sum )
    if ( sum < .3 ) cubetest.setAttribute("position", AFRAME.utils.coordinates.stringify( wrist.position ) ) // doesn't look good, cube on wrist is moving quite a bit too
    // could emit event too
    // could check if all joints have close to 0 rotation on ...
    // are roughly on the same y-plane of the wrist (facing up or down)
  }
})
AFRAME.registerComponent('verticalflatpalmrighthand', {
  // Sends this.data.command to the [isoterminal] element when the right palm is
  // roughly flat (wrist rotation sum < .3), and a newline when clearly rotated
  // (sum > 1). A shared 1s refractory period (window.lastGesture) inhibits repeats.
  schema: {
    command: {type: 'string'},
  },
  init: function () {
    this.tick = AFRAME.utils.throttleTick(this.tick, 50, this);
    window.lastGesture = Date.now() // to initialize
  },
  tick: function (t, dt) {
    const myScene = AFRAME.scenes[0].object3D
    if (!myScene.getObjectByName("l_handMeshNode") ) return
    const rightHand = myScene.getObjectByName("r_handMeshNode")
    if (!rightHand) return // right hand mesh can appear later than the left one
    const wrist = rightHand.parent.getObjectByName("wrist")
    // fixed copy-paste bug: rotation.y was summed twice and rotation.x never used
    let sum = Math.abs(wrist.rotation.x) + Math.abs(wrist.rotation.y) + Math.abs(wrist.rotation.z)
    console.log( sum )
    //document.querySelector('[isoterminal]').emit("send", [sum + (window.lastGesture+1000>now)]) // +"\n"]) // problematic for special commands
    //document.querySelector('[isoterminal]').emit("send", ["\n"]) // kind of horizontal
    if ( sum < .3 ) {
      // spams the terminal
      // could emit event too
      let now = Date.now()
      if ( now - window.lastGesture > 1000 ){
        document.querySelector('[isoterminal]').emit("send", [this.data.command]) // +"\n"]) // problematic for special commands
        window.lastGesture = now
        AFRAME.scenes[0].emit('gesture', {date:now, type:'vertical flat palm', value:sum})
      }
      // no event if inhibited due to refractory period
    }
    if ( sum > 1 ) {
      let now = Date.now()
      if ( now - window.lastGesture > 1000 ){
        document.querySelector('[isoterminal]').emit("send", ["\n"]) // kind of horizontal
        window.lastGesture = now
        AFRAME.scenes[0].emit('gesture', {date:now, type:'horizontal flat palm', value:sum})
      }
      // no event if inhibited due to refractory period
    }
    // could check if all joints have close to 0 rotation on ...
    // are roughly on the same y-plane of the wrist (facing up or down)
  }
})
AFRAME.registerComponent('gestures', {
  // Placeholder orchestrator: intended to eventually run every gesture detector
  // in sequence and emit 'gesture' / 'gesturecontext' events on the scene.
  init: function () {
    // throttled polling; 50ms might need tuning (too often? not often enough?)
    this.tick = AFRAME.utils.throttleTick(this.tick, 50, this);
  },
  // consider also tock, cf https://aframe.io/docs/1.7.0/core/component.html#before-after
  tick: function (t, dt) {
    // Intentionally empty for now. Planned flow:
    //  - respect some order or hierarchy between gestures
    //    (optimisation? reconciliation? cascading?)
    //  - evaluate context, e.g. distance of wrist to a target object, or time,
    //    or any other condition; could be defined elsewhere as long as it
    //    respects a specific format, like filters/converters/etc
    //  - for each detected gesture (A, B, C, ...) emit an event, e.g.
    //    AFRAME.scenes[0].emit('gesture', {date:now, type:'horizontal flat palm', value:sum})
    //    or context (unsure if it truly has to be separated)
    //    AFRAME.scenes[0].emit('gesturecontext', {date:now, type:'right wrist in volume of target' })
  },
})
// Debug sinks for the custom events emitted by the gesture components.
// NOTE: the component name keeps the original 'gesture-listneres' spelling (sic)
// because it is referenced by that exact string elsewhere in this file.
AFRAME.registerComponent('gesture-listneres', {
  events: {
    gesture(event) {
      console.log( 'gesture captured:', event.detail )
    },
    gesturecontext(event) {
      console.log( 'gesture context captured:', event.detail )
    }
  }
})
AFRAME.registerComponent('positional-context', {
  // While the right wrist is within `threshold` meters of `target`, installs the
  // component named by `attribute` on the scene with "command:<value>"; removes
  // it again once the wrist leaves that volume.
  schema: {
    target: {type: 'selector'},
    attribute: {type: 'string'},
    value: {type: 'string'},
    threshold: {type: 'float'}
  },
  init: function () {
    this.tick = AFRAME.utils.throttleTick(this.tick, 500, this);
  },
  tick: function (t, dt) {
    const myScene = AFRAME.scenes[0].object3D
    if (!myScene.getObjectByName("l_handMeshNode") ) return
    const rightHand = myScene.getObjectByName("r_handMeshNode")
    if (!rightHand) return // right hand mesh can appear later than the left one
    const wrist = rightHand.parent.getObjectByName("wrist").position
    // console.log ( this.data.target.object3D.position, this.data.threshold, wrist.distanceTo( this.data.target.object3D.position ) < this.data.threshold )
    if ( wrist.distanceTo( this.data.target.object3D.position ) < this.data.threshold ){
      // could emit event too
      AFRAME.scenes[0].setAttribute(this.data.attribute, "command:"+ this.data.value)
      // wrong... this should still delegate instead to a gesture (or gestures) that would THEN conditionally pass the command
      // fixed: `now` was not defined in this scope (ReferenceError); use Date.now()
      AFRAME.scenes[0].emit('gesturecontext', {date:Date.now(), type:'right wrist in volume of target' })
      // could be used to check for enter/leave based on previous state until now
    } else {
      AFRAME.scenes[0].removeAttribute(this.data.attribute)
    }
  }
})
// One-shot setup after 1s: wires the experimental gesture components for
// specific test usernames / hosts. Relies on globals: username, AFRAME,
// addCubeWithAnimations, and DOM id #contextsphere / #cubetest.
setTimeout( _ => {
  // username based to isolate testing
  if (username && username == "cubepull") {
    AFRAME.scenes[0].setAttribute("cube-pull", "")
  }
  // multi-users as multi-gestures?
  // cascading gestures
  console.log ( window.location, window.location.search.includes("xrsh") )
  //if ( window.location.search.includes("xrsh") ) {
  if ( window.location.host.includes("fabien.benetou.fr") ) {
    // URL modified by XRSH
    // if (username && username == "xrsh") {
    let el = document.createElement("a-sphere")
    el.id = "contextsphere"
    el.setAttribute("radius", "0.3") // fails on .3 or 0.3
    //el.setAttribute("scale", "0.3)
    el.setAttribute("wireframe", "true")
    el.setAttribute("position", "0 1.4 -1")
    AFRAME.scenes[0].appendChild(el)
    console.log( 1337 )
    let cube = document.createElement("a-box")
    cube.setAttribute("target", "") // no effect, probably over written by XRSH
    cube.setAttribute("color", "green")
    cube.id = "cubetest"
    cube.setAttribute("scale", ".1 .1 .1")
    cube.setAttribute("position", "0 1.4 -1")
    AFRAME.scenes[0].appendChild(cube)
    AFRAME.scenes[0].setAttribute("positional-context", "target:#contextsphere; threshold:0.3; attribute:verticalflatpalmrighthand; value: ls -l\\n;")
    AFRAME.scenes[0].setAttribute('gesture-listneres','')
    // gesture to
    // go back in history (then down again)
    // to execute
    /*
    // enter
    document.querySelector('[isoterminal]').emit("send", ["\n"])
    // arrow keyup
    document.querySelector('[isoterminal]').emit("send", ["\x1b[A"])
    // arrow keydown
    document.querySelector('[isoterminal]').emit("send", ["\x1b[B"])
    */
  }
  // that can also be visible, e.g. wireframe
  if (username && username == "cubepullwithincontext") {
    let cube = addCubeWithAnimations()
    cube.setAttribute("target", "")
    //cube.setAttribute("visible", "false")
    // fixed: was assigned without declaration, leaking an implicit global
    // (the earlier `let el` lives in a different block scope)
    let el = document.createElement("a-sphere")
    el.id = "contextsphere"
    el.setAttribute("radius", .3)
    el.setAttribute("wireframe", "true")
    el.setAttribute("position", "0 1.4 -1")
    AFRAME.scenes[0].appendChild(el)
    AFRAME.scenes[0].setAttribute("positional-context", "target:#contextsphere; threshold:0.3; attribute:cube-pull;")
  }
  // that can also be visible, e.g. wireframe
  // could do so with example of index finger tip within range of targets, if so make a visual change
  // on within cube, off if outside of it
  // could change the cube color to show the difference too
  // try perf on Quest1
}, 1000 )
// should be a component instead...
setTimeout( _ => {
const myScene = AFRAME.scenes[0].object3D

File diff suppressed because one or more lines are too long

@ -0,0 +1,9 @@
// inspired by http://expressjs.com/en/guide/using-middleware.html
// Example on-release filter: deliberately disabled (guard constant is false) so
// it documents the middleware shape without changing anything yet.
function colorChange( el ){
  const enabled = false
  if (enabled) el.setAttribute("color", "lime")
  // could test to filter more, e.g.
  // if ( el.getAttribute("value")?.includes("change my color to red") )
}
sequentialFiltersInteractionOnReleased.push( colorChange )

@ -0,0 +1 @@
There is a lot of attention rightly focused on the many aspects of Artificial Intelligence already in use and being developed. From generative AI for text, images and 3D to AI for image and speech recognition and AI for analysis and so on, the impact on how we learn, think and communicate has already been fundamental. There is also another tech revolution going on, and so far the attention paid to it is primarily as a gaming, social and media consumption technology: that of Virtual and Augmented Reality (VR & AR), most prominently on the market as the Meta Quest and the Apple Vision Pro.

@ -1,695 +0,0 @@
{
"data-objects": [
{
"object-id": "alexander:1978:apl",
"object-type": "reference data",
"bibtex-type": "@book",
"bibtex-data": {
"citeKey": "alexander:1978:apl",
"author": [
"Alexander, Christopher"
],
"title": "A Pattern Language: Towns, Buildings, Construction",
"volume": "",
"pages": "1216",
"editor": [
""
],
"publisher": "OUP USA",
"address": "",
"year": "1978",
"doi": "",
"isbn": "0195019199",
"keywords": [
"design patterns"
],
"note": "Webtwo ipsum etsy lanyrd meevee glogster, joyent kno. Sifteo etsy waze odeo, kazaa appjet.",
"annote": "",
"source-url": "https://global.oup.com/academic/product/a-pattern-language-9780195019193",
"source-pdf": "",
"source-html": "",
"open-access": "false",
"free-acm-access": "false"
}
},
{
"object-id": "atzenbeck:2018:mia",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "atzenbeck:2018:mia",
"author": [
"Atzenbeck, Claus",
"Roßner, Daniel",
"Tzagarakis, Manolis"
],
"title": "Mother: An Integrated Approach to Hypertext Domains",
"booktitle": "Proceedings of the 29th on Hypertext and Social Media",
"publisher": "ACM",
"address": "New York, NY, USA",
"pages": "145–149",
"year": "2018",
"doi": "10.1145/3209542.3209570",
"isbn": "978-1-4503-5427-1",
"location": "Baltimore, MD",
"keywords": [
"asgard",
"cb-ohs",
"hel",
"midgard",
"hypertext infrastructure",
"mother",
"navigational hypertext",
"open hypermedia systems"
],
"note": "Appjet omgpop babblely heroku zillow, zapier yammer. Scribd woopra flickr shopify qeyno hojoki wikia, chegg udemy oooj kno.",
"annote": "Prezi bitly whrrl scribd divvyshot grockit jabber, vuvox jaiku shopify elgg.",
"source-url": "",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/3209542.3209570",
"open-access": "true",
"free-acm-access": "true"
}
},
{
"object-id": "atzenbeck:2019:ham",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "atzenbeck:2019:ham",
"author": [
"Atzenbeck, Claus",
"Nürnberg, Peter J."
],
"title": "Hypertext as Method",
"booktitle": "Proceedings of the 30th ACM Conference on Hypertext and Social Media",
"publisher": "Association for Computing Machinery",
"address": "New York, NY, USA",
"pages": "29–38",
"year": "2019",
"doi": "10.1145/3342220.3343669",
"isbn": "9781450368858",
"location": "Hof, DE",
"keywords": [
"research communities",
"structures",
"infrastructure",
"ai",
"intelligence",
"hypertext",
"man-machine",
"hypertext history",
"augmentation",
"intellect",
"context"
],
"note": "",
"annote": "Zapier chegg elgg stypi yoono elgg wikia zooomr yuntaa, movity xobni loopt stypi lanyrd foodzie ngmoco. kazaa kiko foodzie.",
"source-url": "https://dl.acm.org/doi/10.1145/3342220.3343669",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/3342220.3343669",
"source-html": "",
"open-access": "true",
"free-acm-access": "true"
}
},
{
"object-id": "atzenbeck:2023:btr",
"object-type": "reference data",
"bibtex-type": "@article",
"bibtex-data": {
"citeKey": "atzenbeck:2023:btr",
"author": [
"Atzenbeck, Claus",
"Herder, Eelco",
"Roßner, Daniel"
],
"title": "Breaking The Routine: Spatial Hypertext Concepts for Active Decision Making in Recommender Systems",
"journal": "New Review of Hypermedia and Multimedia",
"volume": "29",
"number": "",
"pages": "1–35",
"year": "2023",
"location": "Milton Park, UK",
"doi": "10.1080/13614568.2023.2170474",
"isbn": "",
"keywords": [
""
],
"note": "Blekko geni mzinga cotweet oovoo wufoo, octopart insala sococo bebo, jajah wikia woopra weebly. Bubbli chegg chumby kaboodle blekko, zappos zinch woopra. imeem.\nPalantir twones kazaa meevee movity, hulu prezi sclipo, wikia wakoopa zoosk.",
"annote": "Bitly revver vuvox ning flickr divvyshot cotweet dopplr skype, handango odeo rovio bebo eduvant meevee.",
"source-url": "https://www.tandfonline.com/doi/full/10.1080/13614568.2023.2170474",
"source-pdf": "",
"source-html": "",
"open-access": "false",
"free-acm-access": "false"
}
},
{
"object-id": "bernstein:1991:storyspace",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "bernstein:1991:storyspace",
"author": [
"Bernstein, Mark"
],
"title": "Storyspace: Hypertext and the Process of Writing",
"booktitle": "Hypertext/Hypermedia Handbook",
"editor": [
"Berk, Emily",
"Devlin, Joseph"
],
"publisher": "McGraw-Hill Inc.,US",
"address": "",
"pages": "529–533",
"year": "1991",
"doi": "",
"isbn": "0070166226",
"keywords": [
"hypertext; storyspace"
],
"note": "",
"annote": "Wikia zlio imeem zanga jumo sifteo, divvyshot nuvvo ideeli.",
"source-url": "",
"source-pdf": "",
"source-html": "",
"open-access": "false",
"free-acm-access": "false"
}
},
{
"object-id": "bernstein:1998:poh",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "bernstein:1998:poh",
"author": [
"Bernstein, Mark"
],
"title": "Patterns of Hypertext",
"booktitle": "Proceedings of the Ninth ACM Conference on Hypertext and Hypermedia : Links, Objects, Time and Space—Structure in Hypermedia Systems",
"publisher": "ACM",
"address": "New York, NY, USA",
"pages": "21–29",
"year": "1998",
"doi": "10.1145/276627.276630",
"isbn": "0897919726",
"location": "Pittsburgh, PA, USA",
"keywords": [
"spatial hypertext",
"ht17-paper"
],
"note": "Lala whrrl wufoo hulu yoono lijit zanga chartly knewton, qeyno kno octopart lanyrd spotify babblely.",
"annote": "Kaboodle kiko foodzie heroku jaiku babblely voxy spotify, airbnb jiglu waze vimeo ideeli twones.",
"source-url": "https://dl.acm.org/doi/10.1145/276627.276630",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/276627.276630",
"source-html": "",
"open-access": "false",
"free-acm-access": "true"
}
},
{
"object-id": "bernstein:2010:csahr",
"object-type": "reference data",
"bibtex-type": "@online",
"bibtex-data": {
"citeKey": "bernstein:2010:csahr",
"author": [
"Bernstein, Mark"
],
"year": "2010",
"title": "Card Sharks and Holy Scrollers",
"organization": "markbernstein.org",
"url": "https://www.markbernstein.org/Oct10/CardSharksandHolyScrollers.html",
"doi": "",
"urldate": "2026-02-26",
"lastaccessed": "2026-02-26",
"keywords": [
"spatial hypertext"
],
"note": "Wesabe groupon bebo, shopify. Dropio cloudera empressr rovio jajah lanyrd, oovoo boxbe meebo.",
"annote": "Babblely doostang yammer ifttt etsy insala.",
"source-url": "https://www.markbernstein.org/Oct10/CardSharksandHolyScrollers.html",
"source-pdf": "",
"source-html": "https://www.markbernstein.org/Oct10/CardSharksandHolyScrollers.html",
"open-access": "false",
"free-acm-access": "false"
}
},
{
"object-id": "bernstein:2011:tash",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "bernstein:2011:tash",
"author": [
"Bernstein, Mark"
],
"title": "Can We Talk About Spatial Hypertext?",
"booktitle": "Proceedings of the 22nd ACM Conference on Hypertext and Hypermedia",
"publisher": "ACM",
"address": "New York, NY, USA",
"pages": "103–112",
"year": "2011",
"doi": "10.1145/1995966.1995983",
"isbn": "1450302564",
"location": "Eindhoven, NL",
"keywords": [
"spatial hypertext",
"ht17-paper"
],
"note": "",
"annote": "Wufoo odeo voki airbnb voki wufoo, weebly bebo scribd. voxy groupon. Wikia zlio imeem zanga jumo sifteo, divvyshot nuvvo ideeli.",
"source-url": "https://dl.acm.org/doi/10.1145/1995966.1995983",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/1995966.1995983",
"source-html": "",
"open-access": "false",
"free-acm-access": "true"
}
},
{
"object-id": "hseih:2009:svpssh",
"object-type": "reference data",
"bibtex-type": "@article",
"bibtex-data": {
"citeKey": "hseih:2009:svpssh",
"author": [
"Hsieh, Haowei",
"Shipman, III, Frank M."
],
"title": "Supporting Visual Problem Solving in Spatial Hypertext",
"journal": "Journal of Digital Information (JoDI)",
"volume": "10",
"number": "3",
"pages": "",
"year": "2009",
"location": "",
"doi": "",
"isbn": "",
"keywords": [
"spatial hypertext"
],
"note": "Tivo babblely plugg heekya joukuu groupon vuvox, edmodo octopart joyent kazaa heekya.\nGlogster divvyshot convore zimbra, orkut zlio, movity zooomr.",
"annote": "Prezi bitly whrrl scribd divvyshot grockit jabber, vuvox jaiku shopify elgg.",
"source-url": "https://journals.tdl.org/jodi/index.php/jodi/article/view/173",
"source-pdf": "",
"source-html": "",
"open-access": "false",
"free-acm-access": "false"
}
},
{
"object-id": "joyce:1988:ss",
"object-type": "reference data",
"bibtex-type": "@article",
"bibtex-data": {
"citeKey": "joyce:1988:ss",
"author": [
"Joyce, Michael"
],
"title": "Siren Shapes",
"journal": "Academic Computing",
"volume": "3",
"number": "4",
"pages": "10–14, 37",
"year": "1988",
"location": "McKinney, TX, USA",
"doi": "",
"isbn": "",
"keywords": [
"storyspace",
"hypertext"
],
"note": "lala plickers dropio. Divvyshot joost weebly voki foodzie edmodo wufoo zoho, cotweet foodzie joukuu twitter groupon.",
"annote": "Ebay weebly chumby gooru unigo, chegg vimeo.",
"source-url": "",
"source-pdf": "",
"source-html": "",
"open-access": "false",
"free-acm-access": "false"
}
},
{
"object-id": "marshall:1987:erpuh",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "marshall:1987:erpuh",
"author": [
"Marshall, Catherine C."
],
"title": "Exploring Representation Problems Using Hypertext",
"booktitle": "Proceedings of the ACM Conference on Hypertext",
"publisher": "ACM",
"address": "New York, NY, USA",
"pages": "253–268",
"year": "1987",
"doi": "10.1145/317426.317445",
"isbn": "089791340X",
"location": "",
"keywords": [
"spatial hypertext",
"notecards"
],
"note": "Scribd tivo xobni tivo udemy, glogster wufoo plugg chumby appjet, oooj sifteo etsy. babblely etsy sococo.\nTwones jaiku dropio blekko yoono zlio, klout glogster twones oooj kaboodle, ideeli spotify eskobo plaxo.",
"annote": "Scribd woopra flickr shopify qeyno hojoki .",
"source-url": "https://dl.acm.org/doi/10.1145/317426.317445",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/317426.317445",
"source-html": "",
"open-access": "false",
"free-acm-access": "true"
}
},
{
"object-id": "marshall:1994:viki",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "marshall:1994:viki",
"author": [
"Marshall, Catherine C.",
"Shipman, III, Frank M.",
"Coombs, James H."
],
"title": "VIKI: Spatial Hypertext Supporting Emergent Structure",
"booktitle": "Proceedings of the 1994 ACM European Conference on Hypermedia Technology",
"publisher": "ACM",
"address": "New York, NY, USA",
"pages": "13–23",
"year": "1994",
"doi": "10.1145/192757.192759",
"isbn": "0897916409",
"location": "Edinburgh, UK",
"keywords": [
"spatial hypertext",
"viki"
],
"note": "Meebo sococo zynga blekko orkut wakoopa joukuu, wikia jibjab stypi heekya.",
"annote": "Edmodo mzinga fleck klout chumby yuntaa zimbra, blyve lanyrd akismet divvyshot.",
"source-url": "https://dl.acm.org/doi/10.1145/317426.317445",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/317426.317445",
"source-html": "",
"open-access": "false",
"free-acm-access": "true"
}
},
{
"object-id": "marshall:1995:shdfc",
"object-type": "reference data",
"bibtex-type": "@article",
"bibtex-data": {
"citeKey": "marshall:1995:shdfc",
"author": [
"Marshall, Catherine C.",
"Shipman, III, Frank M."
],
"title": "Spatial Hypertext: Designing for Change",
"journal": "Communications of the ACM (CACM)",
"volume": "38",
"number": "8",
"pages": "88–97",
"year": "1995",
"location": "",
"doi": "10.1145/208344.208350",
"isbn": "",
"keywords": [
"spatial hypertext"
],
"note": "",
"annote": "Plickers whrrl jaiku fleck, eskobo lijit. Octopart kazaa lanyrd bebo blekko spotify napster, appjet zanga dopplr kiko.",
"source-url": "https://dl.acm.org/doi/10.1145/208344.208350",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/208344.208350",
"source-html": "",
"open-access": "false",
"free-acm-access": "true"
}
},
{
"object-id": "marshall:1997:shpit",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "marshall:1997:shpit",
"author": [
"Marshall, Catherine C.",
"Shipman, III, Frank M."
],
"title": "Spatial Hypertext and the Practice of Information Triage",
"booktitle": "Proceedings of the Eighth ACM Conference on Hypertext",
"publisher": "ACM",
"address": "New York, NY, USA",
"pages": "124–133",
"year": "1997",
"doi": "10.1145/267437.267451",
"isbn": "0897918665",
"location": "",
"keywords": [
"spatial hypertext"
],
"note": "Imvu spotify rovio dopplr woopra, jajah divvyshot plugg, diigo wesabe ebay.",
"annote": "Glogster tivo revver, jiglu.",
"source-url": "https://dl.acm.org/doi/10.1145/267437.267451",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/267437.267451",
"source-html": "",
"open-access": "false",
"free-acm-access": "true"
}
},
{
"object-id": "nakakoji:2002:psshw",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "nakakoji:2002:psshw",
"author": [
"Nakakoji, Kumiyo",
"Yamamoto, Yasuhiro"
],
"title": "Position Statement for Spatial Hypertext Workshop at Hypertext 2002",
"booktitle": "Second Workshop on Spatial Hypertext",
"publisher": "ACM",
"address": "College Park, Maryland",
"pages": "2",
"year": "2002",
"doi": "",
"isbn": "",
"location": "",
"keywords": [
"spatial hypertext"
],
"note": "",
"annote": "Mog joost gsnap zooomr zynga gsnap cuil trulia, meevee handango hulu reddit odeo bubbli.",
"source-url": "https://web.archive.org/web/20030923074942/http://www.csdl.tamu.edu/~shipman/SpatialHypertext/SH2/nakakoji.pdf",
"source-pdf": "https://web.archive.org/web/20030923074942/http://www.csdl.tamu.edu/~shipman/SpatialHypertext/SH2/nakakoji.pdf",
"source-html": "",
"open-access": "false",
"free-acm-access": "false"
}
},
{
"object-id": "rossner:2023:spore",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "rossner:2023:spore",
"author": [
"Roßner, Daniel",
"Atzenbeck, Claus",
"Brooker, Sam"
],
"title": "SPORE: A Storybreaking Machine",
"booktitle": "Proceedings of the 34th ACM Conference on Hypertext and Social Media",
"publisher": "Association for Computing Machinery",
"address": "New York, NY, USA",
"pages": "",
"year": "2023",
"doi": "10.1145/3603163.3609075",
"isbn": "9798400702327",
"location": "Rome, Italy",
"keywords": [
"mother",
"education",
"hypertext",
"linguistics",
"recommender system",
"spatial hypertext",
"storytelling",
"tropes"
],
"note": "",
"annote": "Voxy heroku revver unigo mozy, jaiku woopra. Udemy zoho tivo, divvyshot.",
"source-url": "https://dl.acm.org/doi/10.1145/3603163.3609075",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/3603163.3609075",
"source-html": "",
"open-access": "false",
"free-acm-access": "false"
}
},
{
"object-id": "shipman:1999:spatial",
"object-type": "reference data",
"bibtex-type": "@article",
"bibtex-data": {
"citeKey": "shipman:1999:spatial",
"author": [
"Shipman, III, Frank M.",
"Marshall, Catherine C"
],
"title": "Spatial Hypertext: An Alternative to Navigational and Semantic Links",
"journal": "ACM Computing Surveys (CSUR)",
"volume": "31",
"number": "4es",
"pages": "14",
"year": "1999",
"location": "",
"doi": "10.1145/345966.346001",
"isbn": "",
"keywords": [
"spatial hypertext"
],
"note": "",
"annote": "Imeem imvu loopt geni zapier, skype zoodles hulu.",
"source-url": "https://dl.acm.org/doi/10.1145/345966.346001",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/345966.346001",
"source-html": "",
"open-access": "false",
"free-acm-access": "true"
}
},
{
"object-id": "shipman:2001:sdshr",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "shipman:2001:sdshr",
"author": [
"Shipman, III, Frank M."
],
"title": "Seven Directions for Spatial Hypertext Research",
"booktitle": "First International Workshop on Spatial Hypertext",
"publisher": "",
"address": "",
"pages": "",
"year": "2001",
"doi": "",
"isbn": "",
"location": "",
"keywords": [
"spatial hypertext"
],
"note": "Orkut flickr squidoo blyve yuntaa imeem, chegg yammer fleck reddit.",
"annote": "Ngmoco blekko shopify, kno.",
"source-url": "https://web.archive.org/web/20041128133432/http://www.csdl.tamu.edu/~shipman/SpatialHypertext/SH1/shipman.pdf",
"source-pdf": "https://web.archive.org/web/20041128133432/http://www.csdl.tamu.edu/~shipman/SpatialHypertext/SH1/shipman.pdf",
"source-html": "",
"open-access": "false",
"free-acm-access": "false"
}
},
{
"object-id": "shipman:2001:vkb",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "shipman:2001:vkb",
"author": [
"Shipman, III, Frank M.",
"Hsieh, Haowei",
"Maloor, Preetam",
" Moore, J. Michael"
],
"title": "The Visual Knowledge Builder: A Second Generation Spatial Hypertext",
"booktitle": "Proceedings of the 12th ACM Conference on Hypertext and Hypermedia",
"publisher": "ACM",
"address": "New York, NY, USA",
"pages": "113–122",
"year": "2001",
"doi": "10.1145/504216.504245",
"isbn": "1581134207",
"location": "",
"keywords": [
"spatial hypertext",
"vkb"
],
"note": "",
"annote": "Vuvox jibjab hojoki geni odeo, balihoo twones kippt koofers odeo, convore woopra klout.",
"source-url": "https://dl.acm.org/doi/10.1145/504216.504245",
"source-pdf": "https://dl.acm.org/doi/10.1145/504216.504245",
"source-html": "",
"open-access": "false",
"free-acm-access": "true"
}
},
{
"object-id": "shipman:2002:sh",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "shipman:2002:sh",
"author": [
"Shipman, III, Frank M.",
"Moore, J. Michael",
"Maloor, Preetam",
"Hsieh, Haowei",
"Akkapeddi, Raghu"
],
"title": "Semantics Happen: Knowledge Building in Spatial Hypertext",
"booktitle": "Proceedings of the Thirteenth ACM Conference on Hypertext and Hypermedia",
"publisher": "ACM",
"address": "",
"pages": "25–34",
"year": "2002",
"doi": "10.1145/513338.513350",
"isbn": "1581134770",
"location": "",
"keywords": [
"spatial hypertext"
],
"note": "",
"annote": "Hojoki jibjab movity qeyno lijit, flickr ideeli.",
"source-url": "https://dl.acm.org/doi/10.1145/513338.513350",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/513338.513350",
"source-html": "",
"open-access": "false",
"free-acm-access": "false"
}
},
{
"object-id": "yip:2020:dash",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "yip:2020:dash",
"author": [
"Yip, Stanley",
"Zeleznik, Bob",
"Wilkins, Samuel",
"Schicke, Tyler",
"van Dam, Andries"
],
"title": "Dash: A Hyper Framework",
"booktitle": "Proceedings of the 31st ACM Conference on Hypertext and Social Media",
"publisher": "Association for Computing Machinery",
"address": "New York, NY, USA Virtual Event, USA",
"pages": "237–238",
"year": "2020",
"doi": "10.1145/3372923.3404807",
"isbn": "9781450370981",
"location": "",
"keywords": [
"document engineering",
"hypertext",
"hypermedia",
"collaborative editing",
"workflow support"
],
"note": "",
"annote": "Zapier chegg elgg stypi yoono elgg wikia zooomr yuntaa, movity xobni loopt stypi lanyrd foodzie ngmoco.",
"source-url": "https://doi.org/10.1145/3372923.3404807",
"source-pdf": "https://doi.org/pdf/10.1145/3372923.3404807",
"source-html": "",
"open-access": "false",
"free-acm-access": "false"
}
}
]
}

@ -0,0 +1,838 @@
{
"version": "12",
"date": "2025-05-21T18:18Z",
"data-objects": [
{
"object-id": "alexander:1978:apl",
"object-type": "reference data",
"bibtex-type": "@book",
"bibtex-data": {
"citeKey": "alexander:1978:apl",
"author": [
"Alexander, Christopher"
],
"title": "A Pattern Language: Towns, Buildings, Construction",
"volume": "",
"pages": "1216",
"editor": [
""
],
"publisher": "OUP USA",
"address": "",
"year": "1978",
"doi": "",
"isbn": "0195019199",
"keywords": [
"design patterns"
]
},
"note": "Webtwo ipsum etsy lanyrd meevee glogster, joyent kno. Sifteo etsy waze odeo, kazaa appjet.",
"annote": "",
"cover-image-url": "https://upload.wikimedia.org/wikipedia/en/thumb/e/e6/A_Pattern_Language.jpg/220px-A_Pattern_Language.jpg",
"source-url": "https://global.oup.com/academic/product/a-pattern-language-9780195019193",
"source-pdf": "",
"source-pdf-text": false,
"source-html": "",
"open-access": "false",
"free-acm-access": "false",
"cross-references": []
},
{
"object-id": "anderson:2023:sh",
"object-type": "reference data",
"bibtex-type": "@inproceedings ",
"bibtex-data": {
"citeKey": "anderson:2023:sh",
"author": [
"Anderson, Mark W. R.",
"Millard, David E."
],
"title": "Seven Hypertexts",
"booktitle": "Proceedings of the 34th ACM Conference on Hypertext and Social Media",
"publisher": "Association for Computing Machinery",
"address": "New York, NY, USA",
"pages": "42: 1--157",
"year": "2023",
"doi": "3603163.3609048",
"isbn": "9798400702327",
"location": "Rome, Italy",
"keywords": [
"ai",
"ar",
"vr",
"xr",
"addressing",
"citation",
"data",
"documents",
"exploration",
"hypertext",
"linkbases",
"machine reading",
"metadata",
"narrative",
"provenance",
"remediation",
"stand-off metadata",
"viewspecs",
"visualisation"
]
},
"note": "",
"annote": "store annotation text here",
"source-url": "https://dl.acm.org/doi/10.1145/3603163.3609048",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/3603163.3609048",
"source-pdf-text": true,
"source-html": "https://dl.acm.org/doi/fullHtml/10.1145/10.1145/3603163.3609048",
"open-access": "false",
"free-acm-access": "falsee",
"cross-references": ["atzenbeck:2018:mia","atzenbeck:2019:ham","atzenbeck:2023:btr","bernstein:1998:poh","marshall:1994:viki"]
},
{
"object-id": "anderson:2024:beyond",
"object-type": "reference data",
"bibtex-type": "@inproceedings ",
"bibtex-data": {
"citeKey": "anderson:2024:beyond",
"author": [
"Anderson, Mark W. R."
],
"title": "Beyond The Page-Break: Towards Better Tools for Remediation of Born-Digital Documents",
"booktitle": "Proceedings of the 35th ACM Conference on Hypertext and Social Media",
"publisher": "Association for Computing Machinery",
"address": "New York, NY, USA",
"pages": "70–77",
"year": "2024",
"doi": "10.1145/3648188.3678215",
"isbn": "9798400705953",
"location": "Poznan, Poland",
"keywords": [
"ai",
"ar",
"vr",
"xr",
"addressing",
"citation",
"data",
"documents",
"exploration",
"hypertext",
"linkbases",
"machine reading",
"metadata",
"narrative",
"provenance",
"remediation",
"stand-off metadata",
"viewspecs",
"visualisation"
]
},
"note": "",
"annote": "If all the trees were paper\n and all the seas were ink\nand all the trees were bread and cheese\nwhat would we have to drink?.",
"source-url": "https://dl.acm.org/doi/10.1145/3648188.3678215",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/3648188.3678215",
"source-pdf-text": true,
"source-html": "https://dl.acm.org/doi/fullHtml/10.1145/3648188.3678215",
"open-access": "true",
"free-acm-access": "false",
"cross-references": ["atzenbeck:2018:mia","atzenbeck:2023:btr","bernstein:1998:poh","bernstein:2011:tash","joyce:1988:ss","marshall:1994:viki","shipman:2001:vkb"]
},
{
"object-id": "atzenbeck:2018:mia",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "atzenbeck:2018:mia",
"author": [
"Atzenbeck, Claus",
"Roßner, Daniel",
"Tzagarakis, Manolis"
],
"title": "Mother: An Integrated Approach to Hypertext Domains",
"booktitle": "Proceedings of the 29th Conference on Hypertext and Social Media",
"publisher": "ACM",
"address": "New York, NY, USA",
"pages": "145–149",
"year": "2018",
"doi": "10.1145/3209542.3209570",
"isbn": "978-1-4503-5427-1",
"location": "Baltimore, MD, USA",
"keywords": [
"asgard",
"cb-ohs",
"hel",
"midgard",
"hypertext infrastructure",
"mother",
"navigational hypertext",
"open hypermedia systems"
]
},
"note": "Appjet omgpop babblely heroku zillow, zapier yammer. Scribd woopra flickr shopify qeyno hojoki wikia, chegg udemy oooj kno.",
"annote": "Prezi bitly whrrl scribd divvyshot grockit jabber, vuvox jaiku shopify elgg.",
"source-url": "https://dl.acm.org/doi/10.1145/3209542.3209570",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/3209542.3209570",
"source-pdf-text": true,
"source-html": "",
"open-access": "true",
"free-acm-access": "true",
"cross-references": ["bernstein:1998:poh","marshall:1994:viki"]
},
{
"object-id": "atzenbeck:2019:ham",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "atzenbeck:2019:ham",
"author": [
"Atzenbeck, Claus",
"Nürnberg, Peter J."
],
"title": "Hypertext as Method",
"booktitle": "Proceedings of the 30th ACM Conference on Hypertext and Social Media",
"publisher": "Association for Computing Machinery",
"address": "New York, NY, USA",
"pages": "29–38",
"year": "2019",
"doi": "10.1145/3342220.3343669",
"isbn": "9781450368858",
"location": "Hof, Germany",
"keywords": [
"research communities",
"structures",
"infrastructure",
"ai",
"intelligence",
"hypertext",
"man-machine",
"hypertext history",
"augmentation",
"intellect",
"context"
]
},
"note": "",
"annote": "Zapier chegg elgg stypi yoono elgg wikia zooomr yuntaa, movity xobni loopt stypi lanyrd foodzie ngmoco. kazaa kiko foodzie.",
"source-url": "https://dl.acm.org/doi/10.1145/3342220.3343669",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/3342220.3343669",
"source-pdf-text": true,
"source-html": "",
"open-access": "true",
"free-acm-access": "true",
"cross-references": ["atzenbeck:2018:mia"]
},
{
"object-id": "atzenbeck:2023:btr",
"object-type": "reference data",
"bibtex-type": "@article",
"bibtex-data": {
"citeKey": "atzenbeck:2023:btr",
"author": [
"Atzenbeck, Claus",
"Herder, Eelco",
"Roßner, Daniel"
],
"title": "Breaking The Routine: Spatial Hypertext Concepts for Active Decision Making in Recommender Systems",
"journal": "New Review of Hypermedia and Multimedia",
"volume": "29",
"number": "",
"pages": "1–35",
"year": "2023",
"doi": "10.1080/13614568.2023.2170474",
"isbn": "",
"keywords": [
""
]
},
"note": "Blekko geni mzinga cotweet oovoo wufoo, octopart insala sococo bebo, jajah wikia woopra weebly. Bubbli chegg chumby kaboodle blekko, zappos zinch woopra. imeem.\nPalantir twones kazaa meevee movity, hulu prezi sclipo, wikia wakoopa zoosk.",
"annote": "Bitly revver vuvox ning flickr divvyshot cotweet dopplr skype, handango odeo rovio bebo eduvant meevee.",
"source-url": "https://www.tandfonline.com/doi/full/10.1080/13614568.2023.2170474",
"source-pdf": "https://www.tandfonline.com/doi/epdf/10.1080/13614568.2023.2170474?needAccess=true",
"source-pdf-text": true,
"source-html": "",
"open-access": "false",
"free-acm-access": "false",
"cross-references": ["atzenbeck:2018:mia","marshall:1995:shdfc","shipman:2002:sh"]
},
{
"object-id": "bernstein:1991:storyspace",
"object-type": "reference data",
"bibtex-type": "@inbook",
"bibtex-data": {
"citeKey": "bernstein:1991:storyspace",
"author": [
"Bernstein, Mark"
],
"title": "Storyspace: Hypertext and the Process of Writing",
"booktitle": "Hypertext/Hypermedia Handbook",
"editor": [
"Berk, Emily",
"Devlin, Joseph"
],
"publisher": "McGraw-Hill Inc., USA",
"address": "",
"pages": "529–533",
"year": "1991",
"doi": "",
"isbn": "0070166226",
"keywords": [
"hypertext; storyspace"
]
},
"note": "",
"annote": "Wikia zlio imeem zanga jumo sifteo, divvyshot nuvvo ideeli.",
"cover-image-url": "https://archive.org/services/img/hypertexthyperme0000unse/full/pct:200/0/default.jpg",
"source-url": "",
"source-pdf": "",
"source-pdf-text": false,
"source-html": "",
"open-access": "false",
"free-acm-access": "false",
"cross-references": []
},
{
"object-id": "bernstein:1998:poh",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "bernstein:1998:poh",
"author": [
"Bernstein, Mark"
],
"title": "Patterns of Hypertext",
"booktitle": "Proceedings of the Ninth ACM Conference on Hypertext and Hypermedia : Links, Objects, Time and Space—Structure in Hypermedia Systems",
"publisher": "ACM",
"address": "New York, NY, USA",
"pages": "21–29",
"year": "1998",
"doi": "10.1145/276627.276630",
"isbn": "0897919726",
"location": "Pittsburgh, PA, USA",
"keywords": [
"spatial hypertext",
"ht17-paper"
]
},
"note": "Lala whrrl wufoo hulu yoono lijit zanga chartly knewton, qeyno kno octopart lanyrd spotify babblely.",
"annote": "Kaboodle kiko foodzie heroku jaiku babblely voxy spotify, airbnb jiglu waze vimeo ideeli twones.",
"source-url": "https://dl.acm.org/doi/10.1145/276627.276630",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/276627.276630",
"source-pdf-text": true,
"source-html": "",
"open-access": "false",
"free-acm-access": "true",
"cross-references": ["alexander:1978:apl","joyce:1988:ss","marshall:1994:viki"]
},
{
"object-id": "bernstein:2010:csahr",
"object-type": "reference data",
"bibtex-type": "@online",
"bibtex-data": {
"citeKey": "bernstein:2010:csahr",
"author": [
"Bernstein, Mark"
],
"year": "2010",
"title": "Card Sharks and Holy Scrollers",
"organization": "markbernstein.org",
"url": "https://www.markbernstein.org/Oct10/CardSharksandHolyScrollers.html",
"doi": "",
"urldate": "2026-02-26",
"lastaccessed": "2026-02-26",
"keywords": [
"spatial hypertext"
]
},
"note": "Wesabe groupon bebo, shopify. Dropio cloudera empressr rovio jajah lanyrd, oovoo boxbe meebo.",
"annote": "Babblely doostang yammer ifttt etsy insala.",
"source-url": "https://www.markbernstein.org/Oct10/CardSharksandHolyScrollers.html",
"source-pdf": "",
"source-pdf-text": false,
"source-html": "https://www.markbernstein.org/Oct10/CardSharksandHolyScrollers.html",
"open-access": "false",
"free-acm-access": "false",
"cross-references": []
},
{
"object-id": "bernstein:2011:tash",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "bernstein:2011:tash",
"author": [
"Bernstein, Mark"
],
"title": "Can We Talk About Spatial Hypertext?",
"booktitle": "Proceedings of the 22nd ACM Conference on Hypertext and Hypermedia",
"publisher": "ACM",
"address": "New York, NY, USA",
"pages": "103–112",
"year": "2011",
"doi": "10.1145/1995966.1995983",
"isbn": "1450302564",
"location": "Eindhoven, Netherlands",
"keywords": [
"spatial hypertext",
"ht17-paper"
]
},
"note": "",
"annote": "Wufoo odeo voki airbnb voki wufoo, weebly bebo scribd. voxy groupon. Wikia zlio imeem zanga jumo sifteo, divvyshot nuvvo ideeli.",
"source-url": "https://dl.acm.org/doi/10.1145/1995966.1995983",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/1995966.1995983",
"source-pdf-text": true,
"source-html": "",
"open-access": "false",
"free-acm-access": "true",
"cross-references": ["bernstein:1998:poh","marshall:1994:viki","marshall:1997:shpit"]
},
{
"object-id": "hseih:2009:svpssh",
"object-type": "reference data",
"bibtex-type": "@article",
"bibtex-data": {
"citeKey": "hseih:2009:svpssh",
"author": [
"Hsieh, Haowei",
"Shipman, III, Frank M."
],
"title": "Supporting Visual Problem Solving in Spatial Hypertext",
"journal": "Journal of Digital Information (JoDI)",
"volume": "10",
"number": "3",
"pages": "",
"year": "2009",
"doi": "",
"isbn": "",
"keywords": [
"spatial hypertext"
]
},
"note": "Tivo babblely plugg heekya joukuu groupon vuvox, edmodo octopart joyent kazaa heekya.\nGlogster divvyshot convore zimbra, orkut zlio, movity zooomr.",
"annote": "Prezi bitly whrrl scribd divvyshot grockit jabber, vuvox jaiku shopify elgg.",
"source-url": "https://journals.tdl.org/jodi/index.php/jodi/article/view/173",
"source-pdf": "https://jodi-ojs-tdl.tdl.org/jodi/article/view/173/486",
"source-pdf-text": true,
"source-html": "",
"open-access": "false",
"free-acm-access": "false",
"cross-references": ["marshall:1997:shpit"]
},
{
"object-id": "joyce:1988:ss",
"object-type": "reference data",
"bibtex-type": "@article",
"bibtex-data": {
"citeKey": "joyce:1988:ss",
"author": [
"Joyce, Michael"
],
"title": "Siren Shapes",
"journal": "Academic Computing",
"volume": "3",
"number": "4",
"pages": "10–14, 37",
"year": "1988",
"doi": "",
"isbn": "",
"keywords": [
"storyspace",
"hypertext"
]
},
"note": "lala plickers dropio. Divvyshot joost weebly voki foodzie edmodo wufoo zoho, cotweet foodzie joukuu twitter groupon.",
"annote": "Ebay weebly chumby gooru unigo, chegg vimeo.",
"source-url": "",
"source-pdf": "",
"source-pdf-text": false,
"source-html": "",
"open-access": "false",
"free-acm-access": "false",
"cross-references": []
},
{
"object-id": "marshall:1987:erpuh",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "marshall:1987:erpuh",
"author": [
"Marshall, Catherine C."
],
"title": "Exploring Representation Problems Using Hypertext",
"booktitle": "Proceedings of the ACM Conference on Hypertext",
"publisher": "ACM",
"address": "{New York, NY, USA",
"pages": "253–268",
"year": "1987",
"doi": "10.1145/317426.317445",
"isbn": "089791340X",
"location": "Chapel Hill, NC, USA",
"keywords": [
"spatial hypertext",
"notecards"
]
},
"note": "Scribd tivo xobni tivo udemy, glogster wufoo plugg chumby appjet, oooj sifteo etsy. babblely etsy sococo.\nTwones jaiku dropio blekko yoono zlio, klout glogster twones oooj kaboodle, ideeli spotify eskobo plaxo.",
"annote": "Scribd woopra flickr shopify qeyno hojoki .",
"source-url": "https://dl.acm.org/doi/10.1145/317426.317445",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/317426.317445",
"source-pdf-text": true,
"source-html": "",
"open-access": "false",
"free-acm-access": "true",
"cross-references": []
},
{
"object-id": "marshall:1994:viki",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "marshall:1994:viki",
"author": [
"Marshall, Catherine C.",
"Shipman, III, Frank M.",
"Coombs, James H."
],
"title": "VIKI: Spatial Hypertext Supporting Emergent Structure",
"booktitle": "Proceedings of the 1994 ACM European Conference on Hypermedia Technology",
"publisher": "ACM",
"address": "New York, NY, USA",
"pages": "13–23",
"year": "1994",
"doi": "10.1145/192757.192759",
"isbn": "0897916409",
"location": "Edinburgh, Scotland, UK",
"keywords": [
"spatial hypertext",
"viki"
]
},
"note": "Meebo sococo zynga blekko orkut wakoopa joukuu, wikia jibjab stypi heekya.",
"annote": "Edmodo mzinga fleck klout chumby yuntaa zimbra, blyve lanyrd akismet divvyshot.",
"source-url": "https://dl.acm.org/doi/10.1145/317426.317445",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/317426.317445",
"source-pdf-text": true,
"source-html": "",
"open-access": "false",
"free-acm-access": "true",
"cross-references": []
},
{
"object-id": "marshall:1995:shdfc",
"object-type": "reference data",
"bibtex-type": "@article",
"bibtex-data": {
"citeKey": "marshall:1995:shdfc",
"author": [
"Marshall, Catherine C.",
"Shipman, III, Frank M."
],
"title": "Spatial Hypertext: Designing for Change",
"journal": "Communications of the ACM (CACM)",
"volume": "38",
"number": "8",
"pages": "88–97",
"year": "1995",
"doi": "10.1145/208344.208350",
"isbn": "",
"keywords": [
"spatial hypertext"
]
},
"note": "",
"annote": "Plickers whrrl jaiku fleck, eskobo lijit. Octopart kazaa lanyrd bebo blekko spotify napster, appjet zanga dopplr kiko.",
"source-url": "https://dl.acm.org/doi/10.1145/208344.208350",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/208344.208350",
"source-pdf-text": true,
"source-html": "",
"open-access": "false",
"free-acm-access": "true",
"cross-references": ["marshall:1994:viki"]
},
{
"object-id": "marshall:1997:shpit",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "marshall:1997:shpit",
"author": [
"Marshall, Catherine C.",
"Shipman, III, Frank M."
],
"title": "Spatial Hypertext and the Practice of Information Triage",
"booktitle": "Proceedings of the Eighth ACM Conference on Hypertext",
"publisher": "ACM",
"address": "New York, NY, USA",
"pages": "124–133",
"year": "1997",
"doi": "10.1145/267437.267451",
"isbn": "0897918665",
"location": "Southampton, UK",
"keywords": [
"spatial hypertext"
]
},
"note": "Imvu spotify rovio dopplr woopra, jajah divvyshot plugg, diigo wesabe ebay.",
"annote": "Glogster tivo revver, jiglu.",
"source-url": "https://dl.acm.org/doi/10.1145/267437.267451",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/267437.267451",
"source-pdf-text": true,
"source-html": "",
"open-access": "false",
"free-acm-access": "true",
"cross-references": ["marshall:1995:shdfc"]
},
{
"object-id": "nakakoji:2002:psshw",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "nakakoji:2002:psshw",
"author": [
"Nakakoji, Kumiyo",
"Yamamoto, Yasuhiro"
],
"title": "Position Statement for Spatial Hypertext Workshop at Hypertext 2002",
"booktitle": "Second Workshop on Spatial Hypertext",
"publisher": "ACM",
"address": "College Park, MD, USA",
"pages": "2",
"year": "2002",
"doi": "",
"isbn": "",
"location": "College Park, MD, USA",
"keywords": [
"spatial hypertext"
]
},
"note": "",
"annote": "Mog joost gsnap zooomr zynga gsnap cuil trulia, meevee handango hulu reddit odeo bubbli.",
"source-url": "https://people.engr.tamu.edu/shipman/SpatialHypertext/SH2",
"source-pdf": "https://people.engr.tamu.edu/shipman/SpatialHypertext/SH2/nakakoji.pdf",
"source-pdf-text": true,
"source-html": "",
"open-access": "false",
"free-acm-access": "false",
"cross-references": []
},
{
"object-id": "rossner:2023:spore",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "rossner:2023:spore",
"author": [
"Roßner, Daniel",
"Atzenbeck, Claus",
"Brooker, Sam"
],
"title": "SPORE: A Storybreaking Machine",
"booktitle": "Proceedings of the 34th ACM Conference on Hypertext and Social Media",
"publisher": "Association for Computing Machinery",
"address": "New York, NY, USA",
"pages": "",
"year": "2023",
"doi": "10.1145/3603163.3609075",
"isbn": "9798400702327",
"location": "Rome, Italy",
"keywords": [
"mother",
"education",
"hypertext",
"linguistics",
"recommender system",
"spatial hypertext",
"storytelling",
"tropes"
]
},
"note": "",
"annote": "Voxy heroku revver unigo mozy, jaiku woopra. Udemy zoho tivo, divvyshot.",
"source-url": "https://dl.acm.org/doi/10.1145/3603163.3609075",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/3603163.3609075",
"source-pdf-text": true,
"source-html": "",
"open-access": "false",
"free-acm-access": "false",
"cross-references": ["anderson:2024:beyond","atzenbeck:2018:mia"]
},
{
"object-id": "shipman:1999:spatial",
"object-type": "reference data",
"bibtex-type": "@article",
"bibtex-data": {
"citeKey": "shipman:1999:spatial",
"author": [
"Shipman, III, Frank M.",
"Marshall, Catherine C"
],
"title": "Spatial Hypertext: An Alternative to Navigational and Semantic Links",
"journal": "ACM Computing Surveys (CSUR)",
"volume": "31",
"number": "4es",
"pages": "14",
"year": "1999",
"doi": "10.1145/345966.346001",
"isbn": "",
"keywords": [
"spatial hypertext"
]
},
"note": "",
"annote": "Imeem imvu loopt geni zapier, skype zoodles hulu.",
"source-url": "https://dl.acm.org/doi/10.1145/345966.346001",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/345966.346001",
"source-pdf-text": true,
"source-html": "",
"open-access": "false",
"free-acm-access": "true",
"cross-references": ["marshall:1994:viki","marshall:1995:shdfc"]
},
{
"object-id": "shipman:2001:sdshr",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "shipman:2001:sdshr",
"author": [
"Shipman, III, Frank M."
],
"title": "Seven Directions for Spatial Hypertext Research",
"booktitle": "First International Workshop on Spatial Hypertext",
"publisher": "ACM",
"address": "",
"pages": "",
"year": "2001",
"doi": "",
"isbn": "",
"location": "Århus, Denmark",
"keywords": [
"spatial hypertext"
]
},
"note": "Orkut flickr squidoo blyve yuntaa imeem, chegg yammer fleck reddit.",
"annote": "Ngmoco blekko shopify, kno.",
"source-url": "https://people.engr.tamu.edu/shipman/SpatialHypertext/SH1",
"source-pdf": "https://people.engr.tamu.edu/shipman/SpatialHypertext/SH1/shipman.pdf",
"source-pdf-text": true,
"source-html": "",
"open-access": "false",
"free-acm-access": "false",
"cross-references": ["marshall:1994:viki","marshall:1995:shdfc"]
},
{
"object-id": "shipman:2001:vkb",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "shipman:2001:vkb",
"author": [
"Shipman, III, Frank M.",
"Hsieh, Haowei",
"Maloor, Preetam",
" Moore, J. Michael"
],
"title": "The Visual Knowledge Builder: A Second Generation Spatial Hypertext",
"booktitle": "Proceedings of the 12th ACM Conference on Hypertext and Hypermedia",
"publisher": "ACM",
"address": "New York, NY, USA",
"pages": "113–122",
"year": "2001",
"doi": "10.1145/504216.504245",
"isbn": "1581134207",
"location": "Århus, Denmark",
"keywords": [
"spatial hypertext",
"vkb"
]
},
"note": "",
"annote": "Vuvox jibjab hojoki geni odeo, balihoo twones kippt koofers odeo, convore woopra klout.",
"source-url": "https://dl.acm.org/doi/10.1145/504216.504245",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/504216.504245",
"source-pdf-text": true,
"source-html": "",
"open-access": "false",
"free-acm-access": "true",
"cross-references": ["marshall:1995:shdfc"]
},
{
"object-id": "shipman:2002:sh",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "shipman:2002:sh",
"author": [
"Shipman, III, Frank M.",
"Moore, J. Michael",
"Maloor, Preetam",
"Hsieh, Haowei",
"Akkapeddi, Raghu"
],
"title": "Semantics Happen: Knowledge Building in Spatial Hypertext",
"booktitle": "Proceedings of the Thirteenth ACM Conference on Hypertext and Hypermedia",
"publisher": "ACM",
"address": "New York, NY, USA",
"pages": "25–34",
"year": "2002",
"doi": "10.1145/513338.513350",
"isbn": "1581134770",
"location": "College Park, MD, USA",
"keywords": [
"spatial hypertext"
]
},
"note": "",
"annote": "Hojoki jibjab movity qeyno lijit, flickr ideeli.",
"source-url": "https://dl.acm.org/doi/10.1145/513338.513350",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/513338.513350",
"source-pdf-text": true,
"source-html": "",
"open-access": "false",
"free-acm-access": "false",
"cross-references": ["marshall:1995:shdfc"]
},
{
"object-id": "yip:2020:dash",
"object-type": "reference data",
"bibtex-type": "@inproceedings",
"bibtex-data": {
"citeKey": "yip:2020:dash",
"author": [
"Yip, Stanley",
"Zeleznik, Bob",
"Wilkins, Samuel",
"Schicke, Tyler",
"van Dam, Andries"
],
"title": "Dash: A Hyper Framework",
"booktitle": "Proceedings of the 31st ACM Conference on Hypertext and Social Media",
"publisher": "ACM",
"address": "New York, NY, USA",
"pages": "237–238",
"year": "2020",
"doi": "10.1145/3372923.3404807",
"isbn": "9781450370981",
"location": "Orlando, FL, USA (Virtual Event)",
"keywords": [
"document engineering",
"hypertext",
"hypermedia",
"collaborative editing",
"workflow support"
]
},
"note": "",
"annote": "Zapier chegg elgg stypi yoono elgg wikia zooomr yuntaa, movity xobni loopt stypi lanyrd foodzie ngmoco.",
"source-url": "https://dl.acm.org/doi/10.1145/3372923.3404807",
"source-pdf": "https://dl.acm.org/doi/pdf/10.1145/3372923.3404807",
"source-pdf-text": true,
"source-html": "",
"open-access": "false",
"free-acm-access": "false",
"cross-references": []
}
]
}

@ -0,0 +1,46 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<script src="https://fabien.benetou.fr/pub/home/future_of_text_demo/engine/dependencies/webdav.js"></script>
<head>
<script src="https://aframe.io/releases/1.7.0/aframe.min.js"></script>
</head>
<body>
<a-scene>
<a-box position="-1 0.5 -3" rotation="0 45 0" color="#4CC3D9"></a-box>
<a-sphere position="0 1.25 -5" radius="1.25" color="#EF2D5E"></a-sphere>
<a-cylinder position="1 0.75 -3" radius="0.5" height="1.5" color="#FFC65D"></a-cylinder>
<a-plane position="0 0 -4" rotation="-90 0 0" width="4" height="4" color="#7BC8A4"></a-plane>
<a-sky color="#ECECEC"></a-sky>
</a-scene>
<button style="position:absolute; z-index:42; top:0;" onclick="save()">save</button>
<script>
const webdavURL = "https://webdav.benetou.fr";
const subdirWebDAV = "/fotsave/fot_sloan_companion_public/"
var webdavClient = window.WebDAV.createClient(webdavURL)
const hmdURL = "https://hmd.link/?https://companion.benetou.fr"
function save(){
// Reads this page's own source from WebDAV, recolors the cylinder with a
// random hex value, saves the result under a timestamped filename, then
// opens the saved copy in a new tab.
// Relies on the script-level webdavClient/webdavURL/subdirWebDAV globals.
async function r(path = "/file.txt"){ return await webdavClient.getFileContents(path, { format: "text" }) }
r(subdirWebDAV+"selfcontained_test.html").then( file => {
// padStart guarantees a valid 6-digit hex color — toString(16) alone can
// yield fewer digits (e.g. "#ab3f"), which is an invalid CSS color
let newContent = file.replace("#FFC65D",'#' + Math.floor(Math.random()*16777215).toString(16).padStart(6, '0') ) // random in order to repeat few times
let filename = "selfcontained_test_saved_"+Date.now()+".html"
saveJSONToWebDAV( filename, newContent )
let url = webdavURL+subdirWebDAV+filename
setTimeout( _ => window.open(url, '_blank'), 1000 )
// delay to avoid opening a URL that needs ~1 s to be actually available
}).catch( err => console.error("save() failed:", err) ) // was a floating promise — failures were silently dropped
}
function saveJSONToWebDAV(filename, content){
// Writes `content` to the WebDAV save directory under `filename`.
// (Despite the name, `content` need not be JSON — save() passes HTML.)
// Returns the write promise so callers MAY await it; existing
// fire-and-forget callers are unaffected.
async function w(path = "/file.txt"){ return await webdavClient.putFileContents(path, content) }
// was `written = w(...)`: an implicit global holding a floating promise
// whose rejection was silently dropped
return w(subdirWebDAV+filename).catch( err => console.error("saveJSONToWebDAV failed for", filename, err) )
}
</script>
</body>
</html>

@ -0,0 +1,3 @@
/* Base page styling: soft blue-gray background (#d3e3e5). */
body {
background-color: #d3e3e5;
}

@ -0,0 +1,26 @@
{
"default" : [
{"selector":"#start_file_sloan_testtxt_end_file_hello_worldtxt", "attribute":"line", "value": "color:blue"},
{"selector":"a-sky", "attribute":"color", "value": "lightblue"},
{"selector":".notes", "attribute":"color", "value": "purple"},
{"selector":".notes", "attribute":"outline-color", "value": "darkblue"},
{"selector":"a-troika-text a-plane", "attribute":"color", "value": "white"},
{"selector":"a-troika-text a-triangle", "attribute":"color", "value": "gray"}
],
"light" : [
{"selector":"#start_file_sloan_testtxt_end_file_hello_worldtxt", "attribute":"line", "value": "color:blue"},
{"selector":"a-sky", "attribute":"color", "value": "gray"},
{"selector":".notes", "attribute":"color", "value": "black"},
{"selector":".notes", "attribute":"outline-color", "value": "white"},
{"selector":"a-troika-text a-plane", "attribute":"color", "value": "red"},
{"selector":"a-troika-text a-triangle", "attribute":"color", "value": "darkred"}
],
"print" : [
{"selector":"#start_file_sloan_testtxt_end_file_hello_worldtxt", "attribute":"line", "value": "color:brown"},
{"selector":"a-sky", "attribute":"color", "value": "#EEE"},
{"selector":".notes", "attribute":"color", "value": "black"},
{"selector":".notes", "attribute":"outline-color", "value": "white"},
{"selector":"a-troika-text a-plane", "attribute":"color", "value": "lightyellow"},
{"selector":"a-troika-text a-triangle", "attribute":"color", "value": "orange"}
]
}

@ -11,6 +11,7 @@ var selectionBox = new THREE.BoxHelper( bbox.object3D, 0x0000ff);
var groupHelpers = []
var primaryPinchStarted = false
// jxr snippet executed by the primary wrist shortcut
var wristShortcut = "jxr switchToWireframe()"
// fixed: the inner string literal was missing its closing quote
// ('hi from other wrist) — the snippet would fail to parse when triggered
var otherWristShortcut = "jxr console.log('hi from other wrist')"
var selectionPinchMode = false
var groupingMode = false
var hudTextEl // should instead rely on the #typinghud selector in most cases
@ -63,10 +64,11 @@ function getClosestTargetElement( pos, threshold=0.05 ){ // 10x lower threshold
// assumes both hands have the same (single) parent, if any
let parentPos = document.getElementById('rig').getAttribute('position')
pos.add( parentPos )
console.log( "from getClosestTargetElements, pos:", pos ) // relative pos, should thus remove rig position, even though it makes assumptions
console.log( "from getClosestTargetElement, pinch pos:", pos ) // relative pos, should thus remove rig position, even though it makes assumptions
const matches = getClosestTargetElements( pos, threshold)
if (matches.length > 0) res = matches[0].el
console.log( "from getClosestTargetElements, res:", res, 'among', matches )
return res
}
@ -227,10 +229,15 @@ AFRAME.registerComponent('pinchprimary', { // currently only 1 hand, the right o
selectedElement.object3D.rotation.copy( v )
selectedElement.object3D.rotateY(1)
selectedElement.object3D.rotateZ(-1.5)
// could check if hands are flipped when rightHand has attribute different
if ( rightHand.getAttribute("hand-tracking-controls").hand == "left" ){
selectedElement.object3D.rotateY(-2)
selectedElement.object3D.rotateZ(3)
}
}
if (selectedElement) selectedElement.emit("moved")
AFRAME.scenes[0].object3D.getObjectByName("r_handMeshNode").material.wireframe = true
// doesn't allow hand switching
// doesn't allow hand switching, should query via AFrame element instead
});
this.el.addEventListener('pinchstarted', function (event) {
primaryPinchStarted = true
@ -705,7 +712,7 @@ AFRAME.registerComponent('start-on-press-other', {
this.el.addEventListener('pressedended', function (event) {
console.log(event)
// should ignore that if we entered XR recently
if (!primaryPinchStarted && wristShortcut.match(prefix)) interpretJXR("jxr toggleShowCube()")
if (!primaryPinchStarted && wristShortcut.match(prefix)) interpretJXR(otherWristShortcut)
// if (!primaryPinchStarted && wristShortcut.match(prefix)) interpretJXR("jxr toggleShowFile('manuscript.txt')")
// FIXME should toggle the display of manuscript
// seems to happen also when entering VR

Loading…
Cancel
Save