diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 0000000..e69de29 diff --git a/contact.html b/contact.html new file mode 100644 index 0000000..f1c340f --- /dev/null +++ b/contact.html @@ -0,0 +1,128 @@ + + + + + + + + + + Contact Details | Angel Xuan Chang + + + + + + + + + + + + + +
+
+ + +

Angel Xuan Chang

+ +
+
+

+ I am an Associate Professor at Simon Fraser University. + Prior to this, I was a visiting research scientist at Facebook AI Research and a research scientist at Eloquent Labs, where I worked on dialogue. I received my Ph.D. in Computer Science from Stanford, where I was part of the Natural Language Processing Group and was advised by Chris Manning. + My research focuses on connecting language to 3D representations of shapes and scenes, and on grounding language for embodied agents in indoor environments. I have worked on methods for synthesizing 3D scenes and shapes from natural language, and on various datasets for 3D scene understanding. In general, I am interested in the semantics of shapes and scenes, the representation and acquisition of common sense knowledge, and reasoning using probabilistic models. + Some of my other interests include drawing and dance. +

+

+ +

+
+
+ Angel Xuan Chang +
+ angelx-{at}-sfu-[dot]-ca +

+ Associate Professor
+ School of Computing Science
+ Simon Fraser University
+ 3DLG + | GrUVi + | SFU NatLang
+ SFU AI/ML + | VINCI
+ Canada CIFAR AI Chair (Amii)
+ TUM-IAS Hans Fischer Fellow (2018-2022)
+ Google Scholar +
+
+
+ + + +
+
+ +
+
+

Contact Details

+
+
+

Mailing Address

+ Angel Xuan Chang
+ School of Computing Science
+ ASB 9971 - 8888 University Drive
+ Simon Fraser University
+ Burnaby, BC V5A 1S6, Canada +
+
+

Office

+ Campus: Burnaby Campus
+ Building: TASC1
+ Room: 8031 +
+
+ +
+
+
+ +
+ + diff --git a/files/3dhoi.png b/files/3dhoi.png new file mode 100644 index 0000000..fadf974 Binary files /dev/null and b/files/3dhoi.png differ diff --git a/files/3drotinv.png b/files/3drotinv.png new file mode 100644 index 0000000..b65cfeb Binary files /dev/null and b/files/3drotinv.png differ diff --git a/files/3dvqa.png b/files/3dvqa.png new file mode 100644 index 0000000..bf2a6c8 Binary files /dev/null and b/files/3dvqa.png differ diff --git a/files/angel.jpg b/files/angel.jpg new file mode 100644 index 0000000..04228e6 Binary files /dev/null and b/files/angel.jpg differ diff --git a/files/barcodebert.png b/files/barcodebert.png new file mode 100644 index 0000000..a955677 Binary files /dev/null and b/files/barcodebert.png differ diff --git a/files/bioscan1m.png b/files/bioscan1m.png new file mode 100644 index 0000000..b9a217b Binary files /dev/null and b/files/bioscan1m.png differ diff --git a/files/bioscan5m.png b/files/bioscan5m.png new file mode 100644 index 0000000..32565a0 Binary files /dev/null and b/files/bioscan5m.png differ diff --git a/files/bioscanclip.png b/files/bioscanclip.png new file mode 100644 index 0000000..74a04e7 Binary files /dev/null and b/files/bioscanclip.png differ diff --git a/files/comon.png b/files/comon.png new file mode 100644 index 0000000..1f75be1 Binary files /dev/null and b/files/comon.png differ diff --git a/files/corefSieves.png b/files/corefSieves.png new file mode 100644 index 0000000..3c3d94c Binary files /dev/null and b/files/corefSieves.png differ diff --git a/files/d3net.png b/files/d3net.png new file mode 100644 index 0000000..920c95c Binary files /dev/null and b/files/d3net.png differ diff --git a/files/diorama.webp b/files/diorama.webp new file mode 100644 index 0000000..f2e5a7b Binary files /dev/null and b/files/diorama.webp differ diff --git a/files/dragonCurve.png b/files/dragonCurve.png new file mode 100644 index 0000000..9e78854 Binary files /dev/null and b/files/dragonCurve.png differ diff --git a/files/duoduoclip.png b/files/duoduoclip.png new file mode 100644 index 0000000..c3d5577 Binary files /dev/null and b/files/duoduoclip.png differ diff --git a/files/fpic2014.png b/files/fpic2014.png new file mode 100644 index 0000000..1d272f4 Binary files /dev/null and b/files/fpic2014.png differ diff --git a/files/genshaperetr.png b/files/genshaperetr.png new file mode 100644 index 0000000..be1ad34 Binary files /dev/null and b/files/genshaperetr.png differ diff --git a/files/hab2.jpeg b/files/hab2.jpeg new file mode 100644 index 0000000..86f35e0 Binary files /dev/null and b/files/hab2.jpeg differ diff --git a/files/hierarchylayout.jpg b/files/hierarchylayout.jpg new file mode 100644 index 0000000..a216fe8 Binary files /dev/null and b/files/hierarchylayout.jpg differ diff --git a/files/hm3d.jpeg b/files/hm3d.jpeg new file mode 100644 index 0000000..5813fbf Binary files /dev/null and b/files/hm3d.jpeg differ diff --git a/files/hm3dsem.png b/files/hm3dsem.png new file mode 100644 index 0000000..a9fd73d Binary files /dev/null and b/files/hm3dsem.png differ diff --git a/files/hssd.png b/files/hssd.png new file mode 100644 index 0000000..30a3a67 Binary files /dev/null and b/files/hssd.png differ diff --git a/files/interactiveLearning.png b/files/interactiveLearning.png new file mode 100644 index 0000000..e00dbbe Binary files /dev/null and b/files/interactiveLearning.png differ diff --git a/files/law-vlnce.gif b/files/law-vlnce.gif new file mode 100644 index 0000000..1842059 Binary files /dev/null and b/files/law-vlnce.gif differ diff --git 
a/files/lexground.png b/files/lexground.png new file mode 100644 index 0000000..ad7f0e2 Binary files /dev/null and b/files/lexground.png differ diff --git a/files/m2dnerf.png b/files/m2dnerf.png new file mode 100644 index 0000000..44f3a57 Binary files /dev/null and b/files/m2dnerf.png differ diff --git a/files/maps.png b/files/maps.png new file mode 100644 index 0000000..f26895b Binary files /dev/null and b/files/maps.png differ diff --git a/files/mimic.png b/files/mimic.png new file mode 100644 index 0000000..865d37d Binary files /dev/null and b/files/mimic.png differ diff --git a/files/mirror3d.png b/files/mirror3d.png new file mode 100644 index 0000000..7887d45 Binary files /dev/null and b/files/mirror3d.png differ diff --git a/files/mopa.gif b/files/mopa.gif new file mode 100644 index 0000000..6842306 Binary files /dev/null and b/files/mopa.gif differ diff --git a/files/multi3drefer.png b/files/multi3drefer.png new file mode 100644 index 0000000..e3b80bf Binary files /dev/null and b/files/multi3drefer.png differ diff --git a/files/multion.jpg b/files/multion.jpg new file mode 100644 index 0000000..2ef3e3c Binary files /dev/null and b/files/multion.jpg differ diff --git a/files/multiscan.png b/files/multiscan.png new file mode 100644 index 0000000..2160485 Binary files /dev/null and b/files/multiscan.png differ diff --git a/files/nlslam.gif b/files/nlslam.gif new file mode 100644 index 0000000..01b5b84 Binary files /dev/null and b/files/nlslam.gif differ diff --git a/files/omage.png b/files/omage.png new file mode 100644 index 0000000..ce1ac81 Binary files /dev/null and b/files/omage.png differ diff --git a/files/opd.png b/files/opd.png new file mode 100644 index 0000000..760c006 Binary files /dev/null and b/files/opd.png differ diff --git a/files/opdmulti.png b/files/opdmulti.png new file mode 100644 index 0000000..1c89e94 Binary files /dev/null and b/files/opdmulti.png differ diff --git a/files/ovmm.jpeg b/files/ovmm.jpeg new file mode 100644 index 0000000..304cd1b Binary files /dev/null and b/files/ovmm.jpeg differ diff --git a/files/pigraphs.png b/files/pigraphs.png new file mode 100644 index 0000000..102ebbc Binary files /dev/null and b/files/pigraphs.png differ diff --git a/files/plan2scene.png b/files/plan2scene.png new file mode 100644 index 0000000..6d5cccd Binary files /dev/null and b/files/plan2scene.png differ diff --git a/files/prox.png b/files/prox.png new file mode 100644 index 0000000..1e4f120 Binary files /dev/null and b/files/prox.png differ diff --git a/files/pureclipnerf.png b/files/pureclipnerf.png new file mode 100644 index 0000000..b9e320d Binary files /dev/null and b/files/pureclipnerf.png differ diff --git a/files/quote-attribution.png b/files/quote-attribution.png new file mode 100644 index 0000000..4db4f10 Binary files /dev/null and b/files/quote-attribution.png differ diff --git a/files/r3ds.webp b/files/r3ds.webp new file mode 100644 index 0000000..62fd068 Binary files /dev/null and b/files/r3ds.webp differ diff --git a/files/rearrangement.png b/files/rearrangement.png new file mode 100644 index 0000000..8ed91f0 Binary files /dev/null and b/files/rearrangement.png differ diff --git a/files/roominoes.png b/files/roominoes.png new file mode 100644 index 0000000..078a7ad Binary files /dev/null and b/files/roominoes.png differ diff --git a/files/rrr.gif b/files/rrr.gif new file mode 100644 index 0000000..6842306 Binary files /dev/null and b/files/rrr.gif differ diff --git a/files/s2o.png b/files/s2o.png new file mode 100644 index 0000000..ba716b1 Binary files 
/dev/null and b/files/s2o.png differ diff --git a/files/sapien.png b/files/sapien.png new file mode 100644 index 0000000..03693d2 Binary files /dev/null and b/files/sapien.png differ diff --git a/files/scan2cad.png b/files/scan2cad.png new file mode 100644 index 0000000..23cd5dd Binary files /dev/null and b/files/scan2cad.png differ diff --git a/files/scan2cap.jpg b/files/scan2cap.jpg new file mode 100644 index 0000000..43e3a3e Binary files /dev/null and b/files/scan2cap.jpg differ diff --git a/files/scannet.jpg b/files/scannet.jpg new file mode 100644 index 0000000..1511ae4 Binary files /dev/null and b/files/scannet.jpg differ diff --git a/files/scanrefer.jpg b/files/scanrefer.jpg new file mode 100644 index 0000000..f039ee0 Binary files /dev/null and b/files/scanrefer.jpg differ diff --git a/files/scene-understanding.png b/files/scene-understanding.png new file mode 100644 index 0000000..c60ec95 Binary files /dev/null and b/files/scene-understanding.png differ diff --git a/files/scenegrok.png b/files/scenegrok.png new file mode 100644 index 0000000..d75802b Binary files /dev/null and b/files/scenegrok.png differ diff --git a/files/semanticParsing.png b/files/semanticParsing.png new file mode 100644 index 0000000..e78f590 Binary files /dev/null and b/files/semanticParsing.png differ diff --git a/files/semgeo.png b/files/semgeo.png new file mode 100644 index 0000000..56264d7 Binary files /dev/null and b/files/semgeo.png differ diff --git a/files/shapenet.png b/files/shapenet.png new file mode 100644 index 0000000..f7c348c Binary files /dev/null and b/files/shapenet.png differ diff --git a/files/singapo.png b/files/singapo.png new file mode 100644 index 0000000..00afe3f Binary files /dev/null and b/files/singapo.png differ diff --git a/files/sizes.png b/files/sizes.png new file mode 100644 index 0000000..26d2b02 Binary files /dev/null and b/files/sizes.png differ diff --git a/files/smc.png b/files/smc.png new file mode 100644 index 0000000..e8f29e9 Binary files /dev/null and b/files/smc.png differ diff --git a/files/smc.webp b/files/smc.webp new file mode 100644 index 0000000..4002d31 Binary files /dev/null and b/files/smc.webp differ diff --git a/files/spatialLearning.png b/files/spatialLearning.png new file mode 100644 index 0000000..b87248d Binary files /dev/null and b/files/spatialLearning.png differ diff --git a/files/spl.jpeg b/files/spl.jpeg new file mode 100644 index 0000000..9d506f3 Binary files /dev/null and b/files/spl.jpeg differ diff --git a/files/sscnet.jpeg b/files/sscnet.jpeg new file mode 100644 index 0000000..96dbcfa Binary files /dev/null and b/files/sscnet.jpeg differ diff --git a/files/sutime.png b/files/sutime.png new file mode 100644 index 0000000..e633a13 Binary files /dev/null and b/files/sutime.png differ diff --git a/files/text2scene.png b/files/text2scene.png new file mode 100644 index 0000000..30cddcb Binary files /dev/null and b/files/text2scene.png differ diff --git a/files/transphoner.png b/files/transphoner.png new file mode 100644 index 0000000..4d7f13b Binary files /dev/null and b/files/transphoner.png differ diff --git a/files/tricolo.png b/files/tricolo.png new file mode 100644 index 0000000..f162db7 Binary files /dev/null and b/files/tricolo.png differ diff --git a/files/tt3dstar.png b/files/tt3dstar.png new file mode 100644 index 0000000..89d97c1 Binary files /dev/null and b/files/tt3dstar.png differ diff --git a/files/twomen.png b/files/twomen.png new file mode 100644 index 0000000..240e4b4 Binary files /dev/null and b/files/twomen.png differ diff 
--git a/files/unit3d.jpeg b/files/unit3d.jpeg new file mode 100644 index 0000000..66cc61b Binary files /dev/null and b/files/unit3d.jpeg differ diff --git a/files/vigil3d.png b/files/vigil3d.png new file mode 100644 index 0000000..7473b46 Binary files /dev/null and b/files/vigil3d.png differ diff --git a/files/wordnetLink.png b/files/wordnetLink.png new file mode 100644 index 0000000..c1c1d09 Binary files /dev/null and b/files/wordnetLink.png differ diff --git a/fonts/glyphicons-halflings-regular.eot b/fonts/glyphicons-halflings-regular.eot new file mode 100644 index 0000000..b93a495 Binary files /dev/null and b/fonts/glyphicons-halflings-regular.eot differ diff --git a/fonts/glyphicons-halflings-regular.svg b/fonts/glyphicons-halflings-regular.svg new file mode 100644 index 0000000..94fb549 --- /dev/null +++ b/fonts/glyphicons-halflings-regular.svg @@ -0,0 +1,288 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/fonts/glyphicons-halflings-regular.ttf b/fonts/glyphicons-halflings-regular.ttf new file mode 100644 index 0000000..1413fc6 Binary files /dev/null and b/fonts/glyphicons-halflings-regular.ttf differ diff --git a/fonts/glyphicons-halflings-regular.woff b/fonts/glyphicons-halflings-regular.woff new file mode 100644 index 0000000..9e61285 Binary files /dev/null and b/fonts/glyphicons-halflings-regular.woff differ diff --git a/fonts/glyphicons-halflings-regular.woff2 b/fonts/glyphicons-halflings-regular.woff2 new file mode 100644 index 0000000..64539b5 Binary files /dev/null and b/fonts/glyphicons-halflings-regular.woff2 differ diff --git a/group.html b/group.html new file mode 100644 index 0000000..4de719b --- /dev/null +++ b/group.html @@ -0,0 +1,343 @@ + + + + + + + + + + SFU 3D Language Group | Angel Xuan Chang + + + + + + + + + + + + + +
+
+ + +
+
+ + + + + + + + + +
+
+

3D Language Group

+
+
+

Current Students

+

PhD Students

+
+ +
+ + Sonia Raychaudhuri + (Fall 2020) + +
+ +
+ + Han-Hung Lee + (Fall 2021) + +
+ +
+ + Xiaohao Sun + (Fall 2021) + +
+ +
+ + Xingguang Yan + (Fall 2022) + +
+ +
+ + Qirui Wu + (Spring 2022) + +
+ +
+ + Austin T. Wang + (Fall 2023) + +
+ +
+ + Xiaoliang Huo + (Fall 2023, co-advised with Manolis Savva) + +
+ +
+ + Yiming Zhang + (Spring 2024) + +
+ +
+
+

Master's Students

+
+ +
+ + Zeming Gong + (Summer 2023) + +
+ +
+ + Denys Iliash + (Spring 2024) + +
+ +
+ + Qinghong Han + (Spring 2025) + +
+ +
+
+

Undergraduate Students

+
+ +
+ + Mrinal Goshalia + (Fall 2023) + +
+ +
+
+
+
+

Alumni

+ +
+ Yasaman Etesam + + + (Fall 2019) + +
+ +
+ +
+ Ali Gholami + + + (M.Sc. Fall 2019 to Spring 2022, next Zippin), Thesis: Dense Captioning for 3D Environments using Natural Language + +
+ +
+ +
+ Leon Kochiev + + + (M.Sc. Fall 2019 to Summer 2021, next Integrant), Thesis: Neural State Machine for 2D and 3D Visual Question Answering + +
+ +
+ +
+ Yue Ruan + + + (M.Sc. Fall 2019 to Spring 2022, next Amazon), Thesis: TriCoLo: Trimodal Contrastive Loss for Text to Shape Retrieval + +
+ +
+ +
+ Akshit Sharma + + + (M.Sc. Fall 2019 to Fall 2022), Thesis: DenseRefer3D: A Language and 3D Dataset for Coreference Resolution and Referring Expression Comprehension + +
+ +
+ +
+ Hanxiao (Shawn) Jiang + + + (undergrad, Fall 2019 - 2020, M.Sc. Fall 2020 to Summer 2023, next UIUC PhD), Thesis: OPD: Single-view 3D Openable Part Detection + +
+ +
+ +
+ Qirui Wu + + + (undergrad, Fall 2019 - 2021, next SFU PhD) + +
+ +
+ +
+ Weijie (Lewis) Lin + + + (undergrad, Spring 2020 - 2021, next CMU MS) + +
+ +
+ +
+ Yiming Zhang + + + (undergrad, 2021, M.Sc. 2022 - 2023, next SFU PhD), Thesis: Multi3DRefer: Grounding text description to multiple 3D objects + +
+ +
+ +
+ Zeming Gong + + + (undergrad, 2022, next SFU MSc) + +
+ +
+ +
+ Dave Zhenyu Chen + + + (visiting student, Spring 2022, PhD at Technical University of Munich; TUM-IAS-funded student co-advised with Matthias Nießner) +
+ +
+ +
+ Tommaso Campari + + + (visiting student, Spring 2022, PhD at University of Padova) + +
+ +
+ +
+ Enrico Cancelli + + + (visiting student, Spring 2024, PhD at University of Padova) + +
+ +
+ +
+ Aditi Jain + + + (visiting student, Summer 2022, Undergraduate at IIT Delhi) + +
+ +
+ +
+ Ning Wang + + + (visiting student, Spring 2023-2024, PhD at Wuhan University) + +
+ +
+ + +
+
+ +
+
+
+ +
+ + diff --git a/index.html b/index.html new file mode 100644 index 0000000..e336d35 --- /dev/null +++ b/index.html @@ -0,0 +1,5351 @@ + + + + + + + + + + Angel Xuan Chang | Angel Xuan Chang + + + + + + + + + + + + + +
+
+ + +

Angel Xuan Chang

+ +
+
+

+ I am an Associate Professor at Simon Fraser University. + Prior to this, I was a visiting research scientist at Facebook AI Research and a research scientist at Eloquent Labs, where I worked on dialogue. I received my Ph.D. in Computer Science from Stanford, where I was part of the Natural Language Processing Group and was advised by Chris Manning. + My research focuses on connecting language to 3D representations of shapes and scenes, and on grounding language for embodied agents in indoor environments. I have worked on methods for synthesizing 3D scenes and shapes from natural language, and on various datasets for 3D scene understanding. In general, I am interested in the semantics of shapes and scenes, the representation and acquisition of common sense knowledge, and reasoning using probabilistic models. + Some of my other interests include drawing and dance. +

+

+ +

News

+ + More... + +

+
+
+ Angel Xuan Chang +
+ angelx-{at}-sfu-[dot]-ca +

+ Associate Professor
+ School of Computing Science
+ Simon Fraser University
+ 3DLG + | GrUVi + | SFU NatLang
+ SFU AI/ML + | VINCI
+ Canada CIFAR AI Chair (Amii)
+ TUM-IAS Hans Fischer Fellow (2018-2022)
+ Google Scholar +
+
+
+ + + +
+
+ +
+
+

Recent Papers

+
+
+ + +
+
+ + ViGiL3D: A Linguistically Diverse Dataset for 3D Visual Grounding +   + +
+
+

+ + ViGiL3D: A Linguistically Diverse Dataset for 3D Visual Grounding + +

+ + + + Austin T. Wang, + + + + + + Zeming Gong, + + + + + + Angel X. Chang
+
+ + + arXiv:2501.01366 [cs.CV], January 2025
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Diorama: Unleashing Zero-shot Single-view 3D Scene Modeling +   + +
+
+

+ + Diorama: Unleashing Zero-shot Single-view 3D Scene Modeling + +

+ + + + Qirui Wu, + + + + + + Denys Iliash, + + + + + + Daniel Ritchie, + + + + + + Manolis Savva, + + + + + + Angel X. Chang
+
+ + + arXiv:2411.19492 [cs.CV], November 2024
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + NL-SLAM for OC-VLN: Natural Language Grounded SLAM for Object-Centric VLN +   + +
+
+

+ + NL-SLAM for OC-VLN: Natural Language Grounded SLAM for Object-Centric VLN + +

+ + + + Sonia Raychaudhuri, + + + + + + Duy Ta, + + + + + + Katrina Ashton, + + + + + + Angel X. Chang, + + + + + + Jiuguang Wang, + + + + + + Bernadette Bucher
+ + + + arXiv:2411.07848 [cs.RO], November 2024
+ + + + + pdf + + + + + + + + + + + | + video + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + SINGAPO: Single Image Controlled Generation of Articulated Parts in Objects +   + +
+
+

+ + SINGAPO: Single Image Controlled Generation of Articulated Parts in Objects + +

+ + + + Jiayi Liu, + + + + + + Denys Iliash, + + + + + + Angel X. Chang, + + + + + + Manolis Savva, + + + + + + Ali Mahdavi-Amiri
+ + + + arXiv:2410.16499 [cs.CV], October 2024
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + S2O: Static to Openable Enhancement for Articulated 3D Objects +   + +
+
+

+ + S2O: Static to Openable Enhancement for Articulated 3D Objects + +

+ + + + Denys Iliash, + + + + + + Hanxiao (Shawn) Jiang, + + + + + + Yiming Zhang, + + + + + + Manolis Savva, + + + + + + Angel X. Chang
+
+ + + arXiv:2409.18896 [cs.CV], September 2024
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ + + More... +
+
+ +
+ + + +
+ +

Grounding language to 3D

+ + +
+
+ + ViGiL3D: A Linguistically Diverse Dataset for 3D Visual Grounding +   + +
+
+

+ + ViGiL3D: A Linguistically Diverse Dataset for 3D Visual Grounding + +

+ + + + Austin T. Wang, + + + + + + Zeming Gong, + + + + + + Angel X. Chang
+
+ + + arXiv:2501.01366 [cs.CV], January 2025
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Duoduo CLIP: Efficient 3D Understanding with Multi-View Images +   + +
+
+

+ + Duoduo CLIP: Efficient 3D Understanding with Multi-View Images + +

+ + + + Han-Hung Lee, + + + + + + Yiming Zhang, + + + + + + Angel X. Chang
+
+ + + arXiv:2406.11579 [cs.CV], June 2024
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + TriCoLo: Trimodal Contrastive Loss for Text to Shape Retrieval +   + +
+
+

+ + TriCoLo: Trimodal Contrastive Loss for Text to Shape Retrieval + +

+ + + + Yue Ruan, + + + + + + Han-Hung Lee, + + + + + + Yiming Zhang, + + + + + Ke Zhang, + + + + + Angel X. Chang
+
+ + + WACV 2024, arXiv:2201.07366 [cs.CV], January 2022
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Multi3DRefer: Grounding Text Description to Multiple 3D Objects +   + +
+
+

+ + Multi3DRefer: Grounding Text Description to Multiple 3D Objects + +

+ + + + Yiming Zhang, + + + + + + Zeming Gong, + + + + + + Angel X. Chang
+
+ + + ICCV 2023
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + UniT3D: A Unified Transformer for 3D Dense Captioning and Visual Grounding +   + +
+
+

+ + UniT3D: A Unified Transformer for 3D Dense Captioning and Visual Grounding + +

+ + + + Dave Zhenyu Chen, + + + + + + Ronghang Hu, + + + + + + Xinlei Chen, + + + + + + Matthias Nießner, + + + + + + Angel X. Chang
+
+ + + ICCV 2023
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + D3Net: A Unified Speaker-Listener Architecture for 3D Dense Captioning and Visual Grounding +   + +
+
+

+ + D3Net: A Unified Speaker-Listener Architecture for 3D Dense Captioning and Visual Grounding + +

+ + + + Dave Zhenyu Chen, + + + + + + Qirui Wu, + + + + + + Matthias Nießner, + + + + + + Angel X. Chang
+
+ + + ECCV 2022
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Scan2Cap: Context-aware Dense Captioning in RGB-D Scans +   + +
+
+

+ + Scan2Cap: Context-aware Dense Captioning in RGB-D Scans + +

+ + + + Dave Zhenyu Chen, + + + + + + Ali Gholami, + + + + + + Matthias Nießner, + + + + + + Angel X. Chang
+
+ + + CVPR 2021
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + ScanRefer: 3D Object Localization in RGB-D Scans using Natural Language +   + +
+
+

+ + ScanRefer: 3D Object Localization in RGB-D Scans using Natural Language + +

+ + + + Dave Zhenyu Chen, + + + + + + Angel X. Chang, + + + + + + Matthias Nießner
+ + + + ECCV 2020
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + | + benchmark + + + + + + + + + + | webpage +
+
+ + +
+ +
+ +

Language-based content creation

+ + +
+
+ + SceneMotifCoder: Example-driven Visual Program Learning for Generating 3D Object Arrangements +   + +
+
+

+ + SceneMotifCoder: Example-driven Visual Program Learning for Generating 3D Object Arrangements + +

+ + + Hou In Ivan Tam, + + + + Hou In Derek Pun, + + + + + Austin T. Wang, + + + + + + Angel X. Chang, + + + + + + Manolis Savva
+ + + + 3DV 2025, arXiv:2408.02211 [cs.GR], August 2024
+ + + + + pdf + + + + + + + | + code + + + + + + + | + video + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Text-to-3D Shape Generation +   + +
+
+

+ + Text-to-3D Shape Generation + +

+ + + + Han-Hung Lee, + + + + + + Manolis Savva, + + + + + + Angel X. Chang
+
+ + + Eurographics STAR (State of the Art Report), CGF 2024
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Understanding Pure CLIP Guidance for Voxel Grid NeRF Models +   + +
+
+

+ + Understanding Pure CLIP Guidance for Voxel Grid NeRF Models + +

+ + + + Han-Hung Lee, + + + + + + Angel X. Chang
+
+ + + arXiv:2201.07366 [cs.CV], September 2022
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Text2Shape: Generating Shapes from Natural Language by Learning Joint Embeddings +   + +
+
+

+ + Text2Shape: Generating Shapes from Natural Language by Learning Joint Embeddings + +

+ + + + Kevin Chen, + + + + + + Christopher B. Choy, + + + + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Thomas Funkhouser, + + + + + + Silvio Savarese
+ + + + Proceedings of ACCV 2018 (oral), arXiv:1803.08495 [cs.CV]
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Text to 3D Scene Generation with Rich Lexical Grounding +   + +
+
+

+ + Text to 3D Scene Generation with Rich Lexical Grounding + +

+ + + + Angel X. Chang, + + + + + + Will Monroe, + + + + + + Manolis Savva, + + + + + + Christopher Potts, + + + + + + Christopher D. Manning
+ + + + Proceedings of ACL 2015
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Learning Spatial Knowledge for Text to 3D Scene Generation +   + +
+
+

+ + Learning Spatial Knowledge for Text to 3D Scene Generation + +

+ + + + Angel X. Chang, + + + + + + Manolis Savva, + + + + + + Christopher D. Manning
+ + + + Proceedings of the 2014 Conference on Empirical Methods in Natural Language Processing (EMNLP 2014)
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ + +
+ +
+ +

BIOSCAN

+ + +
+
+ + BIOSCAN-5M: A Multimodal Dataset for Insect Biodiversity +   + +
+
+

+ + BIOSCAN-5M: A Multimodal Dataset for Insect Biodiversity + +

+ + + Zahra Gharaee, + + + + + Scott C Lowe, + + + + + + Zeming Gong, + + + + + + Pablo Millan Arias, + + + + + Nicholas Pellegrino, + + + + + Austin T. Wang, + + + + + + Joakim Bruslund Haurum, + + + + + Iuliia Zarubiieva, + + + + + Lila Kari, + + + + + Dirk Steinke, + + + + + Graham W Taylor, + + + + + + Paul Fieguth, + + + + + + Angel X. Chang
+
+ + + NeurIPS D&B 2024, arXiv:2406.12723 [cs.LG], June 2024
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + CLIBD: Bridging Vision and Genomics for Biodiversity Monitoring at Scale +   + +
+
+

+ + CLIBD: Bridging Vision and Genomics for Biodiversity Monitoring at Scale + +

+ + + + Zeming Gong, + + + + + + Austin T. Wang, + + + + + + Xiaoliang Huo, + + + + + + Joakim Bruslund Haurum, + + + + + + Scott C Lowe, + + + + + + Graham W Taylor, + + + + + + Angel X. Chang
+
+ + + arXiv:2405.17537 [cs.AI], May 2024
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + A Step Towards Worldwide Biodiversity Assessment: The BIOSCAN-1M Insect Dataset +   + +
+
+

+ + A Step Towards Worldwide Biodiversity Assessment: The BIOSCAN-1M Insect Dataset + +

+ + + Zahra Gharaee, + + + + + Zeming Gong, + + + + + Nicholas Pellegrino, + + + + Iuliia Zarubiieva, + + + + + Joakim Bruslund Haurum, + + + + + + Scott C Lowe, + + + + + Jaclyn TA McKeown, + + + + Chris CY Ho, + + + + Joschka McLeod, + + + + Yi-Yun C Wei, + + + + Jireh Agda, + + + + Sujeevan Ratnasingham, + + + + Dirk Steinke, + + + + + Angel X. Chang, + + + + + + Graham W Taylor, + + + + + + Paul Fieguth
+ + + + NeurIPS Datasets and Benchmarks 2023
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ + +
+ +
+ +

Embodied AI

+ + +
+
+ + NL-SLAM for OC-VLN: Natural Language Grounded SLAM for Object-Centric VLN +   + +
+
+

+ + NL-SLAM for OC-VLN: Natural Language Grounded SLAM for Object-Centric VLN + +

+ + + + Sonia Raychaudhuri, + + + + + + Duy Ta, + + + + + + Katrina Ashton, + + + + + + Angel X. Chang, + + + + + + Jiuguang Wang, + + + + + + Bernadette Bucher
+ + + + arXiv:2411.07848 [cs.RO], November 2024
+ + + + + pdf + + + + + + + + + + + | + video + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + MOPA: Modular Object Navigation with PointGoal Agents +   + +
+
+

+ + MOPA: Modular Object Navigation with PointGoal Agents + +

+ + + + Sonia Raychaudhuri, + + + + + + Tommaso Campari, + + + + + + Unnat Jain, + + + + + + Manolis Savva, + + + + + + Angel X. Chang
+
+ + + WACV 2024, arXiv:2304.03696 [cs.RO, cs.CV], April 2023
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + HomeRobot: Open Vocabulary Mobile Manipulation +   + +
+
+

+ + HomeRobot: Open Vocabulary Mobile Manipulation + +

+ + + Sriram Yenamandra, + + + + Arun Ramachandran, + + + + + Karmesh Yadav, + + + + + Austin Wang, + + + + + Mukul Khanna, + + + + + + Theo Gervet, + + + + + Tsung-Yen Yang, + + + + Vidhi Jain, + + + + Alexander William Clegg, + + + + John Turner, + + + + + Zsolt Kira, + + + + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Devendra Singh Chaplot, + + + + + + Dhruv Batra, + + + + + + Roozbeh Mottaghi, + + + + + + Yonatan Bisk, + + + + + + Chris Paxton
+ + + + CoRL 2023
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + | + challenge + + + + | webpage +
+
+ +
+
+ + Exploiting Proximity-Aware Tasks for Embodied Social Navigation +   + +
+
+

+ + Exploiting Proximity-Aware Tasks for Embodied Social Navigation + +

+ + + + Enrico Cancelli, + + + + + + Tommaso Campari, + + + + + + Luciano Serafini, + + + + + + Angel X. Chang, + + + + + + Lamberto Ballan
+ + + + ICCV 2023
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ + Language-Aligned Waypoint (LAW) Supervision for Vision-and-Language Navigation in Continuous Environments +   + +
+
+

+ + Language-Aligned Waypoint (LAW) Supervision for Vision-and-Language Navigation in Continuous Environments + +

+ + + + Sonia Raychaudhuri, + + + + + + Shivansh Patel, + + + + + + Saim Wani, + + + + + + Unnat Jain, + + + + + + Angel X. Chang
+
+ + + EMNLP 2021 (short)
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Interpretation of Emergent Communication in Heterogeneous Collaborative Embodied Agents +   + +
+
+

+ + Interpretation of Emergent Communication in Heterogeneous Collaborative Embodied Agents + +

+ + + + Shivansh Patel, + + + + + + Saim Wani, + + + + + + Unnat Jain, + + + + + + Alexander Schwing, + + + + + + Svetlana Lazebnik, + + + + + + Manolis Savva, + + + + + + Angel X. Chang
+
+ + + ICCV 2021
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Rearrangement: A Challenge for Embodied AI +   + +
+
+

+ + Rearrangement: A Challenge for Embodied AI + +

+ + + + Dhruv Batra, + + + + + + Angel X. Chang, + + + + + + Sonia Chernova, + + + + + + Andrew J. Davison, + + + + + + Jia Deng, + + + + + + Vladlen Koltun, + + + + + + Sergey Levine, + + + + + + Jitendra Malik, + + + + + + Igor Mordatch, + + + + + + Roozbeh Mottaghi, + + + + + + Manolis Savva, + + + + + + Hao Su
+ + + + arXiv:2011.01975 [cs.AI], November 2020
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ + Multi-ON: Benchmarking Semantic Map Memory using Multi-Object Navigation +   + +
+
+

+ + Multi-ON: Benchmarking Semantic Map Memory using Multi-Object Navigation + +

+ + + + Saim Wani, + + + + + + Shivansh Patel, + + + + + + Unnat Jain, + + + + + + Angel X. Chang, + + + + + + Manolis Savva
+ + + + NeurIPS 2020
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + On evaluation of embodied navigation agents +   + +
+
+

+ + On evaluation of embodied navigation agents + +

+ + + + Peter Anderson, + + + + + + Angel X. Chang, + + + + + + Devendra Singh Chaplot, + + + + + + Alexey Dosovitskiy, + + + + + + Saurabh Gupta, + + + + + + Vladlen Koltun, + + + + + + Jana Kosecka, + + + + + + Jitendra Malik, + + + + + + Roozbeh Mottaghi, + + + + + + Manolis Savva, + + + + + + Amir R. Zamir
+ + + + arXiv:1807.06757 [cs.AI], July 2018
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ +

Simulation platforms

+ + +
+
+ + Habitat 2.0: Training Home Assistants to Rearrange their Habitat +   + +
+
+

+ + Habitat 2.0: Training Home Assistants to Rearrange their Habitat + +

+ + + + Andrew Szot, + + + + + Alexander Clegg, + + + + Eric Undersander, + + + + + Erik Wijmans, + + + + + + Yili Zhao, + + + + + John Turner, + + + + Noah Maestre, + + + + + Mustafa Mukadam, + + + + + + Devendra Singh Chaplot, + + + + + Oleksandr Maksymets, + + + + + Aaron Gokaslan, + + + + + + Vladimír Vondrus, + + + + + Sameer Dharur, + + + + + Franziska Meier, + + + + + Wojciech Galuba, + + + + + Angel X. Chang, + + + + + + Zsolt Kira, + + + + + + Vladlen Koltun, + + + + + + Jitendra Malik, + + + + + + Manolis Savva, + + + + + + Dhruv Batra
+ + + + NeurIPS 2021
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + | + post + + + + + + +
+
+ +
+
+ + SAPIEN: a SimulAted Part-based Interactive ENvironment +   + +
+
+

+ + SAPIEN: a SimulAted Part-based Interactive ENvironment + +

+ + + + Fanbo Xiang, + + + + + + Yuzhe Qin, + + + + + + Kaichun Mo, + + + + + + Yikuan Xia, + + + + + + Hao Zhu, + + + + + + Fangchen Liu, + + + + + + Minghua Liu, + + + + + + Hanxiao (Shawn) Jiang, + + + + + + Yifu Yuan, + + + + + + He Wang, + + + + + + Li Yi, + + + + + + Angel X. Chang, + + + + + + Leonidas Guibas, + + + + + + Hao Su
+ + + + CVPR 2020
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + MINOS: Multimodal Indoor Simulator for Navigation in Complex Environments +   + +
+
+

+ + MINOS: Multimodal Indoor Simulator for Navigation in Complex Environments + +

+ + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Alexey Dosovitskiy, + + + + + + Thomas Funkhouser, + + + + + + Vladlen Koltun
+ + + + arXiv:1712.03931 [cs.LG]
+ + + + + pdf + + + + + + + | + code + + + + + + + | + video + + + + + + + + + + + + + + + + | webpage +
+
+ + +
+ +
+ +

Articulated objects for interactive environments

+ + +
+
+ + SINGAPO: Single Image Controlled Generation of Articulated Parts in Objects +   + +
+
+

+ + SINGAPO: Single Image Controlled Generation of Articulated Parts in Objects + +

+ + + + Jiayi Liu, + + + + + + Denys Iliash, + + + + + + Angel X. Chang, + + + + + + Manolis Savva, + + + + + + Ali Mahdavi-Amiri
+ + + + arXiv:2410.16499 [cs.CV], October 2024
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + S2O: Static to Openable Enhancement for Articulated 3D Objects +   + +
+
+

+ + S2O: Static to Openable Enhancement for Articulated 3D Objects + +

+ + + + Denys Iliash, + + + + + + Hanxiao (Shawn) Jiang, + + + + + + Yiming Zhang, + + + + + + Manolis Savva, + + + + + + Angel X. Chang
+
+ + + arXiv:2409.18896 [cs.CV], September 2024
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + OPDMulti: Openable Part Detection for Multiple Objects +   + +
+
+

+ + OPDMulti: Openable Part Detection for Multiple Objects + +

+ + + + Xiaohao Sun, + + + + + + Hanxiao (Shawn) Jiang, + + + + + + Manolis Savva, + + + + + + Angel X. Chang
+
+ + + 3DV 2024, arXiv:2303.14087 [cs.CV], March 2023
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + MultiScan: Scalable RGBD scanning for 3D environments with articulated objects +   + +
+
+

+ + MultiScan: Scalable RGBD scanning for 3D environments with articulated objects + +

+ + + + Yongsen Mao, + + + + + + Yiming Zhang, + + + + + + Hanxiao (Shawn) Jiang, + + + + + + Angel X. Chang, + + + + + + Manolis Savva
+ + + + NeurIPS 2022
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Articulated 3D Human-Object Interactions from RGB Videos: An Empirical Analysis of Approaches and Challenges +   + +
+
+

+ + Articulated 3D Human-Object Interactions from RGB Videos: An Empirical Analysis of Approaches and Challenges + +

+ + + + Sanjay Haresh, + + + + + + Xiaohao Sun, + + + + + + Hanxiao (Shawn) Jiang, + + + + + + Angel X. Chang, + + + + + + Manolis Savva
+ + + + 3DV 2022
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + OPD: Single-view 3D Openable Part Detection +   + +
+
+

+ + OPD: Single-view 3D Openable Part Detection + +

+ + + + Hanxiao (Shawn) Jiang, + + + + + + Yongsen Mao, + + + + + + Manolis Savva, + + + + + + Angel X. Chang
+
+ + + ECCV 2022
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Motion Annotation Programs: A Scalable Approach to Annotating Kinematic Articulations in Large 3D Shape Collections +   + +
+
+

+ + Motion Annotation Programs: A Scalable Approach to Annotating Kinematic Articulations in Large 3D Shape Collections + +

+ + + + Xianghao Xu, + + + + + + David Charatan, + + + + + + Sonia Raychaudhuri, + + + + + + Hanxiao (Shawn) Jiang, + + + + + + Mae Heitmann, + + + + + + Vladimir Kim, + + + + + + Siddhartha Chaudhuri, + + + + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Daniel Ritchie
+ + + + 3DV 2020
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + | + demo + + + + + + + + | webpage +
+
+ + +
+ +
+ +

Large-scale datasets for 3D deep learning

+ + +
+
+ + Habitat Synthetic Scenes Dataset (HSSD-200): An Analysis of 3D Scene Scale and Realism Tradeoffs for ObjectGoal Navigation +   + +
+
+

+ + Habitat Synthetic Scenes Dataset (HSSD-200): An Analysis of 3D Scene Scale and Realism Tradeoffs for ObjectGoal Navigation + +

+ + + + Mukul Khanna, + + + + + + Yongsen Mao, + + + + + + Hanxiao (Shawn) Jiang, + + + + + + Sanjay Haresh, + + + + + + Brennan Shacklett, + + + + + + Dhruv Batra, + + + + + Alexander William Clegg, + + + + Eric Undersander, + + + + + Angel X. Chang, + + + + + + Manolis Savva
+ + + + CVPR 2024, arXiv:2306.11290 [cs.CV], June 2023
+ + + + + pdf + + + + + + + | + code + + + + + | + data + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Habitat-Matterport 3D Semantics Dataset +   + +
+
+

+ + Habitat-Matterport 3D Semantics Dataset + +

+ + + + Karmesh Yadav, + + + + + + Ram Ramrakhya, + + + + + + Santhosh K. Ramakrishnan, + + + + + + Theo Gervet, + + + + + John Turner, + + + + + Aaron Gokaslan, + + + + + Noah Maestre, + + + + + Angel X. Chang, + + + + + + Dhruv Batra, + + + + + + Manolis Savva, + + + + + Alexander William Clegg, + + + + + Devendra Singh Chaplot
+ + + + CVPR 2023
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Habitat-Matterport 3D Dataset (HM3D): 1000 Large-scale 3D Environments for Embodied AI +   + +
+
+

+ + Habitat-Matterport 3D Dataset (HM3D): 1000 Large-scale 3D Environments for Embodied AI + +

+ + + + Santhosh K. Ramakrishnan, + + + + + + Aaron Gokaslan, + + + + + + Erik Wijmans, + + + + + Oleksandr Maksymets, + + + + Alexander Clegg, + + + + John Turner, + + + + Eric Undersander, + + + + Wojciech Galuba, + + + + Andrew Westbury, + + + + + Angel X. Chang, + + + + + + Manolis Savva, + + + + + + Yili Zhao, + + + + + + Dhruv Batra
+ + + + NeurIPS Datasets and Benchmarks Track 2021
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Mirror3D: Depth Refinement for Mirror Surfaces +   + +
+
+

+ + Mirror3D: Depth Refinement for Mirror Surfaces + +

+ + + + Jiaqi Tan, + + + + + + Weijie (Lewis) Lin, + + + + + + Angel X. Chang, + + + + + + Manolis Savva
+ + + + CVPR 2021
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + PartNet: A Large-scale Benchmark for Fine-grained and Hierarchical Part-level 3D Object Understanding +   + +
+
+

+ + PartNet: A Large-scale Benchmark for Fine-grained and Hierarchical Part-level 3D Object Understanding + +

+ + + + Kaichun Mo, + + + + + + Shilin Zhu, + + + + + + Angel X. Chang, + + + + + + Li Yi, + + + + + + Subarna Tripathi, + + + + + + Leonidas Guibas, + + + + + + Hao Su
+ + + + CVPR 2019, arXiv:1812.02713 [cs.CV]
+ + + + + pdf + + + + + + + + + + + | + video + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Scan2CAD: Learning CAD Model Alignment in RGB-D Scans +   + +
+
+

+ + Scan2CAD: Learning CAD Model Alignment in RGB-D Scans + +

+ + + + Armen Avetisyan, + + + + + + Manuel Dahnert, + + + + + + Angela Dai, + + + + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Matthias Nießner
+ + + + CVPR 2019 (oral), arXiv:1811.11187 [cs.CV]
+ + + + + pdf + + + + + + + | + code + + + + + + + | + video + + + + + + + + + | + benchmark + + + + + + + + + + +
+
+ +
+
+ + Matterport3D: Learning from RGB-D Data in Indoor Environments +   + +
+
+

+ + Matterport3D: Learning from RGB-D Data in Indoor Environments + +

+ + + + Angel X. Chang, + + + + + + Angela Dai, + + + + + + Thomas Funkhouser, + + + + + + Maciej Halber, + + + + + + Matthias Nießner, + + + + + + Manolis Savva, + + + + + + Shuran Song, + + + + + + Andy Zeng, + + + + + + Yinda Zhang
+ + + + Proceedings of 3DV 2017, arXiv:1709.06158 [cs.CV]
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + ScanNet: Richly-annotated 3D Reconstructions of Indoor Scenes +   + +
+
+

+ + ScanNet: Richly-annotated 3D Reconstructions of Indoor Scenes + +

+ + + + Angela Dai, + + + + + + Angel X. Chang, + + + + + + Manolis Savva, + + + + + + Maciej Halber, + + + + + + Thomas Funkhouser, + + + + + + Matthias Nießner
+ + + + Proceedings of CVPR 2017 (spotlight), arXiv:1702.04405 [cs.CV]
+ + + + + pdf + + + + + + + | + code + + + + + + + | + video + + + + + + + + + | + benchmark + + + + + + + + + + | webpage +
+
+ +
+
+ + ShapeNet: An Information-Rich 3D Model Repository +   + +
+
+

+ + ShapeNet: An Information-Rich 3D Model Repository + +

+ + + + Angel X. Chang, + + + + + + Thomas Funkhouser, + + + + + + Leonidas Guibas, + + + + + + Pat Hanrahan, + + + + + + Qixing Huang, + + + + + + Zimo Li, + + + + + + Silvio Savarese, + + + + + + Manolis Savva, + + + + + + Shuran Song, + + + + + + Hao Su, + + + + + + Jianxiong Xiao, + + + + + + Li Yi, + + + + + + Fisher Yu
+ + + + arXiv:1512.03012 [cs.GR], Dec 2015
+ + + + + pdf + + + + + | + bib + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ + +
+ +
+ +

3D scene understanding and generation

+ + +
+
+ + Diorama: Unleashing Zero-shot Single-view 3D Scene Modeling +   + +
+
+

+ + Diorama: Unleashing Zero-shot Single-view 3D Scene Modeling + +

+ + + + Qirui Wu, + + + + + + Denys Iliash, + + + + + + Daniel Ritchie, + + + + + + Manolis Savva, + + + + + + Angel X. Chang
+
+ + + arXiv:2411.19492 [cs.CV], November 2024
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Roominoes: Learning to Assemble 3D Rooms into Floor Plans +   + +
+
+

+ + Roominoes: Learning to Assemble 3D Rooms into Floor Plans + +

+ + + + Kai Wang, + + + + + + Xianghao Xu, + + + + + Leon Lei, + + + + Selena Ling, + + + + Natalie Lindsay, + + + + + Angel X. Chang, + + + + + + Manolis Savva, + + + + + + Daniel Ritchie
+ + + + SGP 2021
+ + + + + pdf + + + + + + + + + + + | + video + + + + + + + + + + + + + + + + +
+
+ +
+
+ + Plan2Scene: Converting Floorplans to 3D Scenes +   + +
+
+

+ + Plan2Scene: Converting Floorplans to 3D Scenes + +

+ + + + Madhawa Vidanapathirana, + + + + + + Qirui Wu, + + + + + + Yasutaka Furukawa, + + + + + + Angel X. Chang, + + + + + + Manolis Savva
+ + + + CVPR 2021
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + PlanIT: Planning and Instantiating Indoor Scenes with Relation Graph and Spatial Prior Networks +   + +
+
+

+ + PlanIT: Planning and Instantiating Indoor Scenes with Relation Graph and Spatial Prior Networks + +

+ + + + Kai Wang, + + + + + + Yu-An Lin, + + + + + + Ben Weissmann, + + + + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Daniel Ritchie
+ + + + SIGGRAPH 2019
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ + Hierarchy Denoising Recursive Autoencoders for 3D Scene Layout Prediction +   + +
+
+

+ + Hierarchy Denoising Recursive Autoencoders for 3D Scene Layout Prediction + +

+ + + + Yifei Shi, + + + + + + Angel X. Chang, + + + + + + Zhelun Wu, + + + + + + Manolis Savva, + + + + + + Kai Xu
+ + + + CVPR 2019, arXiv:1903.03757 [cs.CV]
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Deep Convolutional Priors for Indoor Scene Synthesis +   + +
+
+

+ + Deep Convolutional Priors for Indoor Scene Synthesis + +

+ + + + Kai Wang, + + + + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Daniel Ritchie
+ + + + SIGGRAPH 2018
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ + Semantic Scene Completion from a Single Depth Image +   + +
+
+

+ + Semantic Scene Completion from a Single Depth Image + +

+ + + + Shuran Song, + + + + + + Fisher Yu, + + + + + + Andy Zeng, + + + + + + Angel X. Chang, + + + + + + Manolis Savva, + + + + + + Thomas Funkhouser
+ + + + Proceedings of CVPR 2017 (oral), arXiv:1611.08974 [cs.CV]
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ + +
+ +
+ +
+
+
+ +
+ + diff --git a/news.html b/news.html new file mode 100644 index 0000000..1a196d5 --- /dev/null +++ b/news.html @@ -0,0 +1,270 @@ + + + + + + + + + + Angel Xuan Chang - News | Angel Xuan Chang + + + + + + + + + + + + + +
+
+ + +

Angel Xuan Chang

+ +
+
+

+ I am an Associate Professor at Simon Fraser University. + Prior to this, I was a visiting research scientist at Facebook AI Research and a research scientist at Eloquent Labs, where I worked on dialogue. I received my Ph.D. in Computer Science from Stanford, where I was part of the Natural Language Processing Group and was advised by Chris Manning. + My research focuses on connecting language to 3D representations of shapes and scenes, and on grounding language for embodied agents in indoor environments. I have worked on methods for synthesizing 3D scenes and shapes from natural language, and on various datasets for 3D scene understanding. In general, I am interested in the semantics of shapes and scenes, the representation and acquisition of common sense knowledge, and reasoning using probabilistic models. + Some of my other interests include drawing and dance. +

+

+ +

+
+
+ Angel Xuan Chang +
+ angelx-{at}-sfu-[dot]-ca +

+ Associate Professor
+ School of Computing Science
+ Simon Fraser University
+ 3DLG + | GrUVi + | SFU NatLang
+ SFU AI/ML + | VINCI
+ Canada CIFAR AI Chair (Amii)
+ TUM-IAS Hans Fischer Fellow (2018-2022)
+ Google Scholar +
+
+
+ + + +
+
+
+
+

News

+
+
+ +
+
+ +
+
+
+ +
+ + diff --git a/news.json b/news.json new file mode 100644 index 0000000..aa316ce --- /dev/null +++ b/news.json @@ -0,0 +1,306 @@ +[ +{ + "date": "December 2024", + "description": "We are excited that NeurIPS will be in Vancouver and will be running a series of exciting talks on the mornings of Dec 11-13. See event page for details." +}, +{ + "date": "November 2024", + "description": "Two papers accepted at 3DV 2025. Congratulations to " +}, +{ + "date": "October 14, 2024", + "description": "Talk (remote) at Workshop on AI Meets Autonomy: Vision, Language, and Autonomous Systems at IROS 2024" +}, +{ + "date": "September 2024", + "description": "BIOSCAN-5M: A Multimodal Dataset for Insect Biodiversity accepted at NeurIPS datasets and benchmarks 2024." +}, +{ + "date": "August 16, 2024", + "description": "Talk (remote) at Workshop on Advances in Language and Vision Research (ALVR) at ACL 2024" +}, +{ + "date": "July, 2024", + "description": "Congratulations to Qirui and Sonia on the acceptance of their paper R3DS: Reality-linked 3D Scenes for Panoramic Scene Understanding at ECCV 2024." +}, +{ + "date": "July 11, 2024", + "description": "Talk at CIFAR DLRLSS on language and 3D" +}, +{ + "date": "June 2024", + "description": "Workshops at CVPR 2024. Papers at workshops and the main conference" +}, +{ + "date": "June 2024", + "description": "I have received tenure!" +}, +{ + "date": "May 24, 2024", + "description": "I'm presenting a talk on Text to 3D at Upper Bound in Edmonton." +}, +{ + "date": "April 22, 2024", + "description": "Han-Hung presented his state-of-the-art report (STAR) on Text to 3D Shape Generation at Eurographics 2024." +}, +{ + "date": "March, 2024", + "description": "Qirui and Xiaohao presented their papers at 3DV 2024 on Generalizing Single-View 3D Shape Retrieval to Occlusions and Unseen Objects and OPDMulti: Openable Part Detection for Multiple Objects." +}, +{ + "date": "December, 2023", + "description": "Congratulations to Yiming Zhang for defending his MSc thesis!" +}, +{ + "date": "October 2023", + "description": "Two papers accepted at WACV 2024. Congratulations to " +}, +{ + "date": "October 2023", + "description": "Two papers accepted at 3DV 2024. Congratulations to " +}, +{ + "date": "October 2023", + "description": "Workshops at ICCV 2023. " +}, +{ + "date": "September 2023", + "description": "Congratulations to Zeming (together with Zahra and Nick from U. of Waterloo) on the acceptance of their work BIOSCAN-1M Insect dataset at NeurIPS datasets and benchmarks 2023." +}, +{ + "date": "July 2023", + "description": "Three papers accepted at ICCV 2023. Congratulations to " +}, +{ + "date": "July 24, 2023", + "description": "Lecture on natural language processing at Invent the Future: AI Scholars Program at Simon Fraser University" +}, +{ + "date": "July 13, 2023", + "description": "Talk on Connecting 3D and Language at Seoul National University." +}, +{ + "date": "July 4, 2023", + "description": "Talk on Connecting 3D and Language at University of Padova." +}, +{ + "date": "July 1, 2023", + "description": "Tutorial on Language and 3D at SGP 2023." +}, +{ + "date": "June 2023", + "description": "Workshops and talks at CVPR 2023. " +}, +{ + "date": "March 2023", + "description": "Congratulations to Shawn Jiang for defending his MSc thesis!"
+}, +{ + "date": "February 20, 2023", + "description": "Talk on Connecting 3D and Language at 3DL group seminar at University of Chicago" +}, +{ + "date": "January 7, 2023", + "description": "Keynote at WACV 2023 workshop on Photorealistic Image and Environment Synthesis for Computer Vision (PIES-CV)." +}, +{ + "date": "December 15, 2022", + "description": "Invited talk at CoRL 2022 workshop on Learning, Perception, and Abstraction for Long-Horizon Planning." +}, +{ + "date": "December 2022", + "description": "Habitat Rearrangement Competition (Dec 8th) at NeurIPS 2022." +}, +{ + "date": "October 2022", + "description": "Workshops at ECCV 2022. " +}, +{ + "date": "September 2022", + "description": "One paper accepted at NeurIPS 2022. Congratulations to Yongsen, Yiming, and Shawn for their work on MultiScan." +}, +{ + "date": "September 2022", + "description": "One paper accepted at 3DV 2022. Congratulations to Sanjay, Xiaohao, and Shawn for their work on understanding articulated 3D human-object interactions." +}, +{ + "date": "July 2022", + "description": "Two papers accepted at ECCV 2022. Congratulations to " +}, +{ + "date": "July 18, 2022", + "description": "Lecture on natural language processing at Invent the Future: AI Scholars Program at Simon Fraser University" +}, +{ + "date": "June 2022", + "description": "Workshops and Tutorials at CVPR 2022. " +}, +{ + "date": "June 2022", + "description": "3DVQA work by Yasaman and Leon will be presented at CRV 2022." +}, +{ + "date": "April 2022", + "description": "Congratulations to Ali Gholami and Yue Ruan for defending their MSc theses!" +}, +{ + "date": "March 2022", + "description": "I'm excited to be part of BIOSCAN, a large, worldwide collaboration to identify and categorize the species of the world, and understand how they interact. I'm looking for a postdoc to use the latest advances in machine learning to develop algorithms to contribute to this project." +}, +{ + "date": "February 2022", + "description": "We are hosting the Multi-Object Navigation challenge, as part of the embodied AI workshop at CVPR 2022." +}, +{ + "date": "January 2022", + "description": "I'm happy to be hosting Dave Zhenyu Chen and Tommaso Campari as visiting students this term at SFU." +}, +{ + "date": "October 2021", + "description": "Habitat 2.0 to be presented at NeurIPS 2021 as a spotlight, and HM3D accepted to NeurIPS 2021 datasets and benchmarks." +}, +{ + "date": "September 2021", + "description": "Short paper on using language-aligned waypoints for supervising VLN-CE accepted to EMNLP 2021. Congratulations to Sonia, Shivansh, Saim, and Unnat!" +}, +{ + "date": "August, 2021", + "description": "Congratulations to Leon Kochiev for defending his MSc thesis!" +}, +{ + "date": "July, 2021", + "description": "Paper studying communication mechanisms between embodied AI agents for navigation accepted at ICCV 2021. Great job by Shivansh, Saim, and Unnat!" +}, +{ + "date": "July 21, 2021", + "description": "Lecture on natural language processing at Invent the Future: AI Scholars Program at Simon Fraser University" +}, +{ + "date": "July 10-14, 2021", + "description": "SGP (Symposium for Geometry Processing) happening virtually!" +}, +{ + "date": "June 2021", + "description": "Roominoes accepted to SGP 2021. Congratulations to Kai!" +}, +{ + "date": "May 2021", + "description": "Challenges and Workshops at CVPR 2021. " +}, +{ + "date": "March 2021", + "description": "Three papers accepted at CVPR 2021. Great job by Madhawa, Qirui, Jiaqi, Lewis, Dave, and Ali!"
+}, +{ + "date": "January 2021", + "description": "I'm co-chairing SGP (Symposium for Geometry Processing) with Alec Jacobson." +}, +{ + "date": "November 25, 2020", + "description": "Invited talk at Emerging Technologies: BC's AI Showcase" +}, +{ + "date": "November, 2020", + "description": "Whitepaper on Rearrangement: A Challenge for Embodied AI" +}, +{ + "date": "October, 2020", + "description": "Papers accepted at UIST 2020, 3DV 2020, and NeurIPS 2020" +}, +{ + "date": "July 16, 2020", + "description": "Lecture on natural language processing at Invent the Future: AI Scholars Program at Simon Fraser University" +}, +{ + "date": "July 9, 2020", + "description": "Invited talk at the Workshop on Advances in Language and Vision Research (ALVR) at ACL 2020" +}, +{ + "date": "July 8, 2020", + "description": "ScanNet received the Symposium on Geometry Processing (SGP) dataset award." +}, +{ + "date": "July, 2020", + "description": "Paper on localizing objects based on natural language descriptions in 3D scans (ScanRefer) is accepted at ECCV 2020." +}, +{ + "date": "June, 2020", + "description": "I'm co-organizing three workshops at CVPR 2020 " +}, +{ + "date": "February, 2020", + "description": "Paper on simulation platform with articulated parts (SAPIEN) is accepted at CVPR 2020." +}, +{ + "date": "January, 2020", + "description": "I'm teaching CMPT 825 Natural Language Processing this spring" +}, +{ + "date": "August, 2019", + "description": "I started as an assistant professor at Simon Fraser University" +}, +{ + "date": "July 25, 2019", + "description": "Lecture on deep learning for images at the DLRL Summer School at University of Alberta" +}, +{ + "date": "July 18, 2019", + "description": "Lecture on natural language processing at Invent the Future: AI Scholars Program at Simon Fraser University" +}, +{ + "date": "June 17, 2019", + "description": "Invited talk at the 360 Indoor Scene Understanding and Modeling (SUMO) workshop at CVPR" +}, +{ + "date": "May, 2019", + "description": "I'm visiting Facebook AI Research this summer" +}, +{ + "date": "March, 2019", + "description": "PartNet prerelease v0 is now available!" +}, +{ + "date": "March, 2019", + "description": "Two upcoming workshops at CVPR 2019: 3D Scene Generation workshop and ScanNet Indoor Scene Understanding workshop" +}, +{ + "date": "March, 2019", + "description": "Three papers accepted at CVPR 2019" +}, +{ + "date": "Dec 3, 2018", + "description": "I'm honored to be named to the first cohort of CIFAR AI Chairs." +}, +{ + "date": "Sept 9, 2018", + "description": "Visual Learning and Embodied Agents in Simulation Environments workshop at ECCV 2018." +}, +{ + "date": "Sept, 2018", + "description": "Visiting Matthias Nießner at TUM." +}, +{ + "date": "July 11, 2018", + "description": "ShapeNet received the Symposium on Geometry Processing (SGP) dataset award." +}, +{ + "date": "June 29, 2018", + "description": "Invited talk at RSS 2018 workshop on New Benchmarks, Metrics, and Competitions for Robotic Learning." +}, +{ + "date": "June 11, 2018", + "description": "ScanNet v2 release and ScanNet Benchmark challenge announced." +}, +{ + "date": "June 3, 2018", + "description": "I joined Eloquent Labs." +}, +{ + "date": "May 2018", + "description": "Co-organizing ECCV 2018 workshop on Visual Learning and Embodied Agents in Simulation Environments." +}, +{ + "date": "March 2018", + "description": "I was awarded the TUM-IAS Hans Fischer Fellowship."
+} +] \ No newline at end of file diff --git a/people.json b/people.json new file mode 100644 index 0000000..812d8d3 --- /dev/null +++ b/people.json @@ -0,0 +1,758 @@ +{ + "angel": { + "name": "Angel X. Chang", + "special": true + }, + "manolis": { + "name": "Manolis Savva", + "web": "https://msavva.github.io/" + }, + "pat": { + "name": "Pat Hanrahan", + "web": "http://graphics.stanford.edu/~hanrahan/" + }, + "matt": { + "name": "Matthew Fisher", + "web": "https://techmatt.github.io/" + }, + "matthias": { + "name": "Matthias Nießner", + "web": "https://niessnerlab.org/members/matthias_niessner/profile.html" + }, + "angie": { + "name": "Angela Dai", + "web": "http://cs.stanford.edu/people/adai" + }, + "maciej": { + "name": "Maciej Halber", + "web": "https://mhalber.github.io/" + }, + "maneesh": { + "name": "Maneesh Agrawala", + "web": "http://graphics.stanford.edu/~maneesh/" + }, + "mihail": { + "name": "Mihail Eric", + "web": "https://www.mihaileric.com/" + }, + "gilbert": { + "name": "Gilbert Bernstein", + "web": "http://graphics.stanford.edu/~gilbo/" + }, + "chrisman": { + "name": "Christopher D. Manning", + "web": "http://nlp.stanford.edu/manning/" + }, + "chrispotts": { + "name": "Christopher Potts", + "web": "http://web.stanford.edu/~cgpotts/" + }, + "tom": { + "name": "Thomas Funkhouser", + "web": "http://www.cs.princeton.edu/~funk/" + }, + "leo": { + "name": "Leonidas Guibas", + "web": "http://geometry.stanford.edu/member/guibas/" + }, + "HaoSu": { + "name": "Hao Su", + "web": "http://cseweb.ucsd.edu/~haosu/" + }, + "LiYi": { + "name": "Li Yi", + "web": "https://ericyi.github.io/" + }, + "qixing": { + "name": "Qixing Huang", + "web": "http://www.cs.utexas.edu/~huangqx/" + }, + "zimo": { + "name": "Zimo Li" + }, + "silvio": { + "name": "Silvio Savarese", + "web": "http://cvgl.stanford.edu/silvio/" + }, + "shuran": { + "name": "Shuran Song", + "web": "http://vision.princeton.edu/people/shurans/" + }, + "andy": { + "name": "Andy Zeng", + "web": "http://andyzeng.github.io/" + }, + "yinda": { + "name": "Yinda Zhang", + "web": "http://robots.princeton.edu/people/yindaz/" + }, + "kyle": { + "name": "Kyle Genova", + "web": "http://www.kylegenova.com/" + }, + "lin": { + "name": "Lin Shao", + "web": "https://linsats.github.io/" + }, + "jianxiong": { + "name": "Jianxiong Xiao", + "web": "http://www.jianxiongxiao.com/" + }, + "fisher": { + "name": "Fisher Yu", + "web": "http://yf.io/" + }, + "will": { + "name": "Will Monroe", + "web": "https://wmonroeiv.github.io/" + }, + "sebastian": { + "name": "Sebastian Schuster", + "web": "http://sebschu.com/" + }, + "ranjay": { + "name": "Ranjay Krishna", + "web": "http://www.ranjaykrishna.com/" + }, + "fei-fei": { + "name": "Li Fei-Fei", + "web": "http://vision.stanford.edu/feifeili/" + }, + "heeyoung": { + "name": "Heeyoung Lee" + }, + "marta": { + "name": "Marta Recasens", + "web": "http://clic.ub.edu/users/marta-recasens" + }, + "yves": { + "name": "Yves Peirsman", + "web": "http://nlp.yvespeirsman.be/" + }, + "mihai": { + "name": "Mihai Surdeanu", + "web": "http://www.surdeanu.name/mihai/" + }, + "nate": { + "name": "Nathanael Chambers", + "web": "http://www.usna.edu/Users/cs/nchamber/" + }, + "danj": { + "name": "Dan Jurafsky", + "web": "http://www.stanford.edu/~jurafsky/" + }, + "davidm": { + "name": "David McClosky", + "web": "http://nlp.stanford.edu/~mcclosky/" + }, + "eneko": { + "name": "Eneko Agirre", + "web": "http://ixa2.si.ehu.es/~jipagbee/" + }, + "val": { + "name": "Valentin I. 
Spitkovsky", + "web": "http://nlp.stanford.edu/valentin/" + }, + "johnbauer": { + "name": "John Bauer" + }, + "julie": { + "name": "Julie Tibshirani" + }, + "jean": { + "name": "Jean Y. Wu" + }, + "osbert": { + "name": "Osbert Bastani", + "web": "https://obastani.github.io/" + }, + "gabor": { + "name": "Gabor Angeli", + "web": "http://cs.stanford.edu/~angeli/" + }, + "arun": { + "name": "Arun Tejasvi Chaganty", + "web": "http://arun.chagantys.org/" + }, + "KevinReschke": { + "name": "Kevin Reschke", + "web": "https://sites.google.com/site/kevinreschke/" + }, + "KeithSiilats": { + "name": "Keith Siilats" + }, + "ericyeh": { + "name": "Eric Yeh", + "web": "http://www.ai.sri.com/people/yeh/" + }, + "sonal": { + "name": "Sonal Gupta", + "web": "http://www.cs.stanford.edu/people/sonal/" + }, + "hiyan": { + "name": "Hiyan Alshawi" + }, + "tianrong": { + "name": "Tianrong Zhang" + }, + "VidyaSetlur": { + "name": "Vidya Setlur", + "web": "https://research.tableau.com/user/vidya-setlur" + }, + "EnamulHoque": { + "name": "Enamul Hoque", + "web": "https://www.yorku.ca/enamulh/" + }, + "DaeHyunKim": { + "name": "Dae Hyun Kim", + "web": "https://dhkim16.github.io/" + }, + "SarahBattersby": { + "name": "Sarah E. Battersby", + "web": "https://research.tableau.com/user/sarah-battersby" + }, + "MelanieTory": { + "name": "Melanie Tory", + "web": "https://research.tableau.com/user/melanie-tory" + }, + "RichGossweiler": { + "name": "Rich Gossweiler", + "web": "https://research.tableau.com/user/rich-gossweiler" + }, + "christiane": { + "name": "Christiane Fellbaum", + "web": "https://www.cs.princeton.edu/~fellbaum/" + }, + "alexey": { + "name": "Alexey Dosovitskiy", + "web": "https://scholar.google.de/citations?user=FXNJRDoAAAAJ&hl=en" + }, + "vladlen": { + "name": "Vladlen Koltun", + "web": "http://vladlen.info/" + }, + "grace": { + "name": "Grace Muzny", + "web": "http://nlp.stanford.edu/~muzny/" + }, + "ChrisChoy": { + "name": "Christopher B. 
Choy", + "web": "https://chrischoy.github.io/" + }, + "DanielRitchie": { + "name": "Daniel Ritchie", + "web": "https://dritchie.github.io/" + }, + "ArmenAvetisyan": { + "name": "Armen Avetisyan", + "web": "https://niessnerlab.org/members/armen_avetisyan/profile.html" + }, + "ManuelDahnert": { + "name": "Manuel Dahnert", + "web": "https://niessnerlab.org/members/manuel_dahnert/profile.html" + }, + "KaichunMo": { + "name": "Kaichun Mo", + "web": "https://cs.stanford.edu/~kaichun" + }, + "HeWang": { + "name": "He Wang", + "web": "https://hughw19.github.io/" + }, + "ShilinZhu": { + "name": "Shilin Zhu", + "web": "http://cseweb.ucsd.edu/~shz338/" + }, + "SubarnaTripathi": { + "name": "Subarna Tripathi", + "web": "http://acsweb.ucsd.edu/~stripath/" + }, + "KaiWang": { + "name": "Kai Wang", + "web": "https://kwang-ether.github.io/" + }, + "KevinChen": { + "name": "Kevin Chen", + "web": "https://cs.stanford.edu/~kchen92" + }, + "MichaelFang": { + "name": "Michael Fang" + }, + "RishiMago": { + "name": "Rishi Mago" + }, + "PranavKrishna": { + "name": "Pranav Krishna" + }, + "YifeiShi": { + "name": "Yifei Shi" + }, + "ZhelunWu": { + "name": "Zhelun Wu" + }, + "KaiXu": { + "name": "Kai Xu", + "web": "https://kevinkaixu.net/" + }, + "YuAnLin": { + "name": "Yu-An Lin" + }, + "BenWeissmann": { + "name": "Ben Weissmann" + }, + "PeterAnderson": { + "name": "Peter Anderson", + "web": "https://www.panderson.me/" + }, + "DevendraChaplot": { + "name": "Devendra Singh Chaplot", + "web": "http://www.cs.cmu.edu/~dchaplot/" + }, + "SaurabhGupta": { + "name": "Saurabh Gupta", + "web": "http://saurabhg.web.illinois.edu/" + }, + "JanaKosecka": { + "name": "Jana Kosecka", + "web": "https://cs.gmu.edu/~kosecka/" + }, + "JitendraMalik": { + "name": "Jitendra Malik", + "web": "https://people.eecs.berkeley.edu/~malik/" + }, + "RoozbehMottaghi": { + "name": "Roozbeh Mottaghi", + "web": "https://cs.stanford.edu/~roozbeh/" + }, + "AmirZamir": { + "name": "Amir R. Zamir", + "web": "https://cs.stanford.edu/~amirz/" + }, + "FanboXiang": { + "name": "Fanbo Xiang", + "web": "https://www.fbxiang.com/" + }, + "YuzheQin": { + "name": "Yuzhe Qin", + "web": "https://yzqin.github.io/" + }, + "YikuanXia": { + "name": "Yikuan Xia" + }, + "HaoZhu": { + "name": "Hao Zhu", + "web": "https://berniezhu.github.io/" + }, + "FangchenLiu": { + "name": "Fangchen Liu", + "web": "https://fangchenliu.github.io/" + }, + "MinghuaLiu": { + "name": "Minghua Liu", + "web": "http://cseweb.ucsd.edu/~mil070/" + }, + "YifuYuan": { + "name": "Yifu Yuan" + }, + "JustinDieter": { + "name": "Justin Dieter" + }, + "TianWang": { + "name": "Tian Wang" + }, + "madhawa": { + "name": "Madhawa Vidanapathirana", + "web": "https://madhawav.github.io/" + }, + "yasu": { + "name": "Yasutaka Furukawa", + "web": "https://www.cs.sfu.ca/~furukawa/" + }, + "YasamanEtesam": { + "name": "Yasaman Etesam", + "tags": ["alumni"], + "alumni": [ + "Fall 2019" + ] + }, + "AliGholami": { + "name": "Ali Gholami", + "tags": ["alumni"], + "alumni": [ + { + "description": "M.Sc. Fall 2019 to Spring 2022, next Zippin" , + "thesis": { + "title": "Dense Captioning for 3D Environments using Natural Language", + "url": "https://theses.lib.sfu.ca/file/thesis/6920" + } + } + ] + }, + "SoniaRaychaudhuri": { + "name": "Sonia Raychaudhuri", + "web": "https://sonia-raychaudhuri.github.io/", + "tags": ["student", "PhD"], + "details": "Fall 2020" + }, + "LeonKochiev": { + "name": "Leon Kochiev", + "tags": ["alumni"], + "alumni": [ + { + "description": "M.Sc. 
Fall 2019 to Summer 2021, next Integrant" , + "thesis": { + "title": "Neural State Machine for 2D and 3D Visual Question Answering", + "url": "https://theses.lib.sfu.ca/file/thesis/6675" + } + } + ] + }, + "YueRuan": { + "name": "Yue Ruan", + "tags": ["alumni"], + "alumni": [ + { + "description": "M.Sc. Fall 2019 to Spring 2022, next Amazon" , + "thesis": { + "title": "TriCoLo: Trimodal Contrastive Loss for Text to Shape Retrieval", + "url": "https://theses.lib.sfu.ca/file/thesis/6930" + } + } + ] + }, + "AkshitSharma": { + "name": "Akshit Sharma", + "tags": ["alumni"], + "alumni": [ + { + "description": "M.Sc. Fall 2019 to Fall 2022", + "thesis": { + "title": "DenseRefer3D: A Language and 3D Dataset for Coreference Resolution and Referring Expression Comprehension", + "url": "https://theses.lib.sfu.ca/file/thesis/7202" + } + } + ] + }, + "HanxiaoJiang": { + "name": "Hanxiao (Shawn) Jiang", + "web": "https://jianghanxiao.github.io/", + "tags": ["alumni"], + "alumni": [{ + "description": "undergrad, Fall 2019 - 2020, M.Sc. Fall 2020 to Summer 2023, next UIUC PhD", + "thesis": { + "title": "OPD: Single-view 3D Openable Part Detection", + "url": "https://summit.sfu.ca/item/36177" + } + }] + }, + "YongsenMao": { + "name": "Yongsen Mao", + "web": "https://sammaoys.github.io/" + }, + "SanjayHaresh": { + "name": "Sanjay Haresh", + "web": "https://www.sanjayharesh.com/" + }, + "HanHungLee": { + "name": "Han-Hung Lee", + "web": "https://hanhung.github.io/", + "tags": ["student", "PhD"], + "details": "Fall 2021" + }, + "XiaohaoSun": { + "name": "Xiaohao Sun", + "web": "https://sun-xh.github.io/", + "tags": ["student", "PhD"], + "details": "Fall 2021" + }, + "XingguangYan": { + "name": "Xingguang Yan", + "web": "http://yanxg.art/", + "tags": ["student", "PhD"], + "details": "Fall 2022" + }, + "QiruiWu": { + "name": "Qirui Wu", + "web": "https://qiruiw.github.io/", + "tags": ["alumni", "student", "PhD"], + "details": "Spring 2022", + "alumni": [ + "undergrad, Fall 2019 - 2021, next SFU PhD" + ] + }, + "AustinWang": { + "name": "Austin T. Wang", + "web": "https://atwang16.github.io/", + "tags": ["student", "PhD"], + "details": "Fall 2023" + }, + "XiaoliangHuo": { + "name": "Xiaoliang Huo", + "tags": ["student", "PhD"], + "details": "Fall 2023, co-advised with Manolis Savva" + }, + "WeijieLin": { + "name": "Weijie (Lewis) Lin", + "web": "https://lewislinn.github.io/", + "tags": ["alumni"], + "alumni": [ + "undergrad, Spring 2020 - 2021, next CMU MS" + ] + }, + "YimingZhang": { + "name": "Yiming Zhang", + "tags": ["alumni", "student", "PhD"], + "details": "Spring 2024", + "alumni": [ { + "description": "undergrad, 2021, M.Sc. 
2022 - 2023, next SFU PhD", + "thesis": { + "title": "Multi3DRefer: Grounding text description to multiple 3D objects", + "url": "https://summit.sfu.ca/item/37957" + } + } + ] + }, + "ZemingGong": { + "name": "Zeming Gong", + "web": "https://zmgong.github.io/", + "tags": ["alumni", "student", "Master"], + "details": "Summer 2023", + "alumni": [ + "undergrad, 2022, next SFU MSc" + ] + }, + "DenysIliash": { + "name": "Denys Iliash", + "tags": ["student", "Master"], + "details": "Spring 2024", + "alumni": [ + "undergrad, Fall 2023, next SFU MSc" + ] + }, + "MrinalGoshalia": { + "name": "Mrinal Goshalia", + "tags": ["student", "Undergrad"], + "details": "Fall 2023" + }, + "DaveZhenyuChen": { + "name": "Dave Zhenyu Chen", + "web": "http://www.niessnerlab.org/members/zhenyu_chen/profile.html", + "tags": ["alumni"], + "alumni": [ + "visiting student, Spring 2022, PhD at Technical University of Munich; TUM-IAS funded student co-advised with Matthias Niessner" + ] + }, + "TommasoCampari": { + "name": "Tommaso Campari", + "web": "https://www.tommasocampari.com/", + "tags": ["alumni"], + "alumni": [ + "visiting student, Spring 2022, PhD at University of Padova" + ] + }, + "EnricoCancelli": { + "name": "Enrico Cancelli", + "web": "http://hit.psy.unipd.it/enrico-cancelli", + "tags": ["alumni"], + "alumni": [ + "visiting student, Spring 2024, PhD at University of Padova" + ] + }, + "AditiJain": { + "name": "Aditi Jain", + "tags": ["alumni"], + "alumni": [ + "visiting student, Summer 2022, Undergraduate at IIT Delhi" + ] + }, + "NingWang": { + "name": "Ning Wang", + "web": "https://wangning-001.github.io/", + "tags": ["alumni"], + "alumni": [ + "visiting student, Spring 2023-2024, PhD at Wuhan University" + ] + }, + "JiayiLiu": { + "name": "Jiayi Liu", + "web": "https://3dlg-hcvc.github.io/singapo/" + }, + "JiaqiTan": { + "name": "Jiaqi Tan", + "web": "https://christinatan0704.github.io/mysite/" + }, + "SaimWani": { + "name": "Saim Wani", + "web": "https://saimwani.github.io/" + }, + "ShivanshPatel": { + "name": "Shivansh Patel", + "web": "https://shivanshpatel35.github.io/" + }, + "UnnatJain": { + "name": "Unnat Jain", + "web": "https://unnat.github.io/" + }, + "VladmirKim": { + "name": "Vladmir Kim", + "web": "http://www.vovakim.com/" + }, + "SiddharthaChaudhuri": { + "name": "Siddhartha Chaudhuri", + "web": "https://www.cse.iitb.ac.in/~sidch/" + }, + "XianghaoXu": { + "name": "Xianghao Xu", + "web": "https://xxh43.github.io/" + }, + "DavidCharatan": { + "name": "David Charatan", + "web": "https://davidcharatan.com/" + }, + "MaeHeitmann": { + "name": "Mae Heitmann" + }, + "AliMahdaviAmiri": { + "name": "Ali Mahdavi-Amiri", + "web": "hhttps://arash-mham.github.io/" + }, + "DhruvBatra": { + "name": "Dhruv Batra", + "web": "https://www.cc.gatech.edu/~dbatra/" + }, + "SoniaChernova": { + "name": "Sonia Chernova", + "web": "https://www.cc.gatech.edu/~chernova/" + }, + "AndrewDavison": { + "name": "Andrew J. 
Davison", + "web": "https://www.doc.ic.ac.uk/~ajd/" + }, + "JiaDeng": { + "name": "Jia Deng", + "web": "https://www.cs.princeton.edu/~jiadeng/" + }, + "SergeyLevine": { + "name": "Sergey Levine", + "web": "https://people.eecs.berkeley.edu/~svlevine/" + }, + "IgorMordatch": { + "name": "Igor Mordatch" + }, + "AndrewSzot": { + "name": "Andrew Szot", + "web": "https://www.andrewszot.com/" + }, + "ErikWijmans": { + "name": "Erik Wijmans", + "web": "https://wijmans.xyz/" + }, + "YiliZhao": { + "name": "Yili Zhao", + "web": "http://www.yilizhao.net/" + }, + "AaronGokaslan": { + "name": "Aaron Gokaslan", + "web": "https://skylion007.github.io/" + }, + "MustafaMukadam": { + "name": "Mustafa Mukadam", + "web": "http://www.mustafamukadam.com/" + }, + "FranziskaMeier": { + "name": "Franziska Meier", + "web": "https://fmeier.github.io/" + }, + "ZsoltKira": { + "name": "Zsolt Kira", + "web": "https://www.cc.gatech.edu/~zk15/" + }, + "VladimírVondrus": { + "name": "Vladimír Vondrus", + "web": "https://github.com/mosra" + }, + "AlexanderSchwing": { + "name": "Alexander Schwing", + "web": "https://alexander-schwing.de/" + }, + "SvetlanaLazebnik": { + "name": "Svetlana Lazebnik", + "web": "https://slazebni.cs.illinois.edu/" + }, + "SanthoshRamakrishnan": { + "name": "Santhosh K. Ramakrishnan", + "web": "https://srama2512.github.io/" + }, + "RonghangHu": { + "name": "Ronghang Hu", + "web": "https://ronghanghu.com/" + }, + "XinleiChen": { + "name": "Xinlei Chen", + "web": "https://xinleic.xyz/" + }, + "KarmeshYadav": { + "name": "Karmesh Yadav", + "web": "https://www.karmeshyadav.com/" + }, + "RamRamrakhya": { + "name": "Ram Ramrakhya", + "web": "https://ram81.github.io/" + }, + "TheoGervet": { + "name": "Theo Gervet", + "web": "https://theophilegervet.github.io/" + }, + "LucianoSerafini": { + "name": "Luciano Serafini", + "web": "https://dkm.fbk.eu/author/lucianoserafini/" + }, + "LambertoBallan": { + "name": "Lamberto Ballan", + "web": "http://www.lambertoballan.net/" + }, + "YonatanBisk": { + "name": "Yonatan Bisk", + "web": "https://yonatanbisk.com/" + }, + "MukulKhanna": { + "name": "Mukul Khanna", + "web": "https://mukulkhanna.github.io/" + }, + "ChrisPaxton": { + "name": "Chris Paxton", + "web": "https://cpaxton.github.io/" + }, + "BrennanShacklett": { + "name": "Brennan Shacklett", + "web": "https://cs.stanford.edu/~bps/" + }, + "JoakimHaurum": { + "name": "Joakim Bruslund Haurum", + "web": "https://joakimhaurum.github.io/" + }, + "ScottLowe": { + "name": "Scott C Lowe", + "web": "https://scottclowe.com/" + }, + "GrahamTaylor": { + "name": "Graham W Taylor", + "web": "https://www.gwtaylor.ca/" + }, + "LilaKari": { + "name": "Lila Kari", + "web": "https://cs.uwaterloo.ca/~lila/" + }, + "PaulFieguth": { + "name": "Paul Fieguth", + "web": "https://uwaterloo.ca/systems-design-engineering/profile/pfieguth" + }, + "PabloMillanArias": { + "name": "Pablo Millan Arias", + "web": "https://millanp95.github.io/" + }, + "ZiyuWan": { + "name": "Ziyu Wan", + "web": "http://raywzy.com/" + } +} diff --git a/posts/BIOSCAN-jobposting_SimonFraser.pdf b/posts/BIOSCAN-jobposting_SimonFraser.pdf new file mode 100644 index 0000000..da1f1b1 Binary files /dev/null and b/posts/BIOSCAN-jobposting_SimonFraser.pdf differ diff --git a/pubs.html b/pubs.html new file mode 100644 index 0000000..747d697 --- /dev/null +++ b/pubs.html @@ -0,0 +1,6000 @@ + + + + + + + + + + Angel Xuan Chang - Publications | Angel Xuan Chang + + + + + + + + + + + + + +
+
+ + +

Angel Xuan Chang

+ +
+
+

+ I am an Associate Professor at Simon Fraser University. + Prior to this, I was a visiting research scientist at Facebook AI Research and a research scientist at Eloquent Labs working on dialogue. I received my Ph.D. in Computer Science from Stanford, where I was part of the Natural Language Processing Group and advised by Chris Manning. + My research focuses on connecting language to 3D representations of shapes and scenes and grounding of language for embodied agents in indoor environments. I have worked on methods for synthesizing 3D scenes and shapes from natural language, and various datasets for 3D scene understanding. In general, I am interested in the semantics of shapes and scenes, the representation and acquisition of common sense knowledge, and reasoning using probabilistic models. + Some of my other interests include drawing and dance. +

+

+ +

+
+
+ Angel Xuan Chang +
+ angelx-{at}-sfu-[dot]-ca +

+ Associate Professor
+ School of Computing Science
+ Simon Fraser University
+ GrUVi Lab | SFU NatLang | SFU AI/ML
+ Canada CIFAR AI Chair (Amii)
+ TUM-IAS Hans Fischer Fellow
+ Google Scholar +
+
+
+ + + +
+
+ + +
+
+

Publications

+ + + + + + + + + + +
+
+ + + + +
+

2022

+ +
+
+ + Understanding Pure CLIP Guidance for Voxel Grid NeRF Models +   + +
+
+

+ + Understanding Pure CLIP Guidance for Voxel Grid NeRF Models + +

+ + + + Han-Hung Lee, + + + + + + Angel X. Chang
+
+ + + arXiv:2209.15172 [cs.CV], September 2022
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + MultiScan: Scalable RGBD scanning for 3D environments with articulated objects +   + +
+
+

+ + MultiScan: Scalable RGBD scanning for 3D environments with articulated objects + +

+ + + + Yongsen Mao, + + + + + + Yiming Zhang, + + + + + + Hanxiao (Shawn) Jiang, + + + + + + Angel X. Chang, + + + + + + Manolis Savva
+ + + + NeurIPS 2022
+ + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ + Articulated 3D Human-Object Interactions from RGB Videos: An Empirical Analysis of Approaches and Challenges +   + +
+
+

+ + Articulated 3D Human-Object Interactions from RGB Videos: An Empirical Analysis of Approaches and Challenges + +

+ + + + Sanjay Haresh, + + + + + + Xiaohao Sun, + + + + + + Hanxiao (Shawn) Jiang, + + + + + + Angel X. Chang, + + + + + + Manolis Savva
+ + + + 3DV 2022
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + OPD: Single-view 3D Openable Part Detection +   + +
+
+

+ + OPD: Single-view 3D Openable Part Detection + +

+ + + + Hanxiao (Shawn) Jiang, + + + + + + Yongsen Mao, + + + + + + Manolis Savva, + + + + + + Angel X. Chang
+
+ + + ECCV 2022
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + D3Net: A Speaker-Listener Architecture for Semi-supervised Dense Captioning and Visual Grounding in RGB-D Scans +   + +
+
+

+ + D3Net: A Speaker-Listener Architecture for Semi-supervised Dense Captioning and Visual Grounding in RGB-D Scans + +

+ + + + Dave Zhenyu Chen, + + + + + + Qirui Wu, + + + + + + Matthias Nießner, + + + + + + Angel X. Chang
+
+ + + ECCV 2022
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + 3DVQA: Visual Question Answering for 3D Environments +   + +
+
+

+ + 3DVQA: Visual Question Answering for 3D Environments + +

+ + + + Yasaman Etesam, + + + + + + Leon Kochiev, + + + + + + Angel X. Chang
+
+ + + CRV 2022
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + TriCoLo: Trimodal Contrastive Loss for fine-grained Text to Shape Retrieval +   + +
+
+

+ + TriCoLo: Trimodal Contrastive Loss for fine-grained Text to Shape Retrieval + +

+ + + + Yue Ruan, + + + + + + Han-Hung Lee, + + + + + Ke Zhang, + + + + + Angel X. Chang
+
+ + + arXiv:2201.07366 [cs.CV], January 2022
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + | webpage +
+
+ + +
+ +
+

2021

+ +
+
+ + Habitat-Matterport 3D Dataset (HM3D): 1000 Large-scale 3D Environments for Embodied AI +   + +
+
+

+ + Habitat-Matterport 3D Dataset (HM3D): 1000 Large-scale 3D Environments for Embodied AI + +

+ + + + Santhosh K. Ramakrishnan, + + + + + + Aaron Gokaslan, + + + + + + Erik Wijmans, + + + + + Oleksandr Maksymets, + + + + Alexander Clegg, + + + + John Turner, + + + + Eric Undersander, + + + + Wojciech Galuba, + + + + Andrew Westbury, + + + + + Angel X. Chang, + + + + + + Manolis Savva, + + + + + + Yili Zhao, + + + + + + Dhruv Batra
+ + + + NeurIPS Datasets and Benchmarks Track 2021
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Habitat 2.0: Training Home Assistants to Rearrange their Habitat +   + +
+
+

+ + Habitat 2.0: Training Home Assistants to Rearrange their Habitat + +

+ + + + Andrew Szot, + + + + + Alexander Clegg, + + + + Eric Undersander, + + + + + Erik Wijmans, + + + + + + Yili Zhao, + + + + + John Turner, + + + + Noah Maestre, + + + + + Mustafa Mukadam, + + + + + + Devendra Chaplot, + + + + + Oleksandr Maksymets, + + + + + Aaron Gokaslan, + + + + + + Vladimír Vondrus, + + + + + Sameer Dharur, + + + + + Franziska Meier, + + + + + Wojciech Galuba, + + + + + Angel X. Chang, + + + + + + Zsolt Kira, + + + + + + Vladlen Koltun, + + + + + + Jitendra Malik, + + + + + + Manolis Savva, + + + + + + Dhruv Batra
+ + + + NeurIPS 2021
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + | + post + + + + +
+
+ +
+
+ + Language-Aligned Waypoint (LAW) Supervision for Vision-and-Language Navigation in Continuous Environments +   + +
+
+

+ + Language-Aligned Waypoint (LAW) Supervision for Vision-and-Language Navigation in Continuous Environments + +

+ + + + Sonia Raychaudhuri, + + + + + + Shivansh Patel, + + + + + + Saim Wani, + + + + + + Unnat Jain, + + + + + + Angel X. Chang
+
+ + + EMNLP 2021 (short)
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Interpretation of Emergent Communication in Heterogeneous Collaborative Embodied Agents +   + +
+
+

+ + Interpretation of Emergent Communication in Heterogeneous Collaborative Embodied Agents + +

+ + + + Shivansh Patel, + + + + + + Saim Wani, + + + + + + Unnat Jain, + + + + + + Alexander Schwing, + + + + + + Svetlana Lazebnik, + + + + + + Manolis Savva, + + + + + + Angel X. Chang
+
+ + + ICCV 2021
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Roominoes: Learning to Assemble 3D Rooms into Floor Plans +   + +
+
+

+ + Roominoes: Learning to Assemble 3D Rooms into Floor Plans + +

+ + + + Kai Wang, + + + + + + Xianghao Xu, + + + + + Leon Lei, + + + + Selena Ling, + + + + Natalie Lindsay, + + + + + Angel X. Chang, + + + + + + Manolis Savva, + + + + + + Daniel Ritchie
+ + + + SGP 2021
+ + + + + pdf + + + + + + + + + + + | + video + + + + + + + + + + + + + + +
+
+ +
+
+ + Scan2Cap: Context-aware Dense Captioning in RGB-D Scans +   + +
+
+

+ + Scan2Cap: Context-aware Dense Captioning in RGB-D Scans + +

+ + + + Dave Zhenyu Chen, + + + + + + Ali Gholami, + + + + + + Matthias Nießner, + + + + + + Angel X. Chang
+
+ + + CVPR 2021
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Plan2Scene: Converting Floorplans to 3D Scenes +   + +
+
+

+ + Plan2Scene: Converting Floorplans to 3D Scenes + +

+ + + + Madhawa Vidanapathirana, + + + + + + Qirui Wu, + + + + + + Yasutaka Furukawa, + + + + + + Angel X. Chang, + + + + + + Manolis Savva
+ + + + CVPR 2021
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Mirror3D: Depth Refinement for Mirror Surfaces +   + +
+
+

+ + Mirror3D: Depth Refinement for Mirror Surfaces + +

+ + + + Jiaqi Tan, + + + + + + Weijie (Lewis) Lin, + + + + + + Angel X. Chang, + + + + + + Manolis Savva
+ + + + CVPR 2021
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + | webpage +
+
+ + +
+ +
+

2020

+ +
+
+ + Rearrangement: A Challenge for Embodied AI +   + +
+
+

+ + Rearrangement: A Challenge for Embodied AI + +

+ + + + Dhruv Batra, + + + + + + Angel X. Chang, + + + + + + Sonia Chernova, + + + + + + Andrew J. Davison, + + + + + + Jia Deng, + + + + + + Vladlen Koltun, + + + + + + Sergey Levine, + + + + + + Jitendra Malik, + + + + + + Igor Mordatch, + + + + + + Roozbeh Mottaghi, + + + + + + Manolis Savva, + + + + + + Hao Su
+ + + + arXiv:2011.01975 [cs.AI], November 2020
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ + Motion Annotation Programs: A Scalable Approach to Annotating Kinematic Articulations in Large 3D Shape Collections +   + +
+
+

+ + Motion Annotation Programs: A Scalable Approach to Annotating Kinematic Articulations in Large 3D Shape Collections + +

+ + + + Xianghao Xu, + + + + + + David Charatan, + + + + + + Sonia Raychaudhuri, + + + + + + Hanxiao (Shawn) Jiang, + + + + + + Mae Heitmann, + + + + + + Vladimir Kim, + + + + + + Siddhartha Chaudhuri, + + + + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Daniel Ritchie
+ + + + 3DV 2020
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + | + demo + + + + + + | webpage +
+
+ +
+
+ + Multi-ON: Benchmarking Semantic Map Memory using Multi-Object Navigation +   + +
+
+

+ + Multi-ON: Benchmarking Semantic Map Memory using Multi-Object Navigation + +

+ + + + Saim Wani, + + + + + + Shivansh Patel, + + + + + + Unnat Jain, + + + + + + Angel X. Chang, + + + + + + Manolis Savva
+ + + + NeurIPS 2020
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Sneak Pique: Exploring Autocompletion as a Data Discovery Scaffold for Supporting Visual Analysis +   + +
+
+

+ + Sneak Pique: Exploring Autocompletion as a Data Discovery Scaffold for Supporting Visual Analysis + +

+ + + + Vidya Setlur, + + + + + + Enamul Hoque, + + + + + + Dae Hyun Kim, + + + + + + Angel X. Chang
+
+ + + UIST 2020
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + ScanRefer: 3D Object Localization in RGB-D Scans using Natural Language +   + +
+
+

+ + ScanRefer: 3D Object Localization in RGB-D Scans using Natural Language + +

+ + + + Dave Zhenyu Chen, + + + + + + Angel X. Chang, + + + + + + Matthias Nießner
+ + + + ECCV 2020
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + | + benchmark + + + + + + + + | webpage +
+
+ +
+
+ + SAPIEN: a SimulAted Part-based Interactive ENvironment +   + +
+
+

+ + SAPIEN: a SimulAted Part-based Interactive ENvironment + +

+ + + + Fanbo Xiang, + + + + + + Yuzhe Qin, + + + + + + Kaichun Mo, + + + + + + Yikuan Xia, + + + + + + Hao Zhu, + + + + + + Fangchen Liu, + + + + + + Minghua Liu, + + + + + + Hanxiao (Shawn) Jiang, + + + + + + Yifu Yuan, + + + + + + He Wang, + + + + + + Li Yi, + + + + + + Angel X. Chang, + + + + + + Leonidas Guibas, + + + + + + Hao Su
+ + + + CVPR 2020
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ + +
+ +
+

2019

+ +
+
+ + Mimic and Rephrase: Reflective Listening in Open-Ended Dialogue +   + +
+
+

+ + Mimic and Rephrase: Reflective Listening in Open-Ended Dialogue + +

+ + + + Justin Dieter, + + + + + + Tian Wang, + + + + + + Gabor Angeli, + + + + + + Angel X. Chang, + + + + + + Arun Tejasvi Chaganty
+ + + + CoNLL 2019
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ + PlanIT: Planning and Instantiating Indoor Scenes with Relation Graph and Spatial Prior Networks +   + +
+
+

+ + PlanIT: Planning and Instantiating Indoor Scenes with Relation Graph and Spatial Prior Networks + +

+ + + + Kai Wang, + + + + + + Yu-An Lin, + + + + + + Ben Weissmann, + + + + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Daniel Ritchie
+ + + + SIGGRAPH 2019
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ + Hierarchy Denoising Recursive Autoencoders for 3D Scene Layout Prediction +   + +
+
+

+ + Hierarchy Denoising Recursive Autoencoders for 3D Scene Layout Prediction + +

+ + + + Yifei Shi, + + + + + + Angel X. Chang, + + + + + + Zhelun Wu, + + + + + + Manolis Savva, + + + + + + Kai Xu
+ + + + CVPR 2019, arXiv:1903.03757 [cs.CV]
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + PartNet: A Large-scale Benchmark for Fine-grained and Hierarchical Part-level 3D Object Understanding +   + +
+
+

+ + PartNet: A Large-scale Benchmark for Fine-grained and Hierarchical Part-level 3D Object Understanding + +

+ + + + Kaichun Mo, + + + + + + Shilin Zhu, + + + + + + Angel X. Chang, + + + + + + Li Yi, + + + + + + Subarna Tripathi, + + + + + + Leonidas Guibas, + + + + + + Hao Su
+ + + + CVPR 2019, arXiv:1812.02713 [cs.CV]
+ + + + + pdf + + + + + + + + + + + | + video + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Scan2CAD: Learning CAD Model Alignment in RGB-D Scans +   + +
+
+

+ + Scan2CAD: Learning CAD Model Alignment in RGB-D Scans + +

+ + + + Armen Avetisyan, + + + + + + Manuel Dahnert, + + + + + + Angela Dai, + + + + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Matthias Nießner
+ + + + CVPR 2019 (oral), arXiv:1811.11187 [cs.CV]
+ + + + + pdf + + + + + + + | + code + + + + + + + | + video + + + + + + + + + | + benchmark + + + + + + + + +
+
+ + +
+ +
+

2018

+ +
+
+ + Deep Convolutional Priors for Indoor Scene Synthesis +   + +
+
+

+ + Deep Convolutional Priors for Indoor Scene Synthesis + +

+ + + + Kai Wang, + + + + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Daniel Ritchie
+ + + + SIGGRAPH 2018
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ + On evaluation of embodied navigation agents +   + +
+
+

+ + On evaluation of embodied navigation agents + +

+ + + + Peter Anderson, + + + + + + Angel X. Chang, + + + + + + Devendra Chaplot, + + + + + + Alexey Dosovitskiy, + + + + + + Saurabh Gupta, + + + + + + Vladlen Koltun, + + + + + + Jana Kosecka, + + + + + + Jitendra Malik, + + + + + + Roozbeh Mottaghi, + + + + + + Manolis Savva, + + + + + + Amir R. Zamir
+ + + + arXiv:1807.06757 [cs.AI], July 2018
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ + Text2Shape: Generating Shapes from Natural Language by Learning Joint Embeddings +   + +
+
+

+ + Text2Shape: Generating Shapes from Natural Language by Learning Joint Embeddings + +

+ + + + Kevin Chen, + + + + + + Christopher B. Choy, + + + + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Thomas Funkhouser, + + + + + + Silvio Savarese
+ + + + Proceedings of ACCV 2018 (oral), arXiv:1803.08495 [cs.CV]
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Im2Pano3D: Extrapolating 360 Structure and Semantics Beyond the Field of View +   + +
+
+

+ + Im2Pano3D: Extrapolating 360 Structure and Semantics Beyond the Field of View + +

+ + + + Shuran Song, + + + + + + Andy Zeng, + + + + + + Angel X. Chang, + + + + + + Manolis Savva, + + + + + + Silvio Savarese, + + + + + + Thomas Funkhouser
+ + + + Proceedings of CVPR 2018, arXiv:1712.04569 [cs.CV]
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Linking WordNet to 3D Shapes +   + +
+
+

+ + Linking WordNet to 3D Shapes + +

+ + + + Angel X. Chang, + + + + + + Rishi Mago, + + + + + + Pranav Krishna, + + + + + + Manolis Savva, + + + + + + Christiane Fellbaum
+ + + + Proceedings of Global WordNet Conference 2018
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+

2017

+ +
+
+ + MINOS: Multimodal Indoor Simulator for Navigation in Complex Environments +   + +
+
+

+ + MINOS: Multimodal Indoor Simulator for Navigation in Complex Environments + +

+ + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Alexey Dosovitskiy, + + + + + + Thomas Funkhouser, + + + + + + Vladlen Koltun
+ + + + arXiv:1712.03931 [cs.LG]
+ + + + + pdf + + + + + + + | + code + + + + + + + | + video + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Cross-modal Attribute Transfer for Rescaling 3D Models +   + +
+
+

+ + Cross-modal Attribute Transfer for Rescaling 3D Models + +

+ + + + Lin Shao, + + + + + + Angel X. Chang, + + + + + + Hao Su, + + + + + + Manolis Savva, + + + + + + Leonidas Guibas
+ + + + Proceedings of 3DV 2017
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ + Matterport3D: Learning from RGB-D Data in Indoor Environments +   + +
+
+

+ + Matterport3D: Learning from RGB-D Data in Indoor Environments + +

+ + + + Angel X. Chang, + + + + + + Angela Dai, + + + + + + Thomas Funkhouser, + + + + + + Maciej Halber, + + + + + + Matthias Nießner, + + + + + + Manolis Savva, + + + + + + Shuran Song, + + + + + + Andy Zeng, + + + + + + Yinda Zhang
+ + + + Proceedings of 3DV 2017, arXiv:1709.06158 [cs.CV]
+ + + + + pdf + + + + + + + | + code + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Learning Where to Look: Data-Driven Viewpoint Set Selection for 3D Scenes +   + +
+
+

+ + Learning Where to Look: Data-Driven Viewpoint Set Selection for 3D Scenes + +

+ + + + Kyle Genova, + + + + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Thomas Funkhouser
+ + + + arXiv:1704.02393 [cs.CV]
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ + ScanNet: Richly-annotated 3D Reconstructions of Indoor Scenes +   + +
+
+

+ + ScanNet: Richly-annotated 3D Reconstructions of Indoor Scenes + +

+ + + + Angela Dai, + + + + + + Angel X. Chang, + + + + + + Manolis Savva, + + + + + + Maciej Halber, + + + + + + Thomas Funkhouser, + + + + + + Matthias Nießner
+ + + + Proceedings of CVPR 2017 (spotlight), arXiv:1702.04405 [cs.CV]
+ + + + + pdf + + + + + + + | + code + + + + + + + | + video + + + + + + + + + | + benchmark + + + + + + + + | webpage +
+
+ +
+
+ + Semantic Scene Completion from a Single Depth Image +   + +
+
+

+ + Semantic Scene Completion from a Single Depth Image + +

+ + + + Shuran Song, + + + + + + Fisher Yu, + + + + + + Andy Zeng, + + + + + + Angel X. Chang, + + + + + + Manolis Savva, + + + + + + Thomas Funkhouser
+ + + + Proceedings of CVPR 2017 (oral), arXiv:1611.08974 [cs.CV]
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + A Two-stage Sieve Approach to Quote Attribution +   + +
+
+

+ + A Two-stage Sieve Approach to Quote Attribution + +

+ + + + Grace Muzny, + + + + + + Michael Fang, + + + + + + Angel X. Chang, + + + + + + Dan Jurafsky
+ + + + Proceedings of EACL 2017
+ + + + + pdf + + + + + + + | + code + + + + + | + data + + + + + + + + + + + + + + + + +
+
+ +
+
+ + SceneSuggest: Context-driven 3D Scene Design +   + +
+
+

+ + SceneSuggest: Context-driven 3D Scene Design + +

+ + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Maneesh Agrawala
+ + + + arXiv:1703.00061 [cs.CG], March 2017
+ + + + + pdf + + + + + + + + + + + + + + + + + + + | + demo + + + + + + +
+
+ +
+
+ + SceneSeer: 3D Scene Design with Natural Language +   + +
+
+

+ + SceneSeer: 3D Scene Design with Natural Language + +

+ + + + Angel X. Chang, + + + + + + Mihail Eric, + + + + + + Manolis Savva, + + + + + + Christopher D. Manning
+ + + + arXiv:1703.00050 [cs.CG], March 2017
+ + + + + pdf + + + + + + + + + + + + + + + + + + + | + demo + + + + + + +
+
+ + +
+ +
+

2016

+ +
+
+ + Eviza: A Natural Language Interface for Visual Analysis +   + +
+
+

+ + Eviza: A Natural Language Interface for Visual Analysis + +

+ + + + Vidya Setlur, + + + + + + Sarah E. Battersby, + + + + + + Melanie Tory, + + + + + + Rich Gossweiler, + + + + + + Angel X. Chang
+
+ + + UIST 2016
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + PiGraphs: Learning Interaction Snapshots from Observations +   + +
+
+

+ + PiGraphs: Learning Interaction Snapshots from Observations + +

+ + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Pat Hanrahan, + + + + + + Matthew Fisher, + + + + + + Matthias Nießner
+ + + + SIGGRAPH 2016
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ +
+
+

+ + Evaluating the word-expert approach for Named-Entity Disambiguation + +

+ + + + Angel X. Chang, + + + + + + Valentin I. Spitkovsky, + + + + + + Christopher D. Manning, + + + + + + Eneko Agirre
+ + + + arXiv:1603.04767 [cs.CL], March 2016
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+

2015

+ +
+
+ + Text to 3D scene generation +   + +
+
+

+ + Text to 3D scene generation + +

+ + + + Angel X. Chang
+
+ + + Ph.D. dissertation, Department of Computer Science, Stanford University, 2015
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ + ShapeNet: An Information-Rich 3D Model Repository +   + +
+
+

+ + ShapeNet: An Information-Rich 3D Model Repository + +

+ + + + Angel X. Chang, + + + + + + Thomas Funkhouser, + + + + + + Leonidas Guibas, + + + + + + Pat Hanrahan, + + + + + + Qixing Huang, + + + + + + Zimo Li, + + + + + + Silvio Savarese, + + + + + + Manolis Savva, + + + + + + Shuran Song, + + + + + + Hao Su, + + + + + + Jianxiong Xiao, + + + + + + Li Yi, + + + + + + Fisher Yu
+ + + + arXiv:1512.03012 [cs.GR], Dec 2015
+ + + + + pdf + + + + + | + bib + + + + + | + code + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Generating Semantically Precise Scene Graphs from Textual Descriptions for Improved Image Retrieval +   + +
+
+

+ + Generating Semantically Precise Scene Graphs from Textual Descriptions for Improved Image Retrieval + +

+ + + + Sebastian Schuster, + + + + + + Ranjay Krishna, + + + + + + Angel X. Chang, + + + + + + Li Fei-Fei, + + + + + + Christopher D. Manning
+ + + + In Proceedings of the Fourth Workshop on Vision and Language (VL15)
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Semantically-Enriched 3D Models for Common-sense Knowledge +   + +
+
+

+ + Semantically-Enriched 3D Models for Common-sense Knowledge + +

+ + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Pat Hanrahan
+ + + + CVPR 2015 Vision meets Cognition Workshop
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Text to 3D Scene Generation with Rich Lexical Grounding +   + +
+
+

+ + Text to 3D Scene Generation with Rich Lexical Grounding + +

+ + + + Angel X. Chang, + + + + + + Will Monroe, + + + + + + Manolis Savva, + + + + + + Christopher Potts, + + + + + + Christopher D. Manning
+ + + + Proceedings of ACL 2015
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ + +
+ +
+

2014

+ +
+
+ + SceneGrok: Inferring Action Maps in 3D Environments +   + +
+
+

+ + SceneGrok: Inferring Action Maps in 3D Environments + +

+ + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Pat Hanrahan, + + + + + + Matthew Fisher, + + + + + + Matthias Nießner
+ + + + Proceedings of SIGGRAPH Asia 2014
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + On Being the Right Scale: Sizing Large Collections of 3D Models +   + +
+
+

+ + On Being the Right Scale: Sizing Large Collections of 3D Models + +

+ + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Gilbert Bernstein, + + + + + + Christopher D. Manning, + + + + + + Pat Hanrahan
+ + + + SIGGRAPH Asia 2014 Workshop on Indoor Scene Understanding: Where Graphics meets Vision
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Learning Spatial Knowledge for Text to 3D Scene Generation +   + +
+
+

+ + Learning Spatial Knowledge for Text to 3D Scene Generation + +

+ + + + Angel X. Chang, + + + + + + Manolis Savva, + + + + + + Christopher D. Manning
+ + + + Proceedings of the 2014 Conference on Empirical Methods in Natural Language Processing (EMNLP 2014)
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Learning Affordance Maps by Observing Interactions +   + +
+
+

+ + Learning Affordance Maps by Observing Interactions + +

+ + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Matthew Fisher, + + + + + + Matthias Nießner, + + + + + + Pat Hanrahan
+ + + + CVPR 2014 Workshop on Functionality, Physics, Intentionality and Causality
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ + Interactive Learning of Spatial Knowledge for Text to 3D Scene Generation +   + +
+
+

+ + Interactive Learning of Spatial Knowledge for Text to 3D Scene Generation + +

+ + + + Angel X. Chang, + + + + + + Manolis Savva, + + + + + + Christopher D. Manning
+ + + + Proceedings of the ACL 2014 Workshop on Interactive Language Learning, Visualization, and Interfaces
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + Semantic Parsing for Text to 3D Scene Generation +   + +
+
+

+ + Semantic Parsing for Text to 3D Scene Generation + +

+ + + + Angel X. Chang, + + + + + + Manolis Savva, + + + + + + Christopher D. Manning
+ + + + Proceedings of the ACL 2014 Workshop on Semantic Parsing
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ + TransPhoner: Automated Mnemonic Keyword Generation +   + +
+
+

+ + TransPhoner: Automated Mnemonic Keyword Generation + +

+ + + + Manolis Savva, + + + + + + Angel X. Chang, + + + + + + Christopher D. Manning, + + + + + + Pat Hanrahan
+ + + + Proceedings of CHI 2014
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ +
+
+

+ + TokensRegex: Defining cascaded regular expressions over tokens + +

+ + + + Angel X. Chang, + + + + + + Christopher D. Manning
+ + + + Stanford University Technical Report
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ + +
+ +
+

2013

+ +
+
+ + Deterministic coreference resolution based on entity-centric, precision-ranked rules +   + +
+
+

+ + Deterministic coreference resolution based on entity-centric, precision-ranked rules + +

+ + + + Heeyoung Lee, + + + + + + Angel X. Chang, + + + + + + Yves Peirsman, + + + + + + Nathanael Chambers, + + + + + + Mihai Surdeanu, + + + + + + Dan Jurafsky
+ + + + In Computational Linguistics 39(4)
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ +
+
+ +
+
+

+ + Stanford's 2013 KBP System + +

+ + + + Gabor Angeli, + + + + + + Arun Tejasvi Chaganty, + + + + + + Angel X. Chang, + + + + + + Kevin Reschke, + + + + + + Julie Tibshirani, + + + + + + Jean Y. Wu, + + + + + + Osbert Bastani, + + + + + + Keith Siilats, + + + + + + Christopher D. Manning
+ + + + In Proceedings of the Sixth Text Analysis Conference (TAC 2013)
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ +
+
+

+ + SUTime: Evaluation in TempEval-3 + +

+ + + + Angel X. Chang, + + + + + + Christopher D. Manning
+ + + + In Second Joint Conference on Lexical and Computational Semantics (*SEM), Volume 2: Proceedings of the Seventh International Workshop on Semantic Evaluation (SemEval 2013)
+ + + + + pdf + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+

2012

+ +
+
+ +
+
+

+ + Joint Entity and Event Coreference Resolution across Documents + +

+ + + + Heeyoung Lee, + + + + + + Marta Recasens, + + + + + + Angel X. Chang, + + + + + + Mihai Surdeanu, + + + + + + Dan Jurafsky
+ + + + Proceedings of the Conference on Empirical Methods in Natural Language Processing and Computational Natural Language Learning (EMNLP-CoNLL 2012)
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ + SUTime: A Library for Recognizing and Normalizing Time Expressions. +   + +
+
+

+ + SUTime: A Library for Recognizing and Normalizing Time Expressions. + +

+ + + + Angel X. Chang, + + + + + + Christopher D. Manning
+ + + + In Proceedings of the Eighth International Conference on Language Resources and Evaluation (LREC 2012)
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + | + poster + + + + + + + | + demo + + + + + + | webpage +
+
+ +
+
+ +
+
+

+ + A Cross-Lingual Dictionary for English Wikipedia Concepts + +

+ + + + Valentin I. Spitkovsky, + + + + + + Angel X. Chang
+
+ + + In Proceedings of the Eighth International Conference on Language Resources and Evaluation (LREC 2012)
+ + + + + pdf + + + + + | + bib + + + + + + + | + data + + + + + + + | + slides + + + + + + + + + + + | + post + + + + +
+
+ + +
+ +
+

2011

+ +
+
+ +
+
+

+ + Stanford's Distantly-Supervised Slot-Filling System + +

+ + + + Mihai Surdeanu, + + + + + + Sonal Gupta, + + + + + + John Bauer, + + + + + + David McClosky, + + + + + + Angel X. Chang, + + + + + + Valentin I. Spitkovsky, + + + + + + Christopher D. Manning
+ + + + In Proceedings of the Fourth Text Analysis Conference (TAC 2011)
+ + + + + pdf + + + + + | + bib + + + + + + + | + data + + + + + + + + + + + + + + + + +
+
+ +
+
+ +
+
+

+ + Strong Baselines for Cross-Lingual Entity Linking + +

+ + + + Valentin I. Spitkovsky, + + + + + + Angel X. Chang
+
+ + + In Proceedings of the Fourth Text Analysis Conference (TAC 2011)
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ +
+
+

+ + Stanford-UBC Entity Linking at TAC-KBP, Again + +

+ + + + Angel X. Chang, + + + + + + Valentin I. Spitkovsky, + + + + + + Eneko Agirre, + + + + + + Christopher D. Manning
+ + + + In Proceedings of the Fourth Text Analysis Conference (TAC 2011)
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + +
+
+ +
+
+ +
+
+

+ + Unsupervised Dependency Parsing without Gold Part-of-Speech Tags + +

+ + + + Valentin I. Spitkovsky, + + + + + + Hiyan Alshawi, + + + + + + Angel X. Chang, + + + + + + Dan Jurafsky
+ + + + In Proceedings of the 2011 Conference on Empirical Methods in Natural Language Processing (EMNLP 2011)
+ + + + + pdf + + + + + | + bib + + + + + + + | + data + + + + + + + + + | + poster + + + + + + + + + + +
+
+ +
+
+ +
+
+

+ + Stanford's Multi-Pass Sieve Coreference Resolution System at the CoNLL-2011 Shared Task + +

+ + + + Heeyoung Lee, + + + + + + Yves Peirsman, + + + + + + Angel X. Chang, + + + + + + Nathanael Chambers, + + + + + + Mihai Surdeanu, + + + + + + Dan Jurafsky
+ + + + In Proceedings of the CoNLL-2011 Shared Task
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + | webpage +
+
+ + +
+ +
+

2010

+ +
+
+ +
+
+

+ + A Simple Distant Supervision Approach for the TAC-KBP Slot Filling Task + +

+ + + + Mihai Surdeanu, + + + + + + David McClosky, + + + + + + Julie Tibshirani, + + + + + + John Bauer, + + + + + + Angel X. Chang, + + + + + + Valentin I. Spitkovsky, + + + + + + Christopher D. Manning
+ + + + In Proceedings of the Third Text Analysis Conference (TAC 2010)
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + | + slides + + + + + + + + + + + + +
+
+ +
+
+ +
+
+

+ + Stanford-UBC Entity Linking at TAC-KBP + +

+ + + + Angel X. Chang, + + + + + + Valentin I. Spitkovsky, + + + + + + Eric Yeh, + + + + + + Eneko Agirre, + + + + + + Christopher D. Manning
+ + + + In Proceedings of the Third Text Analysis Conference (TAC 2010)
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + | + poster + + + + + + + + + + +
+
+ + +
+ +
+

2009

+ +
+
+ +
+
+

+ + Stanford-UBC at TAC-KBP + +

+ + + + Eneko Agirre, + + + + + + Angel X. Chang, + + + + + + Dan Jurafsky, + + + + + + Christopher D. Manning, + + + + + + Valentin I. Spitkovsky, + + + + + + Eric Yeh
+ + + + In Proceedings of the Second Text Analysis Conference (TAC 2009)
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + | + slides + + + + + + + + + + + + +
+
+ + +
+ +
+

2000

+ +
+
+ + The Fractal Geometry of the Boundary of Dragon Curves +   + +
+
+

+ + The Fractal Geometry of the Boundary of Dragon Curves + +

+ + + + Angel X. Chang, + + + + + + Tianrong Zhang
+ + + + In Journal of Recreational Mathematics 30 (1), 9-22
+ + + + + pdf + + + + + | + bib + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ + +
+
+ +
+
+
+ +
+ + diff --git a/pubs.json b/pubs.json new file mode 100644 index 0000000..ad29509 --- /dev/null +++ b/pubs.json @@ -0,0 +1,1803 @@ +[ + { + "date": "2024-10", + "title": "SINGAPO: Single Image Controlled Generation of Articulated Parts in Objects", + "authors": [ + "JiayiLiu", + "DenysIliash", + "angel", + "manolis", + "AliMahdaviAmiri" + ], + "text": "arXiv:2410.16499 [cs.CV], October 2024", + "img": "files/singapo.png", + "web": "https://3dlg-hcvc.github.io/singapo/", + "pdf": "https://arxiv.org/pdf/2410.16499", + "code": "https://github.com/3dlg-hcvc/singapo", + "venue": ["vision", "preprint"], + "tags": ["3d", "articulations"] + }, + { + "date": "2024-09", + "title": "S2O: Static to Openable Enhancement for Articulated 3D Objects", + "authors": [ + "DenysIliash", + "HanxiaoJiang", + "YimingZhang", + "manolis", + "angel" + ], + "text": "arXiv:2409.18896 [cs.CV], September 2024", + "img": "files/s2o.png", + "web": "https://3dlg-hcvc.github.io/s2o/", + "pdf": "https://arxiv.org/pdf/2409.18896", + "code": "https://github.com/3dlg-hcvc/s2o", + "venue": ["vision", "preprint"], + "tags": ["3d", "articulations"] + }, + { + "date": "2024-08", + "title": "An Object is Worth 64x64 Pixels: Generating 3D Object via Image Diffusion", + "authors": [ + "XingguangYan", + "HanHungLee", + "ZiyuWan", + "angel" + ], + "text": "3DV 2025, arXiv:2408.03178 [cs.CV], August 2024", + "img": "files/omage.png", + "web": "https://omages.github.io/", + "pdf": "https://arxiv.org/pdf/2408.03178", + "code": "https://github.com/3dlg-hcvc/omages", + "venue": ["vision", "preprint"], + "tags": ["3d", "synthesis"] + }, + { + "date": "2024-08", + "title": "SceneMotifCoder: Example-driven Visual Program Learning for Generating 3D Object Arrangements", + "authors": [ + "Hou In Ivan Tam", + "Hou In Derek Pun", + "AustinWang", + "angel", + "manolis" + ], + "text": "3DV 2025, arXiv:2408.02211 [cs.GR], August 2024", + "img": "files/smc.png", + "web": "https://3dlg-hcvc.github.io/smc/", + "pdf": "https://arxiv.org/pdf/2408.02211", + "code": "https://github.com/3dlg-hcvc/smc", + "video": "https://youtu.be/iXsx18R7NN8", + "venue": ["vision", "preprint"], + "tags": ["3d", "synthesis", "text2scene"] + }, + { + "date": "2024-07", + "title": "R3DS: Reality-linked 3D Scenes for Panoramic Scene Understanding", + "authors": [ + "QiruiWu", + "SoniaRaychaudhuri", + "DanielRitchie", + "manolis", + "angel" + ], + "text": "ECCV 2024", + "img": "files/r3ds.webp", + "web": "https://3dlg-hcvc.github.io/r3ds/", + "pdf": "https://arxiv.org/pdf/2403.12301", + "code": "https://github.com/3dlg-hcvc/r3ds", + "venue": ["vision", "conference"], + "tags": ["3d"] + }, + { + "date": "2023-12", + "title": "BIOSCAN-5M: A Multimodal Dataset for Insect Biodiversity", + "authors": [ + "Zahra Gharaee", + "ScottLowe", + "ZemingGong", + "PabloMillanArias", + "Nicholas Pellegrino", + "AustinWang", + "JoakimHaurum", + "Iuliia Zarubiieva", + "LilaKari", + "Dirk Steinke", + "GrahamTaylor", + "PaulFieguth", + "angel" + ], + "text": "NeurIPS D&B 2024, arXiv:2406.12723 [cs.LG], June 2024", + "img": "files/bioscan5m.png", + "pdf": "https://arxiv.org/pdf/2406.12723", + "web": "https://biodiversitygenomics.net/projects/5m-insects/", + "code": "https://github.com/zahrag/BIOSCAN-5M", + "venue": ["ai", "preprint"], + "tags": ["bioscan", "dataset"] + }, + { + "date": "2024-08", + "title": "Duoduo CLIP: Efficient 3D Understanding with Multi-View Images", + "authors": [ + "HanHungLee", + "YimingZhang", + "angel" + ], + "text": "arXiv:2406.11579 [cs.CV], June 2024", + "img": 
"files/duoduoclip.png", + "web": "https://3dlg-hcvc.github.io/DuoduoCLIP/", + "pdf": "https://arxiv.org/pdf/2406.11579", + "code": "https://github.com/3dlg-hcvc/DuoduoCLIP", + "venue": ["vision", "preprint"], + "tags": ["vl","3d"] + }, + { + "date": "2024-06", + "title": "Habitat Synthetic Scenes Dataset (HSSD-200): An Analysis of 3D Scene Scale and Realism Tradeoffs for ObjectGoal Navigation", + "authors": [ + "MukulKhanna", + "YongsenMao", + "HanxiaoJiang", + "SanjayHaresh", + "BrennanShacklett", + "DhruvBatra", + "Alexander William Clegg", + "Eric Undersander", + "angel", + "manolis" + ], + "text": "CVPR 2024, arXiv:2306.11290 [cs.CV], June 2023", + "img": "files/hssd.png", + "pdf": "https://arxiv.org/pdf/2306.11290.pdf", + "web": "https://3dlg-hcvc.github.io/hssd/", + "code": "https://github.com/3dlg-hcvc/hssd/", + "data": "https://huggingface.co/hssd", + "venue": ["vision", "conference"], + "tags": ["embodied", "navigation", "3d", "dataset"] + }, + { + "date": "2024-05", + "title": "CLIBD: Bridging Vision and Genomics for Biodiversity Monitoring at Scale", + "authors": [ + "ZemingGong", + "AustinWang", + "XiaoliangHuo", + "JoakimHaurum", + "ScottLowe", + "GrahamTaylor", + "angel" + ], + "text": "arXiv:2405.17537 [cs.AI], May 2024", + "img": "files/bioscanclip.png", + "web": "https://3dlg-hcvc.github.io/bioscan-clip/", + "pdf": "https://arxiv.org/pdf/2405.17537", + "code": "https://github.com/3dlg-hcvc/bioscan-clip", + "venue": ["ai", "preprint"], + "tags": ["bioscan"] + }, + { + "date": "2024-05", + "title": "M2DNeRF: Multi-Modal Decomposition NeRF with 3D Feature Fields", + "authors": [ + "NingWang", + "Lefei Zhang", + "angel" + ], + "text": "arXiv:2405.05010 [cs.CV], May 2024", + "img": "files/m2dnerf.png", + "pdf": "https://arxiv.org/pdf/2405.05010", + "venue": ["vision", "preprint"], + "tags": ["3d"] + }, + { + "date": "2024-04", + "title": "Text-to-3D Shape Generation", + "authors": [ + "HanHungLee", + "manolis", + "angel" + ], + "text": "Eurographics STAR (State of The Art Report), CGF 2024", + "img": "files/tt3dstar.png", + "web": "https://3dlg-hcvc.github.io/tt3dstar/", + "pdf": "https://arxiv.org/pdf/2403.13289.pdf", + "venue": ["journal"], + "tags": ["3d", "shape", "synthesis", "text2shape", "survey"] + }, + { + "date": "2024-03", + "title": "Generalizing Single-View 3D Shape Retrieval to Occlusions and Unseen Objects", + "authors": [ + "QiruiWu", + "DanielRitchie", + "manolis", + "angel" + ], + "text": "3DV 2024", + "img": "files/genshaperetr.png", + "web": "https://3dlg-hcvc.github.io/generalizing_shape_retrieval/", + "pdf": "https://arxiv.org/pdf/2401.00405.pdf", + "code": "https://github.com/3dlg-hcvc/generalizing_shape_retrieval", + "venue": ["vision", "conference"], + "tags": ["3d"] + }, + { + "date": "2024-03", + "title": "OPDMulti: Openable Part Detection for Multiple Objects", + "authors": [ + "XiaohaoSun", + "HanxiaoJiang", + "manolis", + "angel" + ], + "text": "3DV 2024, arXiv:2303.14087 [cs.CV], arXiv March 2023", + "img": "files/opdmulti.png", + "pdf": "https://arxiv.org/pdf/2303.14087.pdf", + "web": "https://3dlg-hcvc.github.io/OPDMulti/", + "code": "https://github.com/3dlg-hcvc/OPDMulti", + "venue": ["vision", "conference"], + "tags": ["articulations", "3d"] + }, + { + "date": "2024-01", + "title": "MOPA: Modular Object Navigation with PointGoal Agents", + "authors": [ + "SoniaRaychaudhuri", + "TommasoCampari", + "UnnatJain", + "manolis", + "angel" + ], + "text": "WACV 2024, arXiv:2304.03696 [cs.RO, cs.CV], April 2023", + "img": "files/mopa.gif", + "pdf": 
"https://arxiv.org/pdf/2304.03696.pdf", + "web": "https://3dlg-hcvc.github.io/mopa/", + "code": "https://github.com/3dlg-hcvc/mopa", + "venue": ["vision", "conference"], + "tags": ["embodied", "navigation"] + }, + { + "date": "2024-01", + "title": "TriCoLo: Trimodal Contrastive Loss for Text to Shape Retrieval", + "authors": [ + "YueRuan", + "HanHungLee", + "YimingZhang", + "Ke Zhang", + "angel" + ], + "text": "WACV 2024, arXiv:2201.07366 [cs.CV], January 2022", + "img": "files/tricolo.png", + "pdf": "https://arxiv.org/pdf/2201.07366.pdf", + "web": "https://3dlg-hcvc.github.io/tricolo/", + "code": "https://github.com/3dlg-hcvc/tricolo", + "venue": ["vision", "conference"], + "tags": ["vl","3d"] + }, + { + "date": "2023-12", + "title": "BarcodeBERT: Transformers for Biodiversity Analysis", + "authors": [ + "PabloMillanArias", + "Niousha Sadjadi", + "Monireh Safari", + "ZemingGong", + "AustinWang", + "ScottLowe", + "JoakimHaurum", + "Iuliia Zarubiieva", + "Dirk Steinke", + "LilaKari", + "angel", + "GrahamTaylor" + ], + "text": "4th Workshop on Self-Supervised Learning: Theory and Practice (NeurIPS 2023)", + "img": "files/barcodebert.png", + "pdf": "https://arxiv.org/pdf/2311.02401.pdf", + "code": "https://github.com/Kari-Genomics-Lab/BarcodeBERT", + "venue": ["ai", "workshop"], + "tags": ["bioscan"] + }, + { + "date": "2023-12", + "title": "A Step Towards Worldwide Biodiversity Assessment: The BIOSCAN-1M Insect Dataset", + "authors": [ + "Zahra Gharaee", + "ZemingGong", + "Nicholas Pellegrino", + "Iuliia Zarubiieva", + "JoakimHaurum", + "ScottLowe", + "Jaclyn TA McKeown", + "Chris CY Ho", + "Joschka McLeod", + "Yi-Yun C Wei", + "Jireh Agda", + "Sujeevan Ratnasingham", + "Dirk Steinke", + "angel", + "GrahamTaylor", + "PaulFieguth" + ], + "text": "NeurIPS Datasets and Benchmarks 2023", + "img": "files/bioscan1m.png", + "pdf": "https://arxiv.org/pdf/2307.10455.pdf", + "web": "https://biodiversitygenomics.net/1M_insects/", + "code": "https://github.com/zahrag/BIOSCAN-1M", + "venue": ["ai", "conference"], + "tags": ["bioscan", "dataset"] + }, + { + "date": "2023-11", + "title": "HomeRobot: Open Vocabulary Mobile Manipulation", + "authors": [ + "Sriram Yenamandra", + "Arun Ramachandran", + "KarmeshYadav", + "Austin Wang", + "MukulKhanna", + "TheoGervet", + "Tsung-Yen Yang", + "Vidhi Jain", + "Alexander William Clegg", + "John Turner", + "ZsoltKira", + "manolis", + "angel", + "DevendraChaplot", + "DhruvBatra", + "RoozbehMottaghi", + "YonatanBisk", + "ChrisPaxton" + ], + "text": "CoRL 2023", + "img": "files/ovmm.jpeg", + "pdf": "https://arxiv.org/pdf/2306.11565.pdf", + "web": "https://ovmm.github.io/", + "code": "https://github.com/facebookresearch/home-robot", + "challenge": "https://aihabitat.org/challenge/2023_homerobot_ovmm/", + "venue": ["robotics", "conference"], + "tags": ["embodied", "rearrangement"] + }, + { + "date": "2023-10", + "title": "Multi3DRefer: Grounding Text Description to Multiple 3D Objects", + "authors": [ + "YimingZhang", + "ZemingGong", + "angel" + ], + "text": "ICCV 2023", + "img": "files/multi3drefer.png", + "pdf": "https://arxiv.org/pdf/2309.05251.pdf", + "web": "https://3dlg-hcvc.github.io/multi3drefer", + "code": "https://github.com/3dlg-hcvc/M3DRef-CLIP", + "venue": ["vision", "conference"], + "tags": ["3d", "vl"] + }, + { + "date": "2023-10", + "title": "Exploiting Proximity-Aware Tasks for Embodied Social Navigation", + "authors": [ + "EnricoCancelli", + "TommasoCampari", + "LucianoSerafini", + "angel", + "LambertoBallan" + ], + "text": "ICCV 2023", + "img": 
"files/prox.png", + "pdf": "https://arxiv.org/pdf/2212.00767.pdf", + "venue": ["vision", "conference"], + "tags": ["embodied", "navigation"] + }, + { + "date": "2023-10", + "title": "UniT3D: A Unified Transformer for 3D Dense Captioning and Visual Grounding", + "authors": [ + "DaveZhenyuChen", + "RonghangHu", + "XinleiChen", + "matthias", + "angel" + ], + "text": "ICCV 2023", + "img": "files/unit3d.jpeg", + "pdf": "https://arxiv.org/pdf/2212.00836.pdf", + "web": "http://www.niessnerlab.org/projects/chen2022unit3d.html", + "venue": ["vision", "conference"], + "tags": ["3d", "vl"] + }, + { + "date": "2023-06", + "title": "Habitat-Matterport 3D Semantics Dataset", + "authors": [ + "KarmeshYadav", + "RamRamrakhya", + "SanthoshRamakrishnan", + "TheoGervet", + "John Turner", + "AaronGokaslan", + "Noah Maestre", + "angel", + "DhruvBatra", + "manolis", + "Alexander William Clegg", + "DevendraChaplot" + ], + "text": "CVPR 2023", + "img": "files/hm3dsem.png", + "pdf": "https://arxiv.org/pdf/2210.05633.pdf", + "web": "https://aihabitat.org/datasets/hm3d-semantics/", + "venue": ["vision", "conference"], + "tags": ["3d", "scans", "dataset"] + }, + { + "date": "2023-06", + "title": "Evaluating 3D Shape Analysis Methods for Robustness to Rotation Invariance", + "authors": [ + "Supriya Pandhre", + "angel", + "manolis" + ], + "text": "CRV 2023", + "img": "files/3drotinv.png", + "pdf": "https://arxiv.org/pdf/2305.18557.pdf", + "venue": ["vision","conference"], + "tags": ["3d"] + }, + { + "date": "2022-09", + "title": "Understanding Pure CLIP Guidance for Voxel Grid NeRF Models", + "authors": [ + "HanHungLee", + "angel" + ], + "text": "arXiv:2201.07366 [cs.CV], September 2022", + "img": "files/pureclipnerf.png", + "pdf": "https://arxiv.org/pdf/2209.15172.pdf", + "web": "https://hanhung.github.io/PureCLIPNeRF/", + "code": "https://github.com/hanhung/PureCLIPNeRF", + "venue": ["preprint"], + "tags": ["3d", "shape", "synthesis", "text2shape"] + }, + { + "date": "2022-9", + "title": "MultiScan: Scalable RGBD scanning for 3D environments with articulated objects", + "authors": [ + "YongsenMao", + "YimingZhang", + "HanxiaoJiang", + "angel", + "manolis" + ], + "text": "NeurIPS 2022", + "img": "files/multiscan.png", + "pdf": "https://openreview.net/pdf?id=YxUdazpgweG", + "web": "https://3dlg-hcvc.github.io/multiscan", + "code": "https://github.com/smartscenes/multiscan", + "venue": ["ai", "conference"], + "tags": ["articulations","3d"] + }, + { + "date": "2022-09", + "title": "Articulated 3D Human-Object Interactions from RGB Videos: An Empirical Analysis of Approaches and Challenges", + "authors": [ + "SanjayHaresh", + "XiaohaoSun", + "HanxiaoJiang", + "angel", + "manolis" + ], + "text": "3DV 2022", + "img": "files/3dhoi.png", + "pdf": "https://arxiv.org/pdf/2209.05612.pdf", + "web": "https://3dlg-hcvc.github.io/3dhoi/", + "code": "https://github.com/3dlg-hcvc/3dhoi", + "venue": ["vision", "conference"], + "tags": ["articulations","3d"] + }, + { + "date": "2022-07", + "title": "OPD: Single-view 3D Openable Part Detection", + "authors": [ + "HanxiaoJiang", + "YongsenMao", + "manolis", + "angel" + ], + "text": "ECCV 2022", + "img": "files/opd.png", + "pdf": "https://arxiv.org/pdf/2203.16421.pdf", + "web": "https://3dlg-hcvc.github.io/OPD/", + "code": "https://github.com/3dlg-hcvc/OPD", + "venue": ["vision", "conference"], + "tags": ["articulations","3d"] + }, + { + "date": "2022-07", + "title": "D3Net: A Unified Speaker-Listener Architecture for 3D Dense Captioning and Visual Grounding", + "authors": [ + 
"DaveZhenyuChen", + "QiruiWu", + "matthias", + "angel" + ], + "text": "ECCV 2022", + "img": "files/d3net.png", + "pdf": "https://arxiv.org/pdf/2112.01551.pdf", + "web": "https://daveredrum.github.io/D3Net/", + "code": "https://github.com/daveredrum/D3Net", + "venue": ["vision", "conference"], + "tags": ["vl","3d"] + }, + { + "date": "2022-05", + "title": "3DVQA: Visual Question Answering for 3D Environments", + "authors": [ + "YasamanEtesam", + "LeonKochiev", + "angel" + ], + "text": "CRV 2022", + "img": "files/3dvqa.png", + "pdf": "https://ieeexplore.ieee.org/document/9866910", + "web": "https://3dlg-hcvc.github.io/3DVQA/", + "code": "https://github.com/3dlg-hcvc/3DVQA", + "venue": ["vision", "conference"], + "tags": ["vqa","3d"] + }, + { + "date": "2021-12", + "title": "Habitat-Matterport 3D Dataset (HM3D): 1000 Large-scale 3D Environments for Embodied AI", + "authors": [ + "SanthoshRamakrishnan", + "AaronGokaslan", + "ErikWijmans", + "Oleksandr Maksymets", + "Alexander Clegg", + "John Turner", + "Eric Undersander", + "Wojciech Galuba", + "Andrew Westbury", + "angel", + "manolis", + "YiliZhao", + "DhruvBatra" + ], + "text": "NeurIPS Datasets and Benchmarks Track 2021", + "img": "files/hm3d.jpeg", + "pdf": "https://arxiv.org/pdf/2109.08238.pdf", + "web": "https://aihabitat.org/datasets/hm3d/", + "venue": ["ai","conference"], + "tags": ["embodied","dataset","3d"] + }, + { + "date": "2021-12", + "title": "Habitat 2.0: Training Home Assistants to Rearrange their Habitat", + "authors": [ + "AndrewSzot", + "Alexander Clegg", + "Eric Undersander", + "ErikWijmans", + "YiliZhao", + "John Turner", + "Noah Maestre", + "MustafaMukadam", + "DevendraChaplot", + "Oleksandr Maksymets", + "AaronGokaslan", + "VladimírVondrus", + "Sameer Dharur", + "FranziskaMeier", + "Wojciech Galuba", + "angel", + "ZsoltKira", + "vladlen", + "JitendraMalik", + "manolis", + "DhruvBatra" + ], + "text": "NeurIPS 2021", + "img": "files/hab2.jpeg", + "pdf": "https://arxiv.org/pdf/2106.14405.pdf", + "post": "https://ai.facebook.com/blog/habitat-20-training-home-assistant-robots-with-faster-simulation-and-new-benchmarks/", + "code": "https://github.com/facebookresearch/habitat-sim", + "venue": ["ai", "conference"], + "tags": ["embodied","simulator"] + }, + { + "date": "2021-10", + "title": "Language-Aligned Waypoint (LAW) Supervision for Vision-and-Language Navigation in Continuous Environments", + "authors": [ + "SoniaRaychaudhuri", + "ShivanshPatel", + "SaimWani", + "UnnatJain", + "angel" + ], + "text": "EMNLP 2021 (short)", + "web": "https://3dlg-hcvc.github.io/LAW-VLNCE/", + "pdf": "https://arxiv.org/pdf/2109.15207.pdf", + "code": "https://github.com/3dlg-hcvc/LAW-VLNCE", + "img": "files/law-vlnce.gif", + "venue": ["nlp", "conference"], + "tags": ["embodied"] + }, + { + "date": "2021-08", + "title": "Interpretation of Emergent Communication in Heterogeneous Collaborative Embodied Agents", + "authors": [ + "ShivanshPatel", + "SaimWani", + "UnnatJain", + "AlexanderSchwing", + "SvetlanaLazebnik", + "manolis", + "angel" + ], + "text": "ICCV 2021", + "web": "https://shivanshpatel35.github.io/comon/", + "pdf": "https://arxiv.org/pdf/2110.05769.pdf", + "code": "https://github.com/saimwani/CoMON", + "img": "files/comon.png", + "venue": ["vision", "conference"], + "tags": ["embodied"] + }, + { + "date": "2021-06", + "title": "Roominoes: Learning to Assemble 3D Rooms into Floor Plans", + "authors": [ + "KaiWang", + "XianghaoXu", + "Leon Lei", + "Selena Ling", + "Natalie Lindsay", + "angel", + "manolis", + "DanielRitchie" + ], + 
"text": "SGP 2021", + "img": "files/roominoes.png", + "pdf": "https://drive.google.com/file/d/1aSePGciFxnLRF4dhJynmNNctVfndIRpE/view", + "video": "https://www.youtube.com/watch?v=GBzpgLPP5VY&ab_channel=KaiWang", + "venue": ["graphics", "conference"], + "tags": ["3d", "scene"] + }, + { + "date": "2021-06", + "title": "Scan2Cap: Context-aware Dense Captioning in RGB-D Scans", + "authors": [ + "DaveZhenyuChen", + "AliGholami", + "matthias", + "angel" + ], + "text": "CVPR 2021", + "pdf": "https://arxiv.org/pdf/2012.02206.pdf", + "img": "files/scan2cap.jpg", + "web": "https://daveredrum.github.io/Scan2Cap/", + "code": "https://github.com/daveredrum/Scan2Cap", + "venue": ["vision", "conference"], + "tags": ["vl","3d"] + }, + { + "date": "2021-06", + "title": "Plan2Scene: Converting Floorplans to 3D Scenes", + "authors": [ + "madhawa", + "QiruiWu", + "yasu", + "angel", + "manolis" + ], + "text": "CVPR 2021", + "pdf": "https://arxiv.org/pdf/2106.05375.pdf", + "img": "files/plan2scene.png", + "web": "https://3dlg-hcvc.github.io/plan2scene/", + "code": "https://github.com/3dlg-hcvc/plan2scene", + "venue": ["vision", "conference"], + "tags": ["3d", "scene"] + }, + { + "date": "2021-06", + "title": "Mirror3D: Depth Refinement for Mirror Surfaces", + "authors": [ + "JiaqiTan", + "WeijieLin", + "angel", + "manolis" + ], + "text": "CVPR 2021", + "pdf": "https://arxiv.org/pdf/2106.06629.pdf", + "img": "files/mirror3d.png", + "web": "https://3dlg-hcvc.github.io/mirror3d/", + "code": "https://github.com/3dlg-hcvc/mirror3d", + "venue": ["vision", "conference"], + "tags": ["3d", "dataset"] + }, + { + "date": "2020-11", + "title": "Rearrangement: A Challenge for Embodied AI", + "authors": [ + "DhruvBatra", + "angel", + "SoniaChernova", + "AndrewDavison", + "JiaDeng", + "vladlen", + "SergeyLevine", + "JitendraMalik", + "IgorMordatch", + "RoozbehMottaghi", + "manolis", + "HaoSu" + ], + "img": "files/rearrangement.png", + "text": "arXiv:2011.01975 [cs.AI], November 2020", + "pdf": "https://arxiv.org/pdf/2011.01975.pdf", + "venue": ["ai","preprint"], + "tags": ["embodied"] + }, + { + "date": "2020-10", + "title": "Motion Annotation Programs: A Scalable Approach to Annotating Kinematic Articulations in Large 3D Shape Collections", + "authors": [ + "XianghaoXu", + "DavidCharatan", + "SoniaRaychaudhuri", + "HanxiaoJiang", + "MaeHeitmann", + "VladmirKim", + "SiddharthaChaudhuri", + "manolis", + "angel", + "DanielRitchie" + ], + "text": "3DV 2020", + "img": "files/maps.png", + "web": "http://visual.cs.brown.edu/projects/articulations-webpage/", + "pdf": "http://visual.cs.brown.edu/projects/articulations-webpage/articulations_3dv2020.pdf", + "code": "https://github.com/brownvc/articulations", + "demo": "http://articulations.cs.brown.edu/", + "venue": ["vision", "conference"], + "tags": ["3d","articulations"] + }, + { + "date": "2020-10", + "title": "Multi-ON: Benchmarking Semantic Map Memory using Multi-Object Navigation", + "authors": [ + "SaimWani", + "ShivanshPatel", + "UnnatJain", + "angel", + "manolis" + ], + "text": "NeurIPS 2020", + "img": "files/multion.jpg", + "web": "https://shivanshpatel35.github.io/multi-ON/", + "pdf": "https://shivanshpatel35.github.io/multi-ON/resources/MultiON.pdf", + "code": "https://github.com/saimwani/multiON", + "venue": ["ml", "conference"], + "tags": ["embodied"] + }, + { + "date": "2020-07", + "title": "Sneak Pique: Exploring Autocompletion as a Data Discovery Scaffold for Supporting Visual Analysis", + "authors": [ + "VidyaSetlur", + "EnamulHoque", + "DaeHyunKim", + "angel" + ], + 
"text": "UIST 2020", + "img": "https://www.tableau.com/sites/default/files/2023-01/sneakpique_1.png", + "web": "https://www.tableau.com/research/publications/sneak-pique", + "pdf": "https://www.tableau.com/sites/default/files/2023-01/Autocomplete-UIST2020.pdf", + "venue": ["hci", "conference"], + "tags": ["dataviz"] + }, + { + "date": "2020-06", + "title": "ScanRefer: 3D Object Localization in RGB-D Scans using Natural Language", + "authors": [ + "DaveZhenyuChen", + "angel", + "matthias" + ], + "text": "ECCV 2020", + "img": "files/scanrefer.jpg", + "web": "https://daveredrum.github.io/ScanRefer/", + "pdf": "https://daveredrum.github.io/ScanRefer/davezchen_eccv2020_scanrefer.pdf", + "code": "https://github.com/daveredrum/ScanRefer", + "benchmark": "http://kaldir.vc.in.tum.de/scanrefer_benchmark/", + "venue": ["vision", "conference"], + "tags": ["vl","3d"] + }, + { + "date": "2020-06", + "title": "SAPIEN: a SimulAted Part-based Interactive ENvironment", + "authors": [ + "FanboXiang", + "YuzheQin", + "KaichunMo", + "YikuanXia", + "HaoZhu", + "FangchenLiu", + "MinghuaLiu", + "HanxiaoJiang", + "YifuYuan", + "HeWang", + "LiYi", + "angel", + "leo", + "HaoSu" + ], + "text": "CVPR 2020", + "img": "files/sapien.png", + "web": "https://sapien.ucsd.edu/", + "pdf": "https://arxiv.org/abs/2003.08515", + "venue": ["vision", "conference"], + "tags": ["simulator", "articulations"] + }, + { + "date": "2019-11", + "title": "Mimic and Rephrase: Reflective Listening in Open-Ended Dialogue", + "authors": [ + "JustinDieter", + "TianWang", + "gabor", + "angel", + "arun" + ], + "text": "CONLL 2019", + "img": "files/mimic.png", + "pdf": "https://www.aclweb.org/anthology/K19-1037.pdf", + "code": "https://github.com/square/MimicAndRephrase", + "venue": ["nlp", "conference"], + "tags": ["rephrase"] + }, + { + "date": "2019-08", + "title": "PlanIT: Planning and Instantiating Indoor Scenes with Relation Graph and Spatial Prior Networks", + "authors": [ + "KaiWang", + "YuAnLin", + "BenWeissmann", + "manolis", + "angel", + "DanielRitchie" + ], + "text": "SIGGRAPH 2019", + "pdf": "https://drive.google.com/file/d/1CJCM6EQyeUWwxdk6tl8cVxEIhV7s3DoA/view", + "code": "https://github.com/brownvc/planit", + "img": "https://dritchie.github.io/img/pubthumbs/graphsynth.png", + "venue": ["graphics", "conference"], + "tags": ["3d", "scene", "synthesis"] + }, + { + "date": "2019-03", + "title": "Hierarchy Denoising Recursive Autoencoders for 3D Scene Layout Prediction", + "authors": [ + "YifeiShi", + "angel", + "ZhelunWu", + "manolis", + "KaiXu" + ], + "text": "CVPR 2019, arXiv:1903.03757 [cs.CV]", + "img": "files/hierarchylayout.jpg", + "pdf": "https://arxiv.org/pdf/1903.03757", + "web": "http://www.yifeishi.net/hierarchylayout.html", + "code": "https://github.com/yifeishi/HierarchyLayout", + "venue": ["vision", "conference"], + "tags": [ + "3d", + "scene" + ] + }, + { + "date": "2018-12", + "year": "2019", + "title": "PartNet: A Large-scale Benchmark for Fine-grained and Hierarchical Part-level 3D Object Understanding", + "authors": [ + "KaichunMo", + "ShilinZhu", + "angel", + "LiYi", + "SubarnaTripathi", + "leo", + "HaoSu" + ], + "text": "CVPR 2019, arXiv:1812.02713 [cs.CV]", + "img": "https://cs.stanford.edu/~kaichun/partnet/images/teaser.png", + "pdf": "https://arxiv.org/abs/1812.02713", + "web": "https://cs.stanford.edu/~kaichun/partnet/", + "video": "https://youtu.be/7pEuoxmb-MI", + "venue": ["vision", "conference"], + "tags": [ + "3d", + "shape", + "parts", + "dataset" + ] + }, + { + "date": "2018-11", + "year": "2019", + 
"title": "Scan2CAD: Learning CAD Model Alignment in RGB-D Scans", + "authors": [ + "ArmenAvetisyan", + "ManuelDahnert", + "angie", + "manolis", + "angel", + "matthias" + ], + "text": "CVPR 2019 (oral), arXiv:1811.11187 [cs.CV]", + "img": "files/scan2cad.png", + "pdf": "https://arxiv.org/pdf/1811.11187.pdf", + "video": "http://www.youtube.com/watch?v=PiHSYpgLTfA", + "code": "https://github.com/skanti/Scan2CAD", + "benchmark": "http://kaldir.vc.in.tum.de/scan2cad_benchmark/", + "venue": ["vision", "conference"], + "tags": [ + "3d", + "scene", + "scans", + "dataset" + ] + }, + { + "date": "2018-08", + "title": "Deep Convolutional Priors for Indoor Scene Synthesis", + "authors": [ + "KaiWang", + "manolis", + "angel", + "DanielRitchie" + ], + "text": "SIGGRAPH 2018", + "img": "http://msavva.github.io/files/deepsynth.png", + "pdf": "https://dritchie.github.io/pdf/deepsynth.pdf", + "code": "https://github.com/brownvc/deep-synth", + "venue": ["graphics", "conference"], + "tags": [ + "3d", + "scene", + "synthesis" + ] + }, + { + "date": "2018-07", + "title": "On evaluation of embodied navigation agents", + "authors": [ + "PeterAnderson", + "angel", + "DevendraChaplot", + "alexey", + "SaurabhGupta", + "vladlen", + "JanaKosecka", + "JitendraMalik", + "RoozbehMottaghi", + "manolis", + "AmirZamir" + ], + "text": "arXiv:1807.06757 [cs.AI], July 2018", + "pdf": "https://arxiv.org/abs/1807.06757", + "img": "files/spl.jpeg", + "venue": ["ai", "preprint"], + "tags": [ + "navigation", + "embodied" + ] + }, + { + "date": "2018-03", + "title": "Text2Shape: Generating Shapes from Natural Language by Learning Joint Embeddings", + "authors": [ + "KevinChen", + "ChrisChoy", + "manolis", + "angel", + "tom", + "silvio" + ], + "text": "Proceedings of ACCV 2018 (oral), arXiv:1803.08495 [cs.CV]", + "pdf": "https://arxiv.org/abs/1803.08495", + "img": "http://msavva.github.io/files/text2shape.png", + "web": "http://text2shape.stanford.edu/", + "code": "https://github.com/kchen92/text2shape/", + "venue": ["vision", "conference"], + "tags": [ + "3d", + "shape", + "synthesis", + "text2shape" + ] + }, + { + "date": "2017-12", + "title": "MINOS: Multimodal Indoor Simulator for Navigation in Complex Environments", + "authors": [ + "manolis", + "angel", + "alexey", + "tom", + "vladlen" + ], + "text": "arXiv:1712.03931 [cs.LG]", + "pdf": "https://arxiv.org/abs/1712.03931", + "img": "https://github.com/minosworld/minos/raw/master/docs/img/video_thumbnail.png", + "web": "https://www.minosworld.org/", + "code": "https://github.com/minosworld/minos", + "video": "https://youtu.be/c0mL9K64q84", + "venue": ["ai", "preprint"], + "tags": [ + "navigation", + "simulator", + "embodied" + ] + }, + { + "date": "2017-12", + "year": "2018", + "title": "Im2Pano3D: Extrapolating 360 Structure and Semantics Beyond the Field of View", + "authors": [ + "shuran", + "andy", + "angel", + "manolis", + "silvio", + "tom" + ], + "text": "Proceedings of CVPR 2018, arXiv:1712.04569 [cs.CV]", + "pdf": "https://arxiv.org/abs/1712.04569", + "img": "http://im2pano3d.cs.princeton.edu/teaser.jpg", + "web": "http://im2pano3d.cs.princeton.edu/", + "venue": ["vision", "conference"], + "tags": [ + "vision", + "panorama" + ] + }, + { + "date": "2017-11", + "year": "2018", + "title": "Linking WordNet to 3D Shapes", + "authors": [ + "angel", + "RishiMago", + "PranavKrishna", + "manolis", + "christiane" + ], + "text": "Proceedings of Global WordNet Conference 2018", + "pdf": "http://compling.hss.ntu.edu.sg/events/2018-gwc/pdfs/GWC2018_paper_66.pdf", + "img": 
"files/wordnetLink.png", + "venue": ["nlp", "resources"], + "tags": [ + "shape" + ] + }, + { + "date": "2017-10", + "title": "Cross-modal Attribute Transfer for Rescaling 3D Models", + "authors": [ + "lin", + "angel", + "HaoSu", + "manolis", + "leo" + ], + "text": "Proceedings of 3DV 2017", + "pdf": "http://cseweb.ucsd.edu/~haosu/papers/3dv2017_attribute_transfer.pdf", + "img": "http://msavva.github.io/files/attributeTransfer.png", + "venue": ["vision", "conference"] + }, + { + "date": "2017-10", + "title": "Matterport3D: Learning from RGB-D Data in Indoor Environments", + "authors": [ + "angel", + "angie", + "tom", + "maciej", + "matthias", + "manolis", + "shuran", + "andy", + "yinda" + ], + "text": "Proceedings of 3DV 2017, arXiv:1709.06158 [cs.CV]", + "pdf": "https://arxiv.org/pdf/1709.06158.pdf", + "img": "http://msavva.github.io/files/matterport3d.png", + "code": "https://github.com/niessner/Matterport", + "web": "https://niessner.github.io/Matterport/", + "venue": ["vision", "conference"], + "tags": [ + "3d", + "scans", + "dataset" + ] + }, + { + "date": "2017-04", + "title": "Learning Where to Look: Data-Driven Viewpoint Set Selection for 3D Scenes", + "authors": [ + "kyle", + "manolis", + "angel", + "tom" + ], + "text": "arXiv:1704.02393 [cs.CV]", + "pdf": "https://arxiv.org/pdf/1704.02393.pdf", + "img": "http://msavva.github.io/files/viewsets.png", + "venue": ["vision", "preprint"] + }, + { + "date": "2017-02", + "title": "ScanNet: Richly-annotated 3D Reconstructions of Indoor Scenes", + "authors": [ + "angie", + "angel", + "manolis", + "maciej", + "tom", + "matthias" + ], + "text": "Proceedings of CVPR 2017 (spotlight), arXiv:1702.04405 [cs.CV]", + "img": "files/scannet.jpg", + "pdf": "https://arxiv.org/pdf/1702.04405.pdf", + "web": "http://www.scan-net.org/", + "code": "https://github.com/ScanNet/ScanNet", + "video": "http://www.youtube.com/watch?v=Olx4OnoZWQQ", + "benchmark": "http://kaldir.vc.in.tum.de/scannet_benchmark/", + "venue": ["vision", "conference"], + "tags": [ + "3d", + "scans", + "dataset" + ] + }, + { + "date": "2016-11", + "year": "2017", + "title": "Semantic Scene Completion from a Single Depth Image", + "authors": [ + "shuran", + "fisher", + "andy", + "angel", + "manolis", + "tom" + ], + "text": "Proceedings of CVPR 2017 (oral), arXiv:1611.08974 [cs.CV]", + "img": "files/sscnet.jpeg", + "pdf": "https://arxiv.org/pdf/1611.08974v1.pdf", + "web": "http://vision.princeton.edu/projects/2016/SSCNet/", + "venue": ["vision", "conference"], + "tags": [ + "3d", + "scene" + ] + }, + { + "date": "2017-04", + "title": "A Two-stage Sieve Approach to Quote Attribution", + "authors": [ + "grace", + "MichaelFang", + "angel", + "danj" + ], + "text": "Proceedings of EACL 2017", + "img": "files/quote-attribution.png", + "pdf": "https://www.aclweb.org/anthology/E17-1044.pdf", + "data": "http://nlp.stanford.edu/~muzny/quoteli.html", + "code": "http://nlp.stanford.edu/~muzny/quoteli.html", + "venue": ["nlp", "conference"], + "tags": [ + "quote-attribution" + ] + }, + { + "date": "2017-03", + "title": "SceneSuggest: Context-driven 3D Scene Design", + "authors": [ + "manolis", + "angel", + "maneesh" + ], + "text": "arXiv:1703.00061 [cs.CG], March 2017", + "img": "http://msavva.github.io/files/scenesuggest.png", + "pdf": "http://arxiv.org/abs/1703.00061.pdf", + "demo": "http://aspis.cmpt.sfu.ca/scene-toolkit/scene-suggest.html", + "venue": ["preprint"] + }, + { + "date": "2017-03", + "title": "SceneSeer: 3D Scene Design with Natural Language", + "authors": [ + "angel", + "mihail", + 
"manolis", + "chrisman" + ], + "text": "arXiv:1703.00050 [cs.CG], March 2017", + "img": "http://msavva.github.io/files/sceneseer.png", + "pdf": "http://arxiv.org/abs/1703.00050.pdf", + "demo": "http://aspis.cmpt.sfu.ca/scene-toolkit/text2scene.html", + "venue": ["preprint"] + }, + { + "date": "2016-10", + "title": "Eviza: A Natural Language Interface for Visual Analysis", + "authors": [ + "VidyaSetlur", + "SarahBattersby", + "MelanieTory", + "RichGossweiler", + "angel" + ], + "text": "UIST 2016", + "img": "https://www.tableau.com/sites/default/files/2023-01/eviza_teaser_0.png", + "pdf": "https://www.tableau.com/sites/default/files/2023-01/uist4832-setlurA_0.pdf", + "web": "https://www.tableau.com/research/publications/eviza-natural-language-interface-visual-analysis", + "venue": ["hci", "conference"], + "tags": ["dataviz"] + }, + { + "date": "2016-07", + "title": "PiGraphs: Learning Interaction Snapshots from Observations", + "authors": [ + "manolis", + "angel", + "pat", + "matt", + "matthias" + ], + "text": "SIGGRAPH 2016", + "img": "http://graphics.stanford.edu/projects/pigraphs/pigraphs.png", + "pdf": "http://graphics.stanford.edu/projects/pigraphs/", + "bib": "http://graphics.stanford.edu/projects/pigraphs/", + "web": "http://graphics.stanford.edu/projects/pigraphs/", + "venue": ["graphics", "conference"] + }, + { + "date": "2016-03", + "title": "Evaluating the word-expert approach for Named-Entity Disambiguation", + "authors": [ + "angel", + "val", + "chrisman", + "eneko" + ], + "text": "arXiv:1603.04767 [cs.CL], March 2016", + "pdf": "http://arxiv.org/pdf/1603.04767v1", + "venue": ["nlp", "preprint"], + "tags": [ + "entity-linking" + ] + }, + { + "date": "2015-12", + "title": "Text to 3D scene generation", + "authors": [ + "angel" + ], + "text": "Ph.D. 
dissertation, Department of Computer Science, Stanford University, 2015", + "img": "files/text2scene.png", + "pdf": "https://purl.stanford.edu/vg064sy5087", + "venue": ["thesis"], + "tags": [ + "text2scene" + ] + }, + { + "date": "2015-12", + "title": "ShapeNet: An Information-Rich 3D Model Repository", + "authors": [ + "angel", + "tom", + "leo", + "pat", + "qixing", + "zimo", + "silvio", + "manolis", + "shuran", + "HaoSu", + "jianxiong", + "LiYi", + "fisher" + ], + "text": "arXiv:1512.03012 [cs.GR], Dec 2015", + "img": "files/shapenet.png", + "pdf": "http://arxiv.org/pdf/1512.03012v1", + "bib": "http://shapenet.cs.stanford.edu/resources/shapenet.bib", + "code": "https://github.com/ShapeNet", + "web": "http://shapenet.cs.stanford.edu/", + "venue": ["preprint"], + "tags": ["3d", "shape", "dataset"] + }, + { + "date": "2015-10", + "title": "Generating Semantically Precise Scene Graphs from Textual Descriptions for Improved Image Retrieval", + "authors": [ + "sebastian", + "ranjay", + "angel", + "fei-fei", + "chrisman" + ], + "text": "In Proceedings of the Fourth Workshop on Vision and Language (VL15)", + "pdf": "http://nlp.stanford.edu/~sebschu/pubs/schuster-krishna-chang-feifei-manning-vl15.pdf", + "bib": "http://nlp.stanford.edu/~sebschu/pubs/schuster-krishna-chang-feifei-manning-vl15.bib", + "web": "http://nlp.stanford.edu/software/scenegraph-parser.shtml", + "img": "files/twomen.png", + "venue": ["nlp", "workshop"], + "tags": [ + "scene-graphs", + "image-retrieval" + ] + }, + { + "date": "2015-07", + "title": "Semantically-Enriched 3D Models for Common-sense Knowledge", + "authors": [ + "manolis", + "angel", + "pat" + ], + "text": "CVPR 2015 Vision meets Cognition Workshop", + "img": "files/semgeo.png", + "pdf": "http://graphics.stanford.edu/projects/semgeo/semgeo.pdf", + "bib": "http://graphics.stanford.edu/projects/semgeo/semgeo.bib", + "web": "http://graphics.stanford.edu/projects/semgeo/", + "venue": ["vision", "workshop"], + "tags": [ + "common-sense" + ] + }, + { + "date": "2015-06", + "title": "Text to 3D Scene Generation with Rich Lexical Grounding", + "authors": [ + "angel", + "will", + "manolis", + "chrispotts", + "chrisman" + ], + "text": "Proceedings of ACL 2015", + "img": "files/lexground.png", + "pdf": "http://nlp.stanford.edu/pubs/chang-acl2015-lexground.pdf", + "bib": "http://nlp.stanford.edu/pubs/chang-acl2015-lexground.bib", + "web": "http://nlp.stanford.edu/data/text2scene.shtml", + "venue": ["nlp", "conference"], + "tags": [ + "spatial", + "text2scene" + ] + }, + { + "date": "2014-12", + "title": "SceneGrok: Inferring Action Maps in 3D Environments", + "authors": [ + "manolis", + "angel", + "pat", + "matt", + "matthias" + ], + "text": "Proceedings of SIGGRAPH Asia 2014", + "img": "files/scenegrok.png", + "pdf": "http://graphics.stanford.edu/projects/scenegrok/scenegrok.pdf", + "bib": "http://graphics.stanford.edu/projects/scenegrok/scenegrok.bib", + "web": "http://graphics.stanford.edu/projects/scenegrok/", + "venue": ["graphics", "conference"], + "tags": [ + "actionmap" + ] + }, + { + "date": "2014-12", + "title": "On Being the Right Scale: Sizing Large Collections of 3D Models", + "authors": [ + "manolis", + "angel", + "gilbert", + "chrisman", + "pat" + ], + "text": "SIGGRAPH Asia 2014 Workshop on Indoor Scene Understanding: Where Graphics meets Vision", + "img": "files/sizes.png", + "pdf": "http://graphics.stanford.edu/projects/sizes/sizes.pdf", + "bib": "http://graphics.stanford.edu/projects/sizes/sizes.bib", + "web": 
"http://graphics.stanford.edu/projects/sizes/", + "venue": ["graphics", "workshop"], + "tags": [ + "common-sense" + ] + }, + { + "date": "2014-10", + "title": "Learning Spatial Knowledge for Text to 3D Scene Generation", + "authors": [ + "angel", + "manolis", + "chrisman" + ], + "text": "Proceedings of the 2014 Conference on Empirical Methods in Natural Language Processing (EMNLP 2014)", + "img": "files/spatialLearning.png", + "pdf": "http://nlp.stanford.edu/pubs/spatial-emnlp2014.pdf", + "bib": "http://nlp.stanford.edu/pubs/spatial-emnlp2014.bib", + "web": "http://nlp.stanford.edu/projects/text2scene.shtml", + "venue": ["nlp", "conference"], + "tags": [ + "spatial", + "text2scene" + ] + }, + { + "date": "2014-07", + "title": "Learning Affordance Maps by Observing Interactions", + "authors": [ + "manolis", + "angel", + "matt", + "matthias", + "pat" + ], + "text": "CVPR 2014 Workshop on Functionality, Physics, Intentionality and Causality", + "img": "files/fpic2014.png", + "pdf": "files/fpic2014.pdf", + "bib": "files/fpic2014.bib", + "venue": ["vision", "workshop"] + }, + { + "date": "2014-06", + "title": "Interactive Learning of Spatial Knowledge for Text to 3D Scene Generation", + "authors": [ + "angel", + "manolis", + "chrisman" + ], + "text": "Proceedings of the ACL 2014 Workshop on Interactive Language Learning, Visualization, and Interfaces", + "img": "files/interactiveLearning.png", + "pdf": "http://nlp.stanford.edu/pubs/scenegen-aclviz2014.pdf", + "bib": "http://nlp.stanford.edu/pubs/scenegen-aclviz2014.bib", + "web": "http://nlp.stanford.edu/projects/text2scene.shtml", + "venue": ["nlp", "workshop"], + "tags": [ + "spatial", + "text2scene" + ] + }, + { + "date": "2014-06", + "title": "Semantic Parsing for Text to 3D Scene Generation", + "authors": [ + "angel", + "manolis", + "chrisman" + ], + "text": "Proceedings of the ACL 2014 Workshop on Semantic Parsing", + "img": "files/semanticParsing.png", + "pdf": "http://nlp.stanford.edu/pubs/scenegen-sp2014.pdf", + "bib": "http://nlp.stanford.edu/pubs/scenegen-sp2014.bib", + "web": "http://nlp.stanford.edu/projects/text2scene.shtml", + "venue": ["nlp", "workshop"], + "tags": [ + "text2scene" + ] + }, + { + "date": "2014-05", + "title": "TransPhoner: Automated Mnemonic Keyword Generation", + "authors": [ + "manolis", + "angel", + "chrisman", + "pat" + ], + "text": "Proceedings of CHI 2014", + "img": "files/transphoner.png", + "pdf": "http://graphics.stanford.edu/projects/transphoner/TransPhoner.pdf", + "bib": "http://graphics.stanford.edu/projects/transphoner/TransPhoner.bib", + "web": "http://graphics.stanford.edu/projects/transphoner/", + "venue": ["hci", "conference"] + }, + { + "date": "2014-04", + "title": "TokensRegex: Defining cascaded regular expressions over tokens", + "authors": [ + "angel", + "chrisman" + ], + "text": "Stanford University Technical Report", + "pdf": "http://nlp.stanford.edu/pubs/tokensregex-tr-2014.pdf", + "bib": "http://nlp.stanford.edu/pubs/tokensregex-tr-2014.bib", + "web": "http://nlp.stanford.edu/software/tokensregex.shtml", + "venue": ["nlp", "preprint"] + }, + { + "date": "2013-12", + "title": "Deterministic coreference resolution based on entity-centric, precision-ranked rules", + "authors": [ + "heeyoung", + "angel", + "yves", + "nate", + "mihai", + "danj" + ], + "text": "In Computational Linguistics 39(4)", + "img": "files/corefSieves.png", + "pdf": "http://www.mitpressjournals.org/doi/pdf/10.1162/COLI_a_00152", + "web": "http://nlp.stanford.edu/software/dcoref.shtml", + "venue": ["nlp", "journal"], + 
"tags": [ + "coreference" + ] + }, + { + "date": "2013-11", + "title": "Stanford's 2013 KBP System", + "authors": [ + "gabor", + "arun", + "angel", + "KevinReschke", + "julie", + "jean", + "osbert", + "KeithSiilats", + "chrisman" + ], + "text": "In Proceedings of the Sixth Text Analysis Conference (TAC 2014)", + "pdf": "http://stanford.edu/~angeli/papers/2014-tac-kbp.pdf", + "venue": ["nlp", "workshop"], + "tags": [ + "kbp" + ] + }, + { + "date": "2013-07", + "title": "SUTime: Evaluation in TempEval-3", + "authors": [ + "angel", + "chrisman" + ], + "text": "In Second Joint Conference on Lexical and Computational Semantics (*SEM), Volume 2: Proceedings of the Seventh International Workshop on Semantic Evaluation (SemEval 2013)", + "pdf": "http://aclweb.org/anthology/S/S13/S13-2013.pdf", + "venue": ["nlp", "workshop"], + "tags": [ + "time" + ] + }, + { + "date": "2012-07", + "title": "Joint Entity and Event Coreference Resolution across Documents", + "authors": [ + "heeyoung", + "marta", + "angel", + "mihai", + "danj" + ], + "text": "Proceedings of the Conference on Empirical Methods in Natural Language Processing and Computational Natural Language Learning (EMNLP-CoNLL 2012)", + "pdf": "http://nlp.stanford.edu/pubs/emnlp2012-coref.pdf", + "bib": "http://nlp.stanford.edu/pubs/emnlp2012-coref.bib", + "venue": ["nlp", "conference"], + "tags": [ + "coreference" + ] + }, + { + "date": "2012-05", + "title": "SUTime: A Library for Recognizing and Normalizing Time Expressions.", + "authors": [ + "angel", + "chrisman" + ], + "text": "In Proceedings of the Eighth International Conference on Language Resources and Evaluation (LREC 2012)", + "img": "files/sutime.png", + "pdf": "http://nlp.stanford.edu/pubs/lrec2012-sutime.pdf", + "bib": "http://nlp.stanford.edu/pubs/lrec2012-sutime.bib", + "poster": "http://nlp.stanford.edu/pubs/lrec2012-sutime-poster.pdf", + "web": "http://nlp.stanford.edu/software/sutime.shtml", + "demo": "http://nlp.stanford.edu:8080/sutime", + "venue": ["nlp", "resources"], + "tags": [ + "time" + ] + }, + { + "date": "2012-05", + "title": "A Cross-Lingual Dictionary for English Wikipedia Concepts", + "authors": [ + "val", + "angel" + ], + "text": "In Proceedings of the Eighth International Conference on Language Resources and Evaluation (LREC 2012)", + "pdf": "http://nlp.stanford.edu/pubs/crosswikis.pdf", + "bib": "http://nlp.stanford.edu/pubs/crosswikis.bib", + "slides": "http://nlp.stanford.edu/pubs/crosswikis-slides.pdf", + "post": "https://plus.google.com/117790530324740296539/posts/VSjc4KYpug2", + "data": "http://nlp.stanford.edu/pubs/crosswikis-data.tar.bz2", + "venue": ["nlp", "resources"], + "tags": [ + "entity-linking" + ] + }, + { + "date": "2011-11", + "title": "Stanford's Distantly-Supervised Slot-Filling System", + "authors": [ + "mihai", + "sonal", + "johnbauer", + "davidm", + "angel", + "val", + "chrisman" + ], + "text": "In Proceedings of the Fourth Text Analysis Conference (TAC 2011)", + "pdf": "http://nlp.stanford.edu/pubs/kbp2011-slotfilling.pdf", + "bib": "http://nlp.stanford.edu/pubs/kbp2011-slotfilling.bib", + "data": "http://nlp.stanford.edu/pubs/kbp_trigger_words.txt", + "venue": ["nlp", "workshop"], + "tags": [ + "kbp" + ] + }, + { + "date": "2011-11", + "title": "Strong Baselines for Cross-Lingual Entity Linking", + "authors": [ + "val", + "angel" + ], + "text": "In Proceedings of the Fourth Text Analysis Conference (TAC 2011)", + "pdf": "http://nlp.stanford.edu/pubs/kbp2011-crosslinking.pdf", + "bib": 
"http://nlp.stanford.edu/pubs/kbp2011-crosslinking.bib", + "venue": ["nlp", "workshop"], + "tags": [ + "entity-linking" + ] + }, + { + "date": "2011-11", + "title": "Stanford-UBC Entity Linking at TAC-KBP, Again", + "authors": [ + "angel", + "val", + "eneko", + "chrisman" + ], + "text": "In Proceedings of the Fourth Text Analysis Conference (TAC 2011)", + "pdf": "http://nlp.stanford.edu/pubs/kbp2011-entitylinking.pdf", + "bib": "http://nlp.stanford.edu/pubs/kbp2011-entitylinking.bib", + "venue": ["nlp", "workshop"], + "tags": [ + "entity-linking" + ] + }, + { + "date": "2011-07", + "title": "Unsupervised Dependency Parsing without Gold Part-of-Speech Tags", + "authors": [ + "val", + "hiyan", + "angel", + "danj" + ], + "text": "In Proceedings of the 2011 Conference on Empirical Methods in Natural Language Processing (EMNLP 2011)", + "pdf": "http://nlp.stanford.edu/pubs/goldtags.pdf", + "bib": "http://nlp.stanford.edu/pubs/goldtags.bib", + "poster": "http://nlp.stanford.edu/pubs/goldtags-poster.pdf", + "data": "http://nlp.stanford.edu/pubs/goldtags-data.tar.bz2", + "venue": ["nlp", "conference"], + "tags": [ + "parsing" + ] + }, + { + "date": "2011-06", + "title": "Stanford's Multi-Pass Sieve Coreference Resolution System at the CoNLL-2011 Shared Task", + "authors": [ + "heeyoung", + "yves", + "angel", + "nate", + "mihai", + "danj" + ], + "text": "In Proceedings of the CoNLL-2011 Shared Task", + "pdf": "http://nlp.stanford.edu/pubs/conllst2011-coref.pdf", + "bib": "http://nlp.stanford.edu/pubs/conllst2011-coref.bib", + "web": "http://nlp.stanford.edu/software/dcoref.shtml", + "venue": ["nlp", "conference"], + "tags": [ + "coreference" + ] + }, + { + "date": "2010-11", + "title": "A Simple Distant Supervision Approach for the TAC-KBP Slot Filling Task", + "authors": [ + "mihai", + "davidm", + "julie", + "johnbauer", + "angel", + "val", + "chrisman" + ], + "text": "In Proceedings of the Third Text Analysis Conference (TAC 2010)", + "pdf": "http://nlp.stanford.edu/pubs/kbp2010-slotfilling.pdf", + "bib": "http://nlp.stanford.edu/pubs/kbp2010-slotfilling.bib", + "slides": "http://nlp.stanford.edu/pubs/kbp2010-slotfilling-slides.pptx", + "venue": ["nlp", "workshop"], + "tags": [ + "kbp", + "Slot-Filling" + ] + }, + { + "date": "2010-11", + "title": "Stanford-UBC Entity Linking at TAC-KBP", + "authors": [ + "angel", + "val", + "ericyeh", + "eneko", + "chrisman" + ], + "text": "In Proceedings of the Third Text Analysis Conference (TAC 2010)", + "pdf": "http://nlp.stanford.edu/pubs/kbp2010-entitylinking.pdf", + "bib": "http://nlp.stanford.edu/pubs/kbp2010-entitylinking.bib", + "poster": "http://nlp.stanford.edu/pubs/kbp2010-entitylinking-poster.pdf", + "venue": ["nlp", "workshop"], + "tags": [ + "kbp", + "entity-linking" + ] + }, + { + "date": "2009-11", + "title": "Stanford-UBC at TAC-KBP", + "authors": [ + "eneko", + "angel", + "danj", + "chrisman", + "val", + "ericyeh" + ], + "text": "In Proceedings of the Second Text Analysis Conference (TAC 2009)", + "pdf": "http://nlp.stanford.edu/pubs/subctackbp.pdf", + "bib": "http://nlp.stanford.edu/pubs/subctackbp.bib", + "slides": "http://nlp.stanford.edu/pubs/subctackbp-slides.pdf", + "venue": ["nlp", "workshop"], + "tags": [ + "kbp" + ] + }, + { + "date": "2000-01", + "title": "The Fractal Geometry of the Boundary of Dragon Curves", + "authors": [ + "angel", + "tianrong" + ], + "text": "In Journal of Recreational Mathematics 30 (1), 9-22", + "img": "files/dragonCurve.png", + "pdf": "pubs/dragonbound.pdf", + "bib": "pubs/dragonbound.bib", + "venue": 
["misc", "journal"] + } +] \ No newline at end of file diff --git a/pubs/dragonbound.bib b/pubs/dragonbound.bib new file mode 100644 index 0000000..41a3982 --- /dev/null +++ b/pubs/dragonbound.bib @@ -0,0 +1,9 @@ +@Article{chang2000dragonbound, + title = {The Fractal Geometry of the Boundary of Dragon Curves}, + author = {Chang, Angel and Zhang, Tianrong}, + journal = {Journal of Recreational Mathematics}, + year = {2000}, + number = {1}, + volume = {30}, + pages = {9-22} +} \ No newline at end of file diff --git a/pubs/dragonbound.pdf b/pubs/dragonbound.pdf new file mode 100644 index 0000000..b60d810 Binary files /dev/null and b/pubs/dragonbound.pdf differ diff --git a/pubsall.html b/pubsall.html new file mode 100644 index 0000000..5b524d1 --- /dev/null +++ b/pubsall.html @@ -0,0 +1,8342 @@ + + + + + + + + + + Angel Xuan Chang - Publications | Angel Xuan Chang + + + + + + + + + + + + + +
2025

- ViGiL3D: A Linguistically Diverse Dataset for 3D Visual Grounding. Austin T. Wang, Zeming Gong, Angel X. Chang. arXiv:2501.01366 [cs.CV], January 2025. [pdf | webpage]

2024

- Diorama: Unleashing Zero-shot Single-view 3D Scene Modeling. Qirui Wu, Denys Iliash, Daniel Ritchie, Manolis Savva, Angel X. Chang. arXiv:2411.19492 [cs.CV], November 2024. [pdf | webpage]
- NL-SLAM for OC-VLN: Natural Language Grounded SLAM for Object-Centric VLN. Sonia Raychaudhuri, Duy Ta, Katrina Ashton, Angel X. Chang, Jiuguang Wang, Bernadette Bucher. arXiv:2411.07848 [cs.RO], November 2024. [pdf | video | webpage]
- SINGAPO: Single Image Controlled Generation of Articulated Parts in Objects. Jiayi Liu, Denys Iliash, Angel X. Chang, Manolis Savva, Ali Mahdavi-Amiri. arXiv:2410.16499 [cs.CV], October 2024. [pdf | code | webpage]
- S2O: Static to Openable Enhancement for Articulated 3D Objects. Denys Iliash, Hanxiao (Shawn) Jiang, Yiming Zhang, Manolis Savva, Angel X. Chang. arXiv:2409.18896 [cs.CV], September 2024. [pdf | code | webpage]
- An Object is Worth 64x64 Pixels: Generating 3D Object via Image Diffusion. Xingguang Yan, Han-Hung Lee, Ziyu Wan, Angel X. Chang. 3DV 2025, arXiv:2408.03178 [cs.CV], August 2024. [pdf | code | webpage]
- SceneMotifCoder: Example-driven Visual Program Learning for Generating 3D Object Arrangements. Hou In Ivan Tam, Hou In Derek Pun, Austin T. Wang, Angel X. Chang, Manolis Savva. 3DV 2025, arXiv:2408.02211 [cs.GR], August 2024. [pdf | code | video | webpage]
- R3DS: Reality-linked 3D Scenes for Panoramic Scene Understanding. Qirui Wu, Sonia Raychaudhuri, Daniel Ritchie, Manolis Savva, Angel X. Chang. ECCV 2024. [pdf | code | webpage]
- BIOSCAN-5M: A Multimodal Dataset for Insect Biodiversity. Zahra Gharaee, Scott C Lowe, Zeming Gong, Pablo Millan Arias, Nicholas Pellegrino, Austin T. Wang, Joakim Bruslund Haurum, Iuliia Zarubiieva, Lila Kari, Dirk Steinke, Graham W Taylor, Paul Fieguth, Angel X. Chang. NeurIPS D&B 2024, arXiv:2406.12723 [cs.LG], June 2024. [pdf | code | webpage]
- Duoduo CLIP: Efficient 3D Understanding with Multi-View Images. Han-Hung Lee, Yiming Zhang, Angel X. Chang. arXiv:2406.11579 [cs.CV], June 2024. [pdf | code | webpage]
- Habitat Synthetic Scenes Dataset (HSSD-200): An Analysis of 3D Scene Scale and Realism Tradeoffs for ObjectGoal Navigation. Mukul Khanna, Yongsen Mao, Hanxiao (Shawn) Jiang, Sanjay Haresh, Brennan Shacklett, Dhruv Batra, Alexander William Clegg, Eric Undersander, Angel X. Chang, Manolis Savva. CVPR 2024, arXiv:2306.11290 [cs.CV], June 2023. [pdf | code | data | webpage]
- CLIBD: Bridging Vision and Genomics for Biodiversity Monitoring at Scale. Zeming Gong, Austin T. Wang, Xiaoliang Huo, Joakim Bruslund Haurum, Scott C Lowe, Graham W Taylor, Angel X. Chang. arXiv:2405.17537 [cs.AI], May 2024. [pdf | code | webpage]
- M2DNeRF: Multi-Modal Decomposition NeRF with 3D Feature Fields. Ning Wang, Lefei Zhang, Angel X. Chang. arXiv:2405.05010 [cs.CV], May 2024. [pdf]
- Text-to-3D Shape Generation. Han-Hung Lee, Manolis Savva, Angel X. Chang. Eurographics STAR (State of The Art Report), CGF 2024. [pdf | webpage]
- Generalizing Single-View 3D Shape Retrieval to Occlusions and Unseen Objects. Qirui Wu, Daniel Ritchie, Manolis Savva, Angel X. Chang. 3DV 2024. [pdf | code | webpage]
- OPDMulti: Openable Part Detection for Multiple Objects. Xiaohao Sun, Hanxiao (Shawn) Jiang, Manolis Savva, Angel X. Chang. 3DV 2024, arXiv:2303.14087 [cs.CV], March 2023. [pdf | code | webpage]
- MOPA: Modular Object Navigation with PointGoal Agents. Sonia Raychaudhuri, Tommaso Campari, Unnat Jain, Manolis Savva, Angel X. Chang. WACV 2024, arXiv:2304.03696 [cs.RO, cs.CV], April 2023. [pdf | code | webpage]
- TriCoLo: Trimodal Contrastive Loss for Text to Shape Retrieval. Yue Ruan, Han-Hung Lee, Yiming Zhang, Ke Zhang, Angel X. Chang. WACV 2024, arXiv:2201.07366 [cs.CV], January 2022. [pdf | code | webpage]

2023

- BarcodeBERT: Transformers for Biodiversity Analysis. Pablo Millan Arias, Niousha Sadjadi, Monireh Safari, Zeming Gong, Austin T. Wang, Scott C Lowe, Joakim Bruslund Haurum, Iuliia Zarubiieva, Dirk Steinke, Lila Kari, Angel X. Chang, Graham W Taylor. 4th Workshop on Self-Supervised Learning: Theory and Practice (NeurIPS 2023). [pdf | code]
- A Step Towards Worldwide Biodiversity Assessment: The BIOSCAN-1M Insect Dataset. Zahra Gharaee, Zeming Gong, Nicholas Pellegrino, Iuliia Zarubiieva, Joakim Bruslund Haurum, Scott C Lowe, Jaclyn TA McKeown, Chris CY Ho, Joschka McLeod, Yi-Yun C Wei, Jireh Agda, Sujeevan Ratnasingham, Dirk Steinke, Angel X. Chang, Graham W Taylor, Paul Fieguth. NeurIPS Datasets and Benchmarks 2023. [pdf | code | webpage]
- HomeRobot: Open Vocabulary Mobile Manipulation. Sriram Yenamandra, Arun Ramachandran, Karmesh Yadav, Austin Wang, Mukul Khanna, Theo Gervet, Tsung-Yen Yang, Vidhi Jain, Alexander William Clegg, John Turner, Zsolt Kira, Manolis Savva, Angel X. Chang, Devendra Singh Chaplot, Dhruv Batra, Roozbeh Mottaghi, Yonatan Bisk, Chris Paxton. CoRL 2023. [pdf | code | challenge | webpage]
- Multi3DRefer: Grounding Text Description to Multiple 3D Objects. Yiming Zhang, Zeming Gong, Angel X. Chang. ICCV 2023. [pdf | code | webpage]
- Exploiting Proximity-Aware Tasks for Embodied Social Navigation. Enrico Cancelli, Tommaso Campari, Luciano Serafini, Angel X. Chang, Lamberto Ballan. ICCV 2023. [pdf]
- UniT3D: A Unified Transformer for 3D Dense Captioning and Visual Grounding. Dave Zhenyu Chen, Ronghang Hu, Xinlei Chen, Matthias Nießner, Angel X. Chang. ICCV 2023. [pdf | webpage]
- Habitat-Matterport 3D Semantics Dataset. Karmesh Yadav, Ram Ramrakhya, Santhosh K. Ramakrishnan, Theo Gervet, John Turner, Aaron Gokaslan, Noah Maestre, Angel X. Chang, Dhruv Batra, Manolis Savva, Alexander William Clegg, Devendra Singh Chaplot. CVPR 2023. [pdf | webpage]
- Evaluating 3D Shape Analysis Methods for Robustness to Rotation Invariance. Supriya Pandhre, Angel X. Chang, Manolis Savva. CRV 2023. [pdf]

2022

- Understanding Pure CLIP Guidance for Voxel Grid NeRF Models. Han-Hung Lee, Angel X. Chang. arXiv:2209.15172 [cs.CV], September 2022. [pdf | code | webpage]
- MultiScan: Scalable RGBD scanning for 3D environments with articulated objects. Yongsen Mao, Yiming Zhang, Hanxiao (Shawn) Jiang, Angel X. Chang, Manolis Savva. NeurIPS 2022. [pdf | code | webpage]
- Articulated 3D Human-Object Interactions from RGB Videos: An Empirical Analysis of Approaches and Challenges. Sanjay Haresh, Xiaohao Sun, Hanxiao (Shawn) Jiang, Angel X. Chang, Manolis Savva. 3DV 2022. [pdf | code | webpage]
- OPD: Single-view 3D Openable Part Detection. Hanxiao (Shawn) Jiang, Yongsen Mao, Manolis Savva, Angel X. Chang. ECCV 2022. [pdf | code | webpage]
- D3Net: A Unified Speaker-Listener Architecture for 3D Dense Captioning and Visual Grounding. Dave Zhenyu Chen, Qirui Wu, Matthias Nießner, Angel X. Chang. ECCV 2022. [pdf | code | webpage]
- 3DVQA: Visual Question Answering for 3D Environments. Yasaman Etesam, Leon Kochiev, Angel X. Chang. CRV 2022. [pdf | code | webpage]

2021

+ Habitat-Matterport 3D Dataset (HM3D): 1000 Large-scale 3D Environments for Embodied AI
+ Santhosh K. Ramakrishnan, Aaron Gokaslan, Erik Wijmans, Oleksandr Maksymets, Alexander Clegg, John Turner, Eric Undersander, Wojciech Galuba, Andrew Westbury, Angel X. Chang, Manolis Savva, Yili Zhao, Dhruv Batra
+ NeurIPS Datasets and Benchmarks Track 2021
+ pdf | webpage
+
+ Habitat 2.0: Training Home Assistants to Rearrange their Habitat
+ Andrew Szot, Alexander Clegg, Eric Undersander, Erik Wijmans, Yili Zhao, John Turner, Noah Maestre, Mustafa Mukadam, Devendra Singh Chaplot, Oleksandr Maksymets, Aaron Gokaslan, Vladimír Vondrus, Sameer Dharur, Franziska Meier, Wojciech Galuba, Angel X. Chang, Zsolt Kira, Vladlen Koltun, Jitendra Malik, Manolis Savva, Dhruv Batra
+ NeurIPS 2021
+ pdf | code | post
+
+ Language-Aligned Waypoint (LAW) Supervision for Vision-and-Language Navigation in Continuous Environments
+ Sonia Raychaudhuri, Shivansh Patel, Saim Wani, Unnat Jain, Angel X. Chang
+ EMNLP 2021 (short)
+ pdf | code | webpage
+
+ Interpretation of Emergent Communication in Heterogeneous Collaborative Embodied Agents
+ Shivansh Patel, Saim Wani, Unnat Jain, Alexander Schwing, Svetlana Lazebnik, Manolis Savva, Angel X. Chang
+ ICCV 2021
+ pdf | code | webpage
+
+ Roominoes: Learning to Assemble 3D Rooms into Floor Plans
+ Kai Wang, Xianghao Xu, Leon Lei, Selena Ling, Natalie Lindsay, Angel X. Chang, Manolis Savva, Daniel Ritchie
+ SGP 2021
+ pdf | video
+
+ Scan2Cap: Context-aware Dense Captioning in RGB-D Scans
+ Dave Zhenyu Chen, Ali Gholami, Matthias Nießner, Angel X. Chang
+ CVPR 2021
+ pdf | code | webpage
+
+ Plan2Scene: Converting Floorplans to 3D Scenes
+ Madhawa Vidanapathirana, Qirui Wu, Yasutaka Furukawa, Angel X. Chang, Manolis Savva
+ CVPR 2021
+ pdf | code | webpage
+
+ Mirror3D: Depth Refinement for Mirror Surfaces
+ Jiaqi Tan, Weijie (Lewis) Lin, Angel X. Chang, Manolis Savva
+ CVPR 2021
+ pdf | code | webpage

2020

+ Rearrangement: A Challenge for Embodied AI
+ Dhruv Batra, Angel X. Chang, Sonia Chernova, Andrew J. Davison, Jia Deng, Vladlen Koltun, Sergey Levine, Jitendra Malik, Igor Mordatch, Roozbeh Mottaghi, Manolis Savva, Hao Su
+ arXiv:2011.01975 [cs.AI], November 2020
+ pdf
+
+ Motion Annotation Programs: A Scalable Approach to Annotating Kinematic Articulations in Large 3D Shape Collections
+ Xianghao Xu, David Charatan, Sonia Raychaudhuri, Hanxiao (Shawn) Jiang, Mae Heitmann, Vladimir Kim, Siddhartha Chaudhuri, Manolis Savva, Angel X. Chang, Daniel Ritchie
+ 3DV 2020
+ pdf | code | demo | webpage
+
+ Multi-ON: Benchmarking Semantic Map Memory using Multi-Object Navigation
+ Saim Wani, Shivansh Patel, Unnat Jain, Angel X. Chang, Manolis Savva
+ NeurIPS 2020
+ pdf | code | webpage
+
+ Sneak Pique: Exploring Autocompletion as a Data Discovery Scaffold for Supporting Visual Analysis
+ Vidya Setlur, Enamul Hoque, Dae Hyun Kim, Angel X. Chang
+ UIST 2020
+ pdf | webpage
+
+ ScanRefer: 3D Object Localization in RGB-D Scans using Natural Language
+ Dave Zhenyu Chen, Angel X. Chang, Matthias Nießner
+ ECCV 2020
+ pdf | code | benchmark | webpage
+
+ SAPIEN: a SimulAted Part-based Interactive ENvironment
+ Fanbo Xiang, Yuzhe Qin, Kaichun Mo, Yikuan Xia, Hao Zhu, Fangchen Liu, Minghua Liu, Hanxiao (Shawn) Jiang, Yifu Yuan, He Wang, Li Yi, Angel X. Chang, Leonidas Guibas, Hao Su
+ CVPR 2020
+ pdf | webpage

2019

+ Mimic and Rephrase: Reflective Listening in Open-Ended Dialogue
+ Justin Dieter, Tian Wang, Gabor Angeli, Angel X. Chang, Arun Tejasvi Chaganty
+ CoNLL 2019
+ pdf | code
+
+ PlanIT: Planning and Instantiating Indoor Scenes with Relation Graph and Spatial Prior Networks
+ Kai Wang, Yu-An Lin, Ben Weissmann, Manolis Savva, Angel X. Chang, Daniel Ritchie
+ SIGGRAPH 2019
+ pdf | code
+
+ Hierarchy Denoising Recursive Autoencoders for 3D Scene Layout Prediction
+ Yifei Shi, Angel X. Chang, Zhelun Wu, Manolis Savva, Kai Xu
+ CVPR 2019, arXiv:1903.03757 [cs.CV]
+ pdf | code | webpage
+
+ PartNet: A Large-scale Benchmark for Fine-grained and Hierarchical Part-level 3D Object Understanding
+ Kaichun Mo, Shilin Zhu, Angel X. Chang, Li Yi, Subarna Tripathi, Leonidas Guibas, Hao Su
+ CVPR 2019, arXiv:1812.02713 [cs.CV]
+ pdf | video | webpage
+
+ Scan2CAD: Learning CAD Model Alignment in RGB-D Scans
+ Armen Avetisyan, Manuel Dahnert, Angela Dai, Manolis Savva, Angel X. Chang, Matthias Nießner
+ CVPR 2019 (oral), arXiv:1811.11187 [cs.CV]
+ pdf | code | video | benchmark

2018

+ Deep Convolutional Priors for Indoor Scene Synthesis
+ Kai Wang, Manolis Savva, Angel X. Chang, Daniel Ritchie
+ SIGGRAPH 2018
+ pdf | code
+
+ On evaluation of embodied navigation agents
+ Peter Anderson, Angel X. Chang, Devendra Singh Chaplot, Alexey Dosovitskiy, Saurabh Gupta, Vladlen Koltun, Jana Kosecka, Jitendra Malik, Roozbeh Mottaghi, Manolis Savva, Amir R. Zamir
+ arXiv:1807.06757 [cs.AI], July 2018
+ pdf
+
+ Text2Shape: Generating Shapes from Natural Language by Learning Joint Embeddings
+ Kevin Chen, Christopher B. Choy, Manolis Savva, Angel X. Chang, Thomas Funkhouser, Silvio Savarese
+ Proceedings of ACCV 2018 (oral), arXiv:1803.08495 [cs.CV]
+ pdf | code | webpage
+
+ Im2Pano3D: Extrapolating 360° Structure and Semantics Beyond the Field of View
+ Shuran Song, Andy Zeng, Angel X. Chang, Manolis Savva, Silvio Savarese, Thomas Funkhouser
+ Proceedings of CVPR 2018, arXiv:1712.04569 [cs.CV]
+ pdf | webpage
+
+ Linking WordNet to 3D Shapes
+ Angel X. Chang, Rishi Mago, Pranav Krishna, Manolis Savva, Christiane Fellbaum
+ Proceedings of Global WordNet Conference 2018
+ pdf

2017

+ MINOS: Multimodal Indoor Simulator for Navigation in Complex Environments
+ Manolis Savva, Angel X. Chang, Alexey Dosovitskiy, Thomas Funkhouser, Vladlen Koltun
+ arXiv:1712.03931 [cs.LG]
+ pdf | code | video | webpage
+
+ Cross-modal Attribute Transfer for Rescaling 3D Models
+ Lin Shao, Angel X. Chang, Hao Su, Manolis Savva, Leonidas Guibas
+ Proceedings of 3DV 2017
+ pdf
+
+ Matterport3D: Learning from RGB-D Data in Indoor Environments
+ Angel X. Chang, Angela Dai, Thomas Funkhouser, Maciej Halber, Matthias Nießner, Manolis Savva, Shuran Song, Andy Zeng, Yinda Zhang
+ Proceedings of 3DV 2017, arXiv:1709.06158 [cs.CV]
+ pdf | code | webpage
+
+ Learning Where to Look: Data-Driven Viewpoint Set Selection for 3D Scenes
+ Kyle Genova, Manolis Savva, Angel X. Chang, Thomas Funkhouser
+ arXiv:1704.02393 [cs.CV]
+ pdf
+
+ ScanNet: Richly-annotated 3D Reconstructions of Indoor Scenes
+ Angela Dai, Angel X. Chang, Manolis Savva, Maciej Halber, Thomas Funkhouser, Matthias Nießner
+ Proceedings of CVPR 2017 (spotlight), arXiv:1702.04405 [cs.CV]
+ pdf | code | video | benchmark | webpage
+
+ Semantic Scene Completion from a Single Depth Image
+ Shuran Song, Fisher Yu, Andy Zeng, Angel X. Chang, Manolis Savva, Thomas Funkhouser
+ Proceedings of CVPR 2017 (oral), arXiv:1611.08974 [cs.CV]
+ pdf | webpage
+
+ A Two-stage Sieve Approach to Quote Attribution
+ Grace Muzny, Michael Fang, Angel X. Chang, Dan Jurafsky
+ Proceedings of EACL 2017
+ pdf | code | data
+
+ SceneSuggest: Context-driven 3D Scene Design
+ Manolis Savva, Angel X. Chang, Maneesh Agrawala
+ arXiv:1703.00061 [cs.CG], March 2017
+ pdf | demo
+
+ SceneSeer: 3D Scene Design with Natural Language
+ Angel X. Chang, Mihail Eric, Manolis Savva, Christopher D. Manning
+ arXiv:1703.00050 [cs.CG], March 2017
+ pdf | demo

2016

+ Eviza: A Natural Language Interface for Visual Analysis
+ Vidya Setlur, Sarah E. Battersby, Melanie Tory, Rich Gossweiler, Angel X. Chang
+ UIST 2016
+ pdf | webpage
+
+ PiGraphs: Learning Interaction Snapshots from Observations
+ Manolis Savva, Angel X. Chang, Pat Hanrahan, Matthew Fisher, Matthias Nießner
+ SIGGRAPH 2016
+ pdf | bib | webpage
+
+ Evaluating the word-expert approach for Named-Entity Disambiguation
+ Angel X. Chang, Valentin I. Spitkovsky, Christopher D. Manning, Eneko Agirre
+ arXiv:1603.04767 [cs.CL], March 2016
+ pdf

2015

+ Text to 3D scene generation
+ Angel X. Chang
+ Ph.D. dissertation, Department of Computer Science, Stanford University, 2015
+ pdf
+
+ ShapeNet: An Information-Rich 3D Model Repository
+ Angel X. Chang, Thomas Funkhouser, Leonidas Guibas, Pat Hanrahan, Qixing Huang, Zimo Li, Silvio Savarese, Manolis Savva, Shuran Song, Hao Su, Jianxiong Xiao, Li Yi, Fisher Yu
+ arXiv:1512.03012 [cs.GR], Dec 2015
+ pdf | bib | code | webpage
+
+ Generating Semantically Precise Scene Graphs from Textual Descriptions for Improved Image Retrieval
+ Sebastian Schuster, Ranjay Krishna, Angel X. Chang, Li Fei-Fei, Christopher D. Manning
+ In Proceedings of the Fourth Workshop on Vision and Language (VL15)
+ pdf | bib | webpage
+
+ Semantically-Enriched 3D Models for Common-sense Knowledge
+ Manolis Savva, Angel X. Chang, Pat Hanrahan
+ CVPR 2015 Vision meets Cognition Workshop
+ pdf | bib | webpage
+
+ Text to 3D Scene Generation with Rich Lexical Grounding
+ Angel X. Chang, Will Monroe, Manolis Savva, Christopher Potts, Christopher D. Manning
+ Proceedings of ACL 2015
+ pdf | bib | webpage

2014

+ SceneGrok: Inferring Action Maps in 3D Environments
+ Manolis Savva, Angel X. Chang, Pat Hanrahan, Matthew Fisher, Matthias Nießner
+ Proceedings of SIGGRAPH Asia 2014
+ pdf | bib | webpage
+
+ On Being the Right Scale: Sizing Large Collections of 3D Models
+ Manolis Savva, Angel X. Chang, Gilbert Bernstein, Christopher D. Manning, Pat Hanrahan
+ SIGGRAPH Asia 2014 Workshop on Indoor Scene Understanding: Where Graphics meets Vision
+ pdf | bib | webpage
+
+ Learning Spatial Knowledge for Text to 3D Scene Generation
+ Angel X. Chang, Manolis Savva, Christopher D. Manning
+ Proceedings of the 2014 Conference on Empirical Methods in Natural Language Processing (EMNLP 2014)
+ pdf | bib | webpage
+
+ Learning Affordance Maps by Observing Interactions
+ Manolis Savva, Angel X. Chang, Matthew Fisher, Matthias Nießner, Pat Hanrahan
+ CVPR 2014 Workshop on Functionality, Physics, Intentionality and Causality
+ pdf | bib
+
+ Interactive Learning of Spatial Knowledge for Text to 3D Scene Generation
+ Angel X. Chang, Manolis Savva, Christopher D. Manning
+ Proceedings of the ACL 2014 Workshop on Interactive Language Learning, Visualization, and Interfaces
+ pdf | bib | webpage
+
+ Semantic Parsing for Text to 3D Scene Generation
+ Angel X. Chang, Manolis Savva, Christopher D. Manning
+ Proceedings of the ACL 2014 Workshop on Semantic Parsing
+ pdf | bib | webpage
+
+ TransPhoner: Automated Mnemonic Keyword Generation
+ Manolis Savva, Angel X. Chang, Christopher D. Manning, Pat Hanrahan
+ Proceedings of CHI 2014
+ pdf | bib | webpage
+
+ TokensRegex: Defining cascaded regular expressions over tokens
+ Angel X. Chang, Christopher D. Manning
+ Stanford University Technical Report
+ pdf | bib | webpage

2013

+ Deterministic coreference resolution based on entity-centric, precision-ranked rules
+ Heeyoung Lee, Angel X. Chang, Yves Peirsman, Nathanael Chambers, Mihai Surdeanu, Dan Jurafsky
+ In Computational Linguistics 39(4)
+ pdf | webpage
+
+ Stanford's 2013 KBP System
+ Gabor Angeli, Arun Tejasvi Chaganty, Angel X. Chang, Kevin Reschke, Julie Tibshirani, Jean Y. Wu, Osbert Bastani, Keith Siilats, Christopher D. Manning
+ In Proceedings of the Sixth Text Analysis Conference (TAC 2013)
+ pdf
+
+ SUTime: Evaluation in TempEval-3
+ Angel X. Chang, Christopher D. Manning
+ In Second Joint Conference on Lexical and Computational Semantics (*SEM), Volume 2: Proceedings of the Seventh International Workshop on Semantic Evaluation (SemEval 2013)
+ pdf

2012

+ Joint Entity and Event Coreference Resolution across Documents
+ Heeyoung Lee, Marta Recasens, Angel X. Chang, Mihai Surdeanu, Dan Jurafsky
+ Proceedings of the Conference on Empirical Methods in Natural Language Processing and Computational Natural Language Learning (EMNLP-CoNLL 2012)
+ pdf | bib
+
+ SUTime: A Library for Recognizing and Normalizing Time Expressions
+ Angel X. Chang, Christopher D. Manning
+ In Proceedings of the Eighth International Conference on Language Resources and Evaluation (LREC 2012)
+ pdf | bib | poster | demo | webpage
+
+ A Cross-Lingual Dictionary for English Wikipedia Concepts
+ Valentin I. Spitkovsky, Angel X. Chang
+ In Proceedings of the Eighth International Conference on Language Resources and Evaluation (LREC 2012)
+ pdf | bib | data | slides | post

2011

+ Stanford's Distantly-Supervised Slot-Filling System
+ Mihai Surdeanu, Sonal Gupta, John Bauer, David McClosky, Angel X. Chang, Valentin I. Spitkovsky, Christopher D. Manning
+ In Proceedings of the Fourth Text Analysis Conference (TAC 2011)
+ pdf | bib | data
+
+ Strong Baselines for Cross-Lingual Entity Linking
+ Valentin I. Spitkovsky, Angel X. Chang
+ In Proceedings of the Fourth Text Analysis Conference (TAC 2011)
+ pdf | bib
+
+ Stanford-UBC Entity Linking at TAC-KBP, Again
+ Angel X. Chang, Valentin I. Spitkovsky, Eneko Agirre, Christopher D. Manning
+ In Proceedings of the Fourth Text Analysis Conference (TAC 2011)
+ pdf | bib
+
+ Unsupervised Dependency Parsing without Gold Part-of-Speech Tags
+ Valentin I. Spitkovsky, Hiyan Alshawi, Angel X. Chang, Dan Jurafsky
+ In Proceedings of the 2011 Conference on Empirical Methods in Natural Language Processing (EMNLP 2011)
+ pdf | bib | data | poster
+
+ Stanford's Multi-Pass Sieve Coreference Resolution System at the CoNLL-2011 Shared Task
+ Heeyoung Lee, Yves Peirsman, Angel X. Chang, Nathanael Chambers, Mihai Surdeanu, Dan Jurafsky
+ In Proceedings of the CoNLL-2011 Shared Task
+ pdf | bib | webpage

2010

+ A Simple Distant Supervision Approach for the TAC-KBP Slot Filling Task
+ Mihai Surdeanu, David McClosky, Julie Tibshirani, John Bauer, Angel X. Chang, Valentin I. Spitkovsky, Christopher D. Manning
+ In Proceedings of the Third Text Analysis Conference (TAC 2010)
+ pdf | bib | slides
+
+ Stanford-UBC Entity Linking at TAC-KBP
+ Angel X. Chang, Valentin I. Spitkovsky, Eric Yeh, Eneko Agirre, Christopher D. Manning
+ In Proceedings of the Third Text Analysis Conference (TAC 2010)
+ pdf | bib | poster

2009

+ Stanford-UBC at TAC-KBP
+ Eneko Agirre, Angel X. Chang, Dan Jurafsky, Christopher D. Manning, Valentin I. Spitkovsky, Eric Yeh
+ In Proceedings of the Second Text Analysis Conference (TAC 2009)
+ pdf | bib | slides

2000

+ The Fractal Geometry of the Boundary of Dragon Curves
+ Angel X. Chang, Tianrong Zhang
+ In Journal of Recreational Mathematics 30 (1), 9-22
+ pdf | bib
+
+ + diff --git a/scripts/bootstrap.js b/scripts/bootstrap.js new file mode 100644 index 0000000..8a2e99a --- /dev/null +++ b/scripts/bootstrap.js @@ -0,0 +1,2377 @@ +/*! + * Bootstrap v3.3.7 (http://getbootstrap.com) + * Copyright 2011-2016 Twitter, Inc. + * Licensed under the MIT license + */ + +if (typeof jQuery === 'undefined') { + throw new Error('Bootstrap\'s JavaScript requires jQuery') +} + ++function ($) { + 'use strict'; + var version = $.fn.jquery.split(' ')[0].split('.') + if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1) || (version[0] > 3)) { + throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4') + } +}(jQuery); + +/* ======================================================================== + * Bootstrap: transition.js v3.3.7 + * http://getbootstrap.com/javascript/#transitions + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // CSS TRANSITION SUPPORT (Shoutout: http://www.modernizr.com/) + // ============================================================ + + function transitionEnd() { + var el = document.createElement('bootstrap') + + var transEndEventNames = { + WebkitTransition : 'webkitTransitionEnd', + MozTransition : 'transitionend', + OTransition : 'oTransitionEnd otransitionend', + transition : 'transitionend' + } + + for (var name in transEndEventNames) { + if (el.style[name] !== undefined) { + return { end: transEndEventNames[name] } + } + } + + return false // explicit for ie8 ( ._.) + } + + // http://blog.alexmaccaw.com/css-transitions + $.fn.emulateTransitionEnd = function (duration) { + var called = false + var $el = this + $(this).one('bsTransitionEnd', function () { called = true }) + var callback = function () { if (!called) $($el).trigger($.support.transition.end) } + setTimeout(callback, duration) + return this + } + + $(function () { + $.support.transition = transitionEnd() + + if (!$.support.transition) return + + $.event.special.bsTransitionEnd = { + bindType: $.support.transition.end, + delegateType: $.support.transition.end, + handle: function (e) { + if ($(e.target).is(this)) return e.handleObj.handler.apply(this, arguments) + } + } + }) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: alert.js v3.3.7 + * http://getbootstrap.com/javascript/#alerts + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // ALERT CLASS DEFINITION + // ====================== + + var dismiss = '[data-dismiss="alert"]' + var Alert = function (el) { + $(el).on('click', dismiss, this.close) + } + + Alert.VERSION = '3.3.7' + + Alert.TRANSITION_DURATION = 150 + + Alert.prototype.close = function (e) { + var $this = $(this) + var selector = $this.attr('data-target') + + if (!selector) { + selector = $this.attr('href') + selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7 + } + + var $parent = $(selector === '#' ? 
[] : selector) + + if (e) e.preventDefault() + + if (!$parent.length) { + $parent = $this.closest('.alert') + } + + $parent.trigger(e = $.Event('close.bs.alert')) + + if (e.isDefaultPrevented()) return + + $parent.removeClass('in') + + function removeElement() { + // detach from parent, fire event then clean up data + $parent.detach().trigger('closed.bs.alert').remove() + } + + $.support.transition && $parent.hasClass('fade') ? + $parent + .one('bsTransitionEnd', removeElement) + .emulateTransitionEnd(Alert.TRANSITION_DURATION) : + removeElement() + } + + + // ALERT PLUGIN DEFINITION + // ======================= + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.alert') + + if (!data) $this.data('bs.alert', (data = new Alert(this))) + if (typeof option == 'string') data[option].call($this) + }) + } + + var old = $.fn.alert + + $.fn.alert = Plugin + $.fn.alert.Constructor = Alert + + + // ALERT NO CONFLICT + // ================= + + $.fn.alert.noConflict = function () { + $.fn.alert = old + return this + } + + + // ALERT DATA-API + // ============== + + $(document).on('click.bs.alert.data-api', dismiss, Alert.prototype.close) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: button.js v3.3.7 + * http://getbootstrap.com/javascript/#buttons + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // BUTTON PUBLIC CLASS DEFINITION + // ============================== + + var Button = function (element, options) { + this.$element = $(element) + this.options = $.extend({}, Button.DEFAULTS, options) + this.isLoading = false + } + + Button.VERSION = '3.3.7' + + Button.DEFAULTS = { + loadingText: 'loading...' + } + + Button.prototype.setState = function (state) { + var d = 'disabled' + var $el = this.$element + var val = $el.is('input') ? 'val' : 'html' + var data = $el.data() + + state += 'Text' + + if (data.resetText == null) $el.data('resetText', $el[val]()) + + // push to event loop to allow forms to submit + setTimeout($.proxy(function () { + $el[val](data[state] == null ? 
this.options[state] : data[state]) + + if (state == 'loadingText') { + this.isLoading = true + $el.addClass(d).attr(d, d).prop(d, true) + } else if (this.isLoading) { + this.isLoading = false + $el.removeClass(d).removeAttr(d).prop(d, false) + } + }, this), 0) + } + + Button.prototype.toggle = function () { + var changed = true + var $parent = this.$element.closest('[data-toggle="buttons"]') + + if ($parent.length) { + var $input = this.$element.find('input') + if ($input.prop('type') == 'radio') { + if ($input.prop('checked')) changed = false + $parent.find('.active').removeClass('active') + this.$element.addClass('active') + } else if ($input.prop('type') == 'checkbox') { + if (($input.prop('checked')) !== this.$element.hasClass('active')) changed = false + this.$element.toggleClass('active') + } + $input.prop('checked', this.$element.hasClass('active')) + if (changed) $input.trigger('change') + } else { + this.$element.attr('aria-pressed', !this.$element.hasClass('active')) + this.$element.toggleClass('active') + } + } + + + // BUTTON PLUGIN DEFINITION + // ======================== + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.button') + var options = typeof option == 'object' && option + + if (!data) $this.data('bs.button', (data = new Button(this, options))) + + if (option == 'toggle') data.toggle() + else if (option) data.setState(option) + }) + } + + var old = $.fn.button + + $.fn.button = Plugin + $.fn.button.Constructor = Button + + + // BUTTON NO CONFLICT + // ================== + + $.fn.button.noConflict = function () { + $.fn.button = old + return this + } + + + // BUTTON DATA-API + // =============== + + $(document) + .on('click.bs.button.data-api', '[data-toggle^="button"]', function (e) { + var $btn = $(e.target).closest('.btn') + Plugin.call($btn, 'toggle') + if (!($(e.target).is('input[type="radio"], input[type="checkbox"]'))) { + // Prevent double click on radios, and the double selections (so cancellation) on checkboxes + e.preventDefault() + // The target component still receive the focus + if ($btn.is('input,button')) $btn.trigger('focus') + else $btn.find('input:visible,button:visible').first().trigger('focus') + } + }) + .on('focus.bs.button.data-api blur.bs.button.data-api', '[data-toggle^="button"]', function (e) { + $(e.target).closest('.btn').toggleClass('focus', /^focus(in)?$/.test(e.type)) + }) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: carousel.js v3.3.7 + * http://getbootstrap.com/javascript/#carousel + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. 
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // CAROUSEL CLASS DEFINITION + // ========================= + + var Carousel = function (element, options) { + this.$element = $(element) + this.$indicators = this.$element.find('.carousel-indicators') + this.options = options + this.paused = null + this.sliding = null + this.interval = null + this.$active = null + this.$items = null + + this.options.keyboard && this.$element.on('keydown.bs.carousel', $.proxy(this.keydown, this)) + + this.options.pause == 'hover' && !('ontouchstart' in document.documentElement) && this.$element + .on('mouseenter.bs.carousel', $.proxy(this.pause, this)) + .on('mouseleave.bs.carousel', $.proxy(this.cycle, this)) + } + + Carousel.VERSION = '3.3.7' + + Carousel.TRANSITION_DURATION = 600 + + Carousel.DEFAULTS = { + interval: 5000, + pause: 'hover', + wrap: true, + keyboard: true + } + + Carousel.prototype.keydown = function (e) { + if (/input|textarea/i.test(e.target.tagName)) return + switch (e.which) { + case 37: this.prev(); break + case 39: this.next(); break + default: return + } + + e.preventDefault() + } + + Carousel.prototype.cycle = function (e) { + e || (this.paused = false) + + this.interval && clearInterval(this.interval) + + this.options.interval + && !this.paused + && (this.interval = setInterval($.proxy(this.next, this), this.options.interval)) + + return this + } + + Carousel.prototype.getItemIndex = function (item) { + this.$items = item.parent().children('.item') + return this.$items.index(item || this.$active) + } + + Carousel.prototype.getItemForDirection = function (direction, active) { + var activeIndex = this.getItemIndex(active) + var willWrap = (direction == 'prev' && activeIndex === 0) + || (direction == 'next' && activeIndex == (this.$items.length - 1)) + if (willWrap && !this.options.wrap) return active + var delta = direction == 'prev' ? -1 : 1 + var itemIndex = (activeIndex + delta) % this.$items.length + return this.$items.eq(itemIndex) + } + + Carousel.prototype.to = function (pos) { + var that = this + var activeIndex = this.getItemIndex(this.$active = this.$element.find('.item.active')) + + if (pos > (this.$items.length - 1) || pos < 0) return + + if (this.sliding) return this.$element.one('slid.bs.carousel', function () { that.to(pos) }) // yes, "slid" + if (activeIndex == pos) return this.pause().cycle() + + return this.slide(pos > activeIndex ? 'next' : 'prev', this.$items.eq(pos)) + } + + Carousel.prototype.pause = function (e) { + e || (this.paused = true) + + if (this.$element.find('.next, .prev').length && $.support.transition) { + this.$element.trigger($.support.transition.end) + this.cycle(true) + } + + this.interval = clearInterval(this.interval) + + return this + } + + Carousel.prototype.next = function () { + if (this.sliding) return + return this.slide('next') + } + + Carousel.prototype.prev = function () { + if (this.sliding) return + return this.slide('prev') + } + + Carousel.prototype.slide = function (type, next) { + var $active = this.$element.find('.item.active') + var $next = next || this.getItemForDirection(type, $active) + var isCycling = this.interval + var direction = type == 'next' ? 
'left' : 'right' + var that = this + + if ($next.hasClass('active')) return (this.sliding = false) + + var relatedTarget = $next[0] + var slideEvent = $.Event('slide.bs.carousel', { + relatedTarget: relatedTarget, + direction: direction + }) + this.$element.trigger(slideEvent) + if (slideEvent.isDefaultPrevented()) return + + this.sliding = true + + isCycling && this.pause() + + if (this.$indicators.length) { + this.$indicators.find('.active').removeClass('active') + var $nextIndicator = $(this.$indicators.children()[this.getItemIndex($next)]) + $nextIndicator && $nextIndicator.addClass('active') + } + + var slidEvent = $.Event('slid.bs.carousel', { relatedTarget: relatedTarget, direction: direction }) // yes, "slid" + if ($.support.transition && this.$element.hasClass('slide')) { + $next.addClass(type) + $next[0].offsetWidth // force reflow + $active.addClass(direction) + $next.addClass(direction) + $active + .one('bsTransitionEnd', function () { + $next.removeClass([type, direction].join(' ')).addClass('active') + $active.removeClass(['active', direction].join(' ')) + that.sliding = false + setTimeout(function () { + that.$element.trigger(slidEvent) + }, 0) + }) + .emulateTransitionEnd(Carousel.TRANSITION_DURATION) + } else { + $active.removeClass('active') + $next.addClass('active') + this.sliding = false + this.$element.trigger(slidEvent) + } + + isCycling && this.cycle() + + return this + } + + + // CAROUSEL PLUGIN DEFINITION + // ========================== + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.carousel') + var options = $.extend({}, Carousel.DEFAULTS, $this.data(), typeof option == 'object' && option) + var action = typeof option == 'string' ? option : options.slide + + if (!data) $this.data('bs.carousel', (data = new Carousel(this, options))) + if (typeof option == 'number') data.to(option) + else if (action) data[action]() + else if (options.interval) data.pause().cycle() + }) + } + + var old = $.fn.carousel + + $.fn.carousel = Plugin + $.fn.carousel.Constructor = Carousel + + + // CAROUSEL NO CONFLICT + // ==================== + + $.fn.carousel.noConflict = function () { + $.fn.carousel = old + return this + } + + + // CAROUSEL DATA-API + // ================= + + var clickHandler = function (e) { + var href + var $this = $(this) + var $target = $($this.attr('data-target') || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '')) // strip for ie7 + if (!$target.hasClass('carousel')) return + var options = $.extend({}, $target.data(), $this.data()) + var slideIndex = $this.attr('data-slide-to') + if (slideIndex) options.interval = false + + Plugin.call($target, options) + + if (slideIndex) { + $target.data('bs.carousel').to(slideIndex) + } + + e.preventDefault() + } + + $(document) + .on('click.bs.carousel.data-api', '[data-slide]', clickHandler) + .on('click.bs.carousel.data-api', '[data-slide-to]', clickHandler) + + $(window).on('load', function () { + $('[data-ride="carousel"]').each(function () { + var $carousel = $(this) + Plugin.call($carousel, $carousel.data()) + }) + }) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: collapse.js v3.3.7 + * http://getbootstrap.com/javascript/#collapse + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. 
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + +/* jshint latedef: false */ + ++function ($) { + 'use strict'; + + // COLLAPSE PUBLIC CLASS DEFINITION + // ================================ + + var Collapse = function (element, options) { + this.$element = $(element) + this.options = $.extend({}, Collapse.DEFAULTS, options) + this.$trigger = $('[data-toggle="collapse"][href="#' + element.id + '"],' + + '[data-toggle="collapse"][data-target="#' + element.id + '"]') + this.transitioning = null + + if (this.options.parent) { + this.$parent = this.getParent() + } else { + this.addAriaAndCollapsedClass(this.$element, this.$trigger) + } + + if (this.options.toggle) this.toggle() + } + + Collapse.VERSION = '3.3.7' + + Collapse.TRANSITION_DURATION = 350 + + Collapse.DEFAULTS = { + toggle: true + } + + Collapse.prototype.dimension = function () { + var hasWidth = this.$element.hasClass('width') + return hasWidth ? 'width' : 'height' + } + + Collapse.prototype.show = function () { + if (this.transitioning || this.$element.hasClass('in')) return + + var activesData + var actives = this.$parent && this.$parent.children('.panel').children('.in, .collapsing') + + if (actives && actives.length) { + activesData = actives.data('bs.collapse') + if (activesData && activesData.transitioning) return + } + + var startEvent = $.Event('show.bs.collapse') + this.$element.trigger(startEvent) + if (startEvent.isDefaultPrevented()) return + + if (actives && actives.length) { + Plugin.call(actives, 'hide') + activesData || actives.data('bs.collapse', null) + } + + var dimension = this.dimension() + + this.$element + .removeClass('collapse') + .addClass('collapsing')[dimension](0) + .attr('aria-expanded', true) + + this.$trigger + .removeClass('collapsed') + .attr('aria-expanded', true) + + this.transitioning = 1 + + var complete = function () { + this.$element + .removeClass('collapsing') + .addClass('collapse in')[dimension]('') + this.transitioning = 0 + this.$element + .trigger('shown.bs.collapse') + } + + if (!$.support.transition) return complete.call(this) + + var scrollSize = $.camelCase(['scroll', dimension].join('-')) + + this.$element + .one('bsTransitionEnd', $.proxy(complete, this)) + .emulateTransitionEnd(Collapse.TRANSITION_DURATION)[dimension](this.$element[0][scrollSize]) + } + + Collapse.prototype.hide = function () { + if (this.transitioning || !this.$element.hasClass('in')) return + + var startEvent = $.Event('hide.bs.collapse') + this.$element.trigger(startEvent) + if (startEvent.isDefaultPrevented()) return + + var dimension = this.dimension() + + this.$element[dimension](this.$element[dimension]())[0].offsetHeight + + this.$element + .addClass('collapsing') + .removeClass('collapse in') + .attr('aria-expanded', false) + + this.$trigger + .addClass('collapsed') + .attr('aria-expanded', false) + + this.transitioning = 1 + + var complete = function () { + this.transitioning = 0 + this.$element + .removeClass('collapsing') + .addClass('collapse') + .trigger('hidden.bs.collapse') + } + + if (!$.support.transition) return complete.call(this) + + this.$element + [dimension](0) + .one('bsTransitionEnd', $.proxy(complete, this)) + .emulateTransitionEnd(Collapse.TRANSITION_DURATION) + } + + Collapse.prototype.toggle = function () { + this[this.$element.hasClass('in') ? 
'hide' : 'show']() + } + + Collapse.prototype.getParent = function () { + return $(this.options.parent) + .find('[data-toggle="collapse"][data-parent="' + this.options.parent + '"]') + .each($.proxy(function (i, element) { + var $element = $(element) + this.addAriaAndCollapsedClass(getTargetFromTrigger($element), $element) + }, this)) + .end() + } + + Collapse.prototype.addAriaAndCollapsedClass = function ($element, $trigger) { + var isOpen = $element.hasClass('in') + + $element.attr('aria-expanded', isOpen) + $trigger + .toggleClass('collapsed', !isOpen) + .attr('aria-expanded', isOpen) + } + + function getTargetFromTrigger($trigger) { + var href + var target = $trigger.attr('data-target') + || (href = $trigger.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7 + + return $(target) + } + + + // COLLAPSE PLUGIN DEFINITION + // ========================== + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.collapse') + var options = $.extend({}, Collapse.DEFAULTS, $this.data(), typeof option == 'object' && option) + + if (!data && options.toggle && /show|hide/.test(option)) options.toggle = false + if (!data) $this.data('bs.collapse', (data = new Collapse(this, options))) + if (typeof option == 'string') data[option]() + }) + } + + var old = $.fn.collapse + + $.fn.collapse = Plugin + $.fn.collapse.Constructor = Collapse + + + // COLLAPSE NO CONFLICT + // ==================== + + $.fn.collapse.noConflict = function () { + $.fn.collapse = old + return this + } + + + // COLLAPSE DATA-API + // ================= + + $(document).on('click.bs.collapse.data-api', '[data-toggle="collapse"]', function (e) { + var $this = $(this) + + if (!$this.attr('data-target')) e.preventDefault() + + var $target = getTargetFromTrigger($this) + var data = $target.data('bs.collapse') + var option = data ? 'toggle' : $this.data() + + Plugin.call($target, option) + }) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: dropdown.js v3.3.7 + * http://getbootstrap.com/javascript/#dropdowns + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // DROPDOWN CLASS DEFINITION + // ========================= + + var backdrop = '.dropdown-backdrop' + var toggle = '[data-toggle="dropdown"]' + var Dropdown = function (element) { + $(element).on('click.bs.dropdown', this.toggle) + } + + Dropdown.VERSION = '3.3.7' + + function getParent($this) { + var selector = $this.attr('data-target') + + if (!selector) { + selector = $this.attr('href') + selector = selector && /#[A-Za-z]/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7 + } + + var $parent = selector && $(selector) + + return $parent && $parent.length ? 
$parent : $this.parent() + } + + function clearMenus(e) { + if (e && e.which === 3) return + $(backdrop).remove() + $(toggle).each(function () { + var $this = $(this) + var $parent = getParent($this) + var relatedTarget = { relatedTarget: this } + + if (!$parent.hasClass('open')) return + + if (e && e.type == 'click' && /input|textarea/i.test(e.target.tagName) && $.contains($parent[0], e.target)) return + + $parent.trigger(e = $.Event('hide.bs.dropdown', relatedTarget)) + + if (e.isDefaultPrevented()) return + + $this.attr('aria-expanded', 'false') + $parent.removeClass('open').trigger($.Event('hidden.bs.dropdown', relatedTarget)) + }) + } + + Dropdown.prototype.toggle = function (e) { + var $this = $(this) + + if ($this.is('.disabled, :disabled')) return + + var $parent = getParent($this) + var isActive = $parent.hasClass('open') + + clearMenus() + + if (!isActive) { + if ('ontouchstart' in document.documentElement && !$parent.closest('.navbar-nav').length) { + // if mobile we use a backdrop because click events don't delegate + $(document.createElement('div')) + .addClass('dropdown-backdrop') + .insertAfter($(this)) + .on('click', clearMenus) + } + + var relatedTarget = { relatedTarget: this } + $parent.trigger(e = $.Event('show.bs.dropdown', relatedTarget)) + + if (e.isDefaultPrevented()) return + + $this + .trigger('focus') + .attr('aria-expanded', 'true') + + $parent + .toggleClass('open') + .trigger($.Event('shown.bs.dropdown', relatedTarget)) + } + + return false + } + + Dropdown.prototype.keydown = function (e) { + if (!/(38|40|27|32)/.test(e.which) || /input|textarea/i.test(e.target.tagName)) return + + var $this = $(this) + + e.preventDefault() + e.stopPropagation() + + if ($this.is('.disabled, :disabled')) return + + var $parent = getParent($this) + var isActive = $parent.hasClass('open') + + if (!isActive && e.which != 27 || isActive && e.which == 27) { + if (e.which == 27) $parent.find(toggle).trigger('focus') + return $this.trigger('click') + } + + var desc = ' li:not(.disabled):visible a' + var $items = $parent.find('.dropdown-menu' + desc) + + if (!$items.length) return + + var index = $items.index(e.target) + + if (e.which == 38 && index > 0) index-- // up + if (e.which == 40 && index < $items.length - 1) index++ // down + if (!~index) index = 0 + + $items.eq(index).trigger('focus') + } + + + // DROPDOWN PLUGIN DEFINITION + // ========================== + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.dropdown') + + if (!data) $this.data('bs.dropdown', (data = new Dropdown(this))) + if (typeof option == 'string') data[option].call($this) + }) + } + + var old = $.fn.dropdown + + $.fn.dropdown = Plugin + $.fn.dropdown.Constructor = Dropdown + + + // DROPDOWN NO CONFLICT + // ==================== + + $.fn.dropdown.noConflict = function () { + $.fn.dropdown = old + return this + } + + + // APPLY TO STANDARD DROPDOWN ELEMENTS + // =================================== + + $(document) + .on('click.bs.dropdown.data-api', clearMenus) + .on('click.bs.dropdown.data-api', '.dropdown form', function (e) { e.stopPropagation() }) + .on('click.bs.dropdown.data-api', toggle, Dropdown.prototype.toggle) + .on('keydown.bs.dropdown.data-api', toggle, Dropdown.prototype.keydown) + .on('keydown.bs.dropdown.data-api', '.dropdown-menu', Dropdown.prototype.keydown) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: modal.js v3.3.7 + * http://getbootstrap.com/javascript/#modals + * 
======================================================================== + * Copyright 2011-2016 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // MODAL CLASS DEFINITION + // ====================== + + var Modal = function (element, options) { + this.options = options + this.$body = $(document.body) + this.$element = $(element) + this.$dialog = this.$element.find('.modal-dialog') + this.$backdrop = null + this.isShown = null + this.originalBodyPad = null + this.scrollbarWidth = 0 + this.ignoreBackdropClick = false + + if (this.options.remote) { + this.$element + .find('.modal-content') + .load(this.options.remote, $.proxy(function () { + this.$element.trigger('loaded.bs.modal') + }, this)) + } + } + + Modal.VERSION = '3.3.7' + + Modal.TRANSITION_DURATION = 300 + Modal.BACKDROP_TRANSITION_DURATION = 150 + + Modal.DEFAULTS = { + backdrop: true, + keyboard: true, + show: true + } + + Modal.prototype.toggle = function (_relatedTarget) { + return this.isShown ? this.hide() : this.show(_relatedTarget) + } + + Modal.prototype.show = function (_relatedTarget) { + var that = this + var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget }) + + this.$element.trigger(e) + + if (this.isShown || e.isDefaultPrevented()) return + + this.isShown = true + + this.checkScrollbar() + this.setScrollbar() + this.$body.addClass('modal-open') + + this.escape() + this.resize() + + this.$element.on('click.dismiss.bs.modal', '[data-dismiss="modal"]', $.proxy(this.hide, this)) + + this.$dialog.on('mousedown.dismiss.bs.modal', function () { + that.$element.one('mouseup.dismiss.bs.modal', function (e) { + if ($(e.target).is(that.$element)) that.ignoreBackdropClick = true + }) + }) + + this.backdrop(function () { + var transition = $.support.transition && that.$element.hasClass('fade') + + if (!that.$element.parent().length) { + that.$element.appendTo(that.$body) // don't move modals dom position + } + + that.$element + .show() + .scrollTop(0) + + that.adjustDialog() + + if (transition) { + that.$element[0].offsetWidth // force reflow + } + + that.$element.addClass('in') + + that.enforceFocus() + + var e = $.Event('shown.bs.modal', { relatedTarget: _relatedTarget }) + + transition ? + that.$dialog // wait for modal to slide in + .one('bsTransitionEnd', function () { + that.$element.trigger('focus').trigger(e) + }) + .emulateTransitionEnd(Modal.TRANSITION_DURATION) : + that.$element.trigger('focus').trigger(e) + }) + } + + Modal.prototype.hide = function (e) { + if (e) e.preventDefault() + + e = $.Event('hide.bs.modal') + + this.$element.trigger(e) + + if (!this.isShown || e.isDefaultPrevented()) return + + this.isShown = false + + this.escape() + this.resize() + + $(document).off('focusin.bs.modal') + + this.$element + .removeClass('in') + .off('click.dismiss.bs.modal') + .off('mouseup.dismiss.bs.modal') + + this.$dialog.off('mousedown.dismiss.bs.modal') + + $.support.transition && this.$element.hasClass('fade') ? 
+ this.$element + .one('bsTransitionEnd', $.proxy(this.hideModal, this)) + .emulateTransitionEnd(Modal.TRANSITION_DURATION) : + this.hideModal() + } + + Modal.prototype.enforceFocus = function () { + $(document) + .off('focusin.bs.modal') // guard against infinite focus loop + .on('focusin.bs.modal', $.proxy(function (e) { + if (document !== e.target && + this.$element[0] !== e.target && + !this.$element.has(e.target).length) { + this.$element.trigger('focus') + } + }, this)) + } + + Modal.prototype.escape = function () { + if (this.isShown && this.options.keyboard) { + this.$element.on('keydown.dismiss.bs.modal', $.proxy(function (e) { + e.which == 27 && this.hide() + }, this)) + } else if (!this.isShown) { + this.$element.off('keydown.dismiss.bs.modal') + } + } + + Modal.prototype.resize = function () { + if (this.isShown) { + $(window).on('resize.bs.modal', $.proxy(this.handleUpdate, this)) + } else { + $(window).off('resize.bs.modal') + } + } + + Modal.prototype.hideModal = function () { + var that = this + this.$element.hide() + this.backdrop(function () { + that.$body.removeClass('modal-open') + that.resetAdjustments() + that.resetScrollbar() + that.$element.trigger('hidden.bs.modal') + }) + } + + Modal.prototype.removeBackdrop = function () { + this.$backdrop && this.$backdrop.remove() + this.$backdrop = null + } + + Modal.prototype.backdrop = function (callback) { + var that = this + var animate = this.$element.hasClass('fade') ? 'fade' : '' + + if (this.isShown && this.options.backdrop) { + var doAnimate = $.support.transition && animate + + this.$backdrop = $(document.createElement('div')) + .addClass('modal-backdrop ' + animate) + .appendTo(this.$body) + + this.$element.on('click.dismiss.bs.modal', $.proxy(function (e) { + if (this.ignoreBackdropClick) { + this.ignoreBackdropClick = false + return + } + if (e.target !== e.currentTarget) return + this.options.backdrop == 'static' + ? this.$element[0].focus() + : this.hide() + }, this)) + + if (doAnimate) this.$backdrop[0].offsetWidth // force reflow + + this.$backdrop.addClass('in') + + if (!callback) return + + doAnimate ? + this.$backdrop + .one('bsTransitionEnd', callback) + .emulateTransitionEnd(Modal.BACKDROP_TRANSITION_DURATION) : + callback() + + } else if (!this.isShown && this.$backdrop) { + this.$backdrop.removeClass('in') + + var callbackRemove = function () { + that.removeBackdrop() + callback && callback() + } + $.support.transition && this.$element.hasClass('fade') ? + this.$backdrop + .one('bsTransitionEnd', callbackRemove) + .emulateTransitionEnd(Modal.BACKDROP_TRANSITION_DURATION) : + callbackRemove() + + } else if (callback) { + callback() + } + } + + // these following methods are used to handle overflowing modals + + Modal.prototype.handleUpdate = function () { + this.adjustDialog() + } + + Modal.prototype.adjustDialog = function () { + var modalIsOverflowing = this.$element[0].scrollHeight > document.documentElement.clientHeight + + this.$element.css({ + paddingLeft: !this.bodyIsOverflowing && modalIsOverflowing ? this.scrollbarWidth : '', + paddingRight: this.bodyIsOverflowing && !modalIsOverflowing ? 
this.scrollbarWidth : '' + }) + } + + Modal.prototype.resetAdjustments = function () { + this.$element.css({ + paddingLeft: '', + paddingRight: '' + }) + } + + Modal.prototype.checkScrollbar = function () { + var fullWindowWidth = window.innerWidth + if (!fullWindowWidth) { // workaround for missing window.innerWidth in IE8 + var documentElementRect = document.documentElement.getBoundingClientRect() + fullWindowWidth = documentElementRect.right - Math.abs(documentElementRect.left) + } + this.bodyIsOverflowing = document.body.clientWidth < fullWindowWidth + this.scrollbarWidth = this.measureScrollbar() + } + + Modal.prototype.setScrollbar = function () { + var bodyPad = parseInt((this.$body.css('padding-right') || 0), 10) + this.originalBodyPad = document.body.style.paddingRight || '' + if (this.bodyIsOverflowing) this.$body.css('padding-right', bodyPad + this.scrollbarWidth) + } + + Modal.prototype.resetScrollbar = function () { + this.$body.css('padding-right', this.originalBodyPad) + } + + Modal.prototype.measureScrollbar = function () { // thx walsh + var scrollDiv = document.createElement('div') + scrollDiv.className = 'modal-scrollbar-measure' + this.$body.append(scrollDiv) + var scrollbarWidth = scrollDiv.offsetWidth - scrollDiv.clientWidth + this.$body[0].removeChild(scrollDiv) + return scrollbarWidth + } + + + // MODAL PLUGIN DEFINITION + // ======================= + + function Plugin(option, _relatedTarget) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.modal') + var options = $.extend({}, Modal.DEFAULTS, $this.data(), typeof option == 'object' && option) + + if (!data) $this.data('bs.modal', (data = new Modal(this, options))) + if (typeof option == 'string') data[option](_relatedTarget) + else if (options.show) data.show(_relatedTarget) + }) + } + + var old = $.fn.modal + + $.fn.modal = Plugin + $.fn.modal.Constructor = Modal + + + // MODAL NO CONFLICT + // ================= + + $.fn.modal.noConflict = function () { + $.fn.modal = old + return this + } + + + // MODAL DATA-API + // ============== + + $(document).on('click.bs.modal.data-api', '[data-toggle="modal"]', function (e) { + var $this = $(this) + var href = $this.attr('href') + var $target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))) // strip for ie7 + var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data()) + + if ($this.is('a')) e.preventDefault() + + $target.one('show.bs.modal', function (showEvent) { + if (showEvent.isDefaultPrevented()) return // only register focus restorer if modal will actually get shown + $target.one('hidden.bs.modal', function () { + $this.is(':visible') && $this.trigger('focus') + }) + }) + Plugin.call($target, option, this) + }) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: tooltip.js v3.3.7 + * http://getbootstrap.com/javascript/#tooltip + * Inspired by the original jQuery.tipsy by Jason Frame + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. 
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // TOOLTIP PUBLIC CLASS DEFINITION + // =============================== + + var Tooltip = function (element, options) { + this.type = null + this.options = null + this.enabled = null + this.timeout = null + this.hoverState = null + this.$element = null + this.inState = null + + this.init('tooltip', element, options) + } + + Tooltip.VERSION = '3.3.7' + + Tooltip.TRANSITION_DURATION = 150 + + Tooltip.DEFAULTS = { + animation: true, + placement: 'top', + selector: false, + template: '<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>', + trigger: 'hover focus', + title: '', + delay: 0, + html: false, + container: false, + viewport: { + selector: 'body', + padding: 0 + } + } + + Tooltip.prototype.init = function (type, element, options) { + this.enabled = true + this.type = type + this.$element = $(element) + this.options = this.getOptions(options) + this.$viewport = this.options.viewport && $($.isFunction(this.options.viewport) ? this.options.viewport.call(this, this.$element) : (this.options.viewport.selector || this.options.viewport)) + this.inState = { click: false, hover: false, focus: false } + + if (this.$element[0] instanceof document.constructor && !this.options.selector) { + throw new Error('`selector` option must be specified when initializing ' + this.type + ' on the window.document object!') + } + + var triggers = this.options.trigger.split(' ') + + for (var i = triggers.length; i--;) { + var trigger = triggers[i] + + if (trigger == 'click') { + this.$element.on('click.' + this.type, this.options.selector, $.proxy(this.toggle, this)) + } else if (trigger != 'manual') { + var eventIn = trigger == 'hover' ? 'mouseenter' : 'focusin' + var eventOut = trigger == 'hover' ? 'mouseleave' : 'focusout' + + this.$element.on(eventIn + '.' + this.type, this.options.selector, $.proxy(this.enter, this)) + this.$element.on(eventOut + '.' + this.type, this.options.selector, $.proxy(this.leave, this)) + } + } + + this.options.selector ? + (this._options = $.extend({}, this.options, { trigger: 'manual', selector: '' })) : + this.fixTitle() + } + + Tooltip.prototype.getDefaults = function () { + return Tooltip.DEFAULTS + } + + Tooltip.prototype.getOptions = function (options) { + options = $.extend({}, this.getDefaults(), this.$element.data(), options) + + if (options.delay && typeof options.delay == 'number') { + options.delay = { + show: options.delay, + hide: options.delay + } + } + + return options + } + + Tooltip.prototype.getDelegateOptions = function () { + var options = {} + var defaults = this.getDefaults() + + this._options && $.each(this._options, function (key, value) { + if (defaults[key] != value) options[key] = value + }) + + return options + } + + Tooltip.prototype.enter = function (obj) { + var self = obj instanceof this.constructor ? + obj : $(obj.currentTarget).data('bs.' + this.type) + + if (!self) { + self = new this.constructor(obj.currentTarget, this.getDelegateOptions()) + $(obj.currentTarget).data('bs.' + this.type, self) + } + + if (obj instanceof $.Event) { + self.inState[obj.type == 'focusin' ?
'focus' : 'hover'] = true + } + + if (self.tip().hasClass('in') || self.hoverState == 'in') { + self.hoverState = 'in' + return + } + + clearTimeout(self.timeout) + + self.hoverState = 'in' + + if (!self.options.delay || !self.options.delay.show) return self.show() + + self.timeout = setTimeout(function () { + if (self.hoverState == 'in') self.show() + }, self.options.delay.show) + } + + Tooltip.prototype.isInStateTrue = function () { + for (var key in this.inState) { + if (this.inState[key]) return true + } + + return false + } + + Tooltip.prototype.leave = function (obj) { + var self = obj instanceof this.constructor ? + obj : $(obj.currentTarget).data('bs.' + this.type) + + if (!self) { + self = new this.constructor(obj.currentTarget, this.getDelegateOptions()) + $(obj.currentTarget).data('bs.' + this.type, self) + } + + if (obj instanceof $.Event) { + self.inState[obj.type == 'focusout' ? 'focus' : 'hover'] = false + } + + if (self.isInStateTrue()) return + + clearTimeout(self.timeout) + + self.hoverState = 'out' + + if (!self.options.delay || !self.options.delay.hide) return self.hide() + + self.timeout = setTimeout(function () { + if (self.hoverState == 'out') self.hide() + }, self.options.delay.hide) + } + + Tooltip.prototype.show = function () { + var e = $.Event('show.bs.' + this.type) + + if (this.hasContent() && this.enabled) { + this.$element.trigger(e) + + var inDom = $.contains(this.$element[0].ownerDocument.documentElement, this.$element[0]) + if (e.isDefaultPrevented() || !inDom) return + var that = this + + var $tip = this.tip() + + var tipId = this.getUID(this.type) + + this.setContent() + $tip.attr('id', tipId) + this.$element.attr('aria-describedby', tipId) + + if (this.options.animation) $tip.addClass('fade') + + var placement = typeof this.options.placement == 'function' ? + this.options.placement.call(this, $tip[0], this.$element[0]) : + this.options.placement + + var autoToken = /\s?auto?\s?/i + var autoPlace = autoToken.test(placement) + if (autoPlace) placement = placement.replace(autoToken, '') || 'top' + + $tip + .detach() + .css({ top: 0, left: 0, display: 'block' }) + .addClass(placement) + .data('bs.' + this.type, this) + + this.options.container ? $tip.appendTo(this.options.container) : $tip.insertAfter(this.$element) + this.$element.trigger('inserted.bs.' + this.type) + + var pos = this.getPosition() + var actualWidth = $tip[0].offsetWidth + var actualHeight = $tip[0].offsetHeight + + if (autoPlace) { + var orgPlacement = placement + var viewportDim = this.getPosition(this.$viewport) + + placement = placement == 'bottom' && pos.bottom + actualHeight > viewportDim.bottom ? 'top' : + placement == 'top' && pos.top - actualHeight < viewportDim.top ? 'bottom' : + placement == 'right' && pos.right + actualWidth > viewportDim.width ? 'left' : + placement == 'left' && pos.left - actualWidth < viewportDim.left ? 'right' : + placement + + $tip + .removeClass(orgPlacement) + .addClass(placement) + } + + var calculatedOffset = this.getCalculatedOffset(placement, pos, actualWidth, actualHeight) + + this.applyPlacement(calculatedOffset, placement) + + var complete = function () { + var prevHoverState = that.hoverState + that.$element.trigger('shown.bs.' + that.type) + that.hoverState = null + + if (prevHoverState == 'out') that.leave(that) + } + + $.support.transition && this.$tip.hasClass('fade') ? 
+ $tip + .one('bsTransitionEnd', complete) + .emulateTransitionEnd(Tooltip.TRANSITION_DURATION) : + complete() + } + } + + Tooltip.prototype.applyPlacement = function (offset, placement) { + var $tip = this.tip() + var width = $tip[0].offsetWidth + var height = $tip[0].offsetHeight + + // manually read margins because getBoundingClientRect includes difference + var marginTop = parseInt($tip.css('margin-top'), 10) + var marginLeft = parseInt($tip.css('margin-left'), 10) + + // we must check for NaN for ie 8/9 + if (isNaN(marginTop)) marginTop = 0 + if (isNaN(marginLeft)) marginLeft = 0 + + offset.top += marginTop + offset.left += marginLeft + + // $.fn.offset doesn't round pixel values + // so we use setOffset directly with our own function B-0 + $.offset.setOffset($tip[0], $.extend({ + using: function (props) { + $tip.css({ + top: Math.round(props.top), + left: Math.round(props.left) + }) + } + }, offset), 0) + + $tip.addClass('in') + + // check to see if placing tip in new offset caused the tip to resize itself + var actualWidth = $tip[0].offsetWidth + var actualHeight = $tip[0].offsetHeight + + if (placement == 'top' && actualHeight != height) { + offset.top = offset.top + height - actualHeight + } + + var delta = this.getViewportAdjustedDelta(placement, offset, actualWidth, actualHeight) + + if (delta.left) offset.left += delta.left + else offset.top += delta.top + + var isVertical = /top|bottom/.test(placement) + var arrowDelta = isVertical ? delta.left * 2 - width + actualWidth : delta.top * 2 - height + actualHeight + var arrowOffsetPosition = isVertical ? 'offsetWidth' : 'offsetHeight' + + $tip.offset(offset) + this.replaceArrow(arrowDelta, $tip[0][arrowOffsetPosition], isVertical) + } + + Tooltip.prototype.replaceArrow = function (delta, dimension, isVertical) { + this.arrow() + .css(isVertical ? 'left' : 'top', 50 * (1 - delta / dimension) + '%') + .css(isVertical ? 'top' : 'left', '') + } + + Tooltip.prototype.setContent = function () { + var $tip = this.tip() + var title = this.getTitle() + + $tip.find('.tooltip-inner')[this.options.html ? 'html' : 'text'](title) + $tip.removeClass('fade in top bottom left right') + } + + Tooltip.prototype.hide = function (callback) { + var that = this + var $tip = $(this.$tip) + var e = $.Event('hide.bs.' + this.type) + + function complete() { + if (that.hoverState != 'in') $tip.detach() + if (that.$element) { // TODO: Check whether guarding this code with this `if` is really necessary. + that.$element + .removeAttr('aria-describedby') + .trigger('hidden.bs.' + that.type) + } + callback && callback() + } + + this.$element.trigger(e) + + if (e.isDefaultPrevented()) return + + $tip.removeClass('in') + + $.support.transition && $tip.hasClass('fade') ? 
+ $tip + .one('bsTransitionEnd', complete) + .emulateTransitionEnd(Tooltip.TRANSITION_DURATION) : + complete() + + this.hoverState = null + + return this + } + + Tooltip.prototype.fixTitle = function () { + var $e = this.$element + if ($e.attr('title') || typeof $e.attr('data-original-title') != 'string') { + $e.attr('data-original-title', $e.attr('title') || '').attr('title', '') + } + } + + Tooltip.prototype.hasContent = function () { + return this.getTitle() + } + + Tooltip.prototype.getPosition = function ($element) { + $element = $element || this.$element + + var el = $element[0] + var isBody = el.tagName == 'BODY' + + var elRect = el.getBoundingClientRect() + if (elRect.width == null) { + // width and height are missing in IE8, so compute them manually; see https://github.com/twbs/bootstrap/issues/14093 + elRect = $.extend({}, elRect, { width: elRect.right - elRect.left, height: elRect.bottom - elRect.top }) + } + var isSvg = window.SVGElement && el instanceof window.SVGElement + // Avoid using $.offset() on SVGs since it gives incorrect results in jQuery 3. + // See https://github.com/twbs/bootstrap/issues/20280 + var elOffset = isBody ? { top: 0, left: 0 } : (isSvg ? null : $element.offset()) + var scroll = { scroll: isBody ? document.documentElement.scrollTop || document.body.scrollTop : $element.scrollTop() } + var outerDims = isBody ? { width: $(window).width(), height: $(window).height() } : null + + return $.extend({}, elRect, scroll, outerDims, elOffset) + } + + Tooltip.prototype.getCalculatedOffset = function (placement, pos, actualWidth, actualHeight) { + return placement == 'bottom' ? { top: pos.top + pos.height, left: pos.left + pos.width / 2 - actualWidth / 2 } : + placement == 'top' ? { top: pos.top - actualHeight, left: pos.left + pos.width / 2 - actualWidth / 2 } : + placement == 'left' ? { top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left - actualWidth } : + /* placement == 'right' */ { top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left + pos.width } + + } + + Tooltip.prototype.getViewportAdjustedDelta = function (placement, pos, actualWidth, actualHeight) { + var delta = { top: 0, left: 0 } + if (!this.$viewport) return delta + + var viewportPadding = this.options.viewport && this.options.viewport.padding || 0 + var viewportDimensions = this.getPosition(this.$viewport) + + if (/right|left/.test(placement)) { + var topEdgeOffset = pos.top - viewportPadding - viewportDimensions.scroll + var bottomEdgeOffset = pos.top + viewportPadding - viewportDimensions.scroll + actualHeight + if (topEdgeOffset < viewportDimensions.top) { // top overflow + delta.top = viewportDimensions.top - topEdgeOffset + } else if (bottomEdgeOffset > viewportDimensions.top + viewportDimensions.height) { // bottom overflow + delta.top = viewportDimensions.top + viewportDimensions.height - bottomEdgeOffset + } + } else { + var leftEdgeOffset = pos.left - viewportPadding + var rightEdgeOffset = pos.left + viewportPadding + actualWidth + if (leftEdgeOffset < viewportDimensions.left) { // left overflow + delta.left = viewportDimensions.left - leftEdgeOffset + } else if (rightEdgeOffset > viewportDimensions.right) { // right overflow + delta.left = viewportDimensions.left + viewportDimensions.width - rightEdgeOffset + } + } + + return delta + } + + Tooltip.prototype.getTitle = function () { + var title + var $e = this.$element + var o = this.options + + title = $e.attr('data-original-title') + || (typeof o.title == 'function' ? 
o.title.call($e[0]) : o.title) + + return title + } + + Tooltip.prototype.getUID = function (prefix) { + do prefix += ~~(Math.random() * 1000000) + while (document.getElementById(prefix)) + return prefix + } + + Tooltip.prototype.tip = function () { + if (!this.$tip) { + this.$tip = $(this.options.template) + if (this.$tip.length != 1) { + throw new Error(this.type + ' `template` option must consist of exactly 1 top-level element!') + } + } + return this.$tip + } + + Tooltip.prototype.arrow = function () { + return (this.$arrow = this.$arrow || this.tip().find('.tooltip-arrow')) + } + + Tooltip.prototype.enable = function () { + this.enabled = true + } + + Tooltip.prototype.disable = function () { + this.enabled = false + } + + Tooltip.prototype.toggleEnabled = function () { + this.enabled = !this.enabled + } + + Tooltip.prototype.toggle = function (e) { + var self = this + if (e) { + self = $(e.currentTarget).data('bs.' + this.type) + if (!self) { + self = new this.constructor(e.currentTarget, this.getDelegateOptions()) + $(e.currentTarget).data('bs.' + this.type, self) + } + } + + if (e) { + self.inState.click = !self.inState.click + if (self.isInStateTrue()) self.enter(self) + else self.leave(self) + } else { + self.tip().hasClass('in') ? self.leave(self) : self.enter(self) + } + } + + Tooltip.prototype.destroy = function () { + var that = this + clearTimeout(this.timeout) + this.hide(function () { + that.$element.off('.' + that.type).removeData('bs.' + that.type) + if (that.$tip) { + that.$tip.detach() + } + that.$tip = null + that.$arrow = null + that.$viewport = null + that.$element = null + }) + } + + + // TOOLTIP PLUGIN DEFINITION + // ========================= + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.tooltip') + var options = typeof option == 'object' && option + + if (!data && /destroy|hide/.test(option)) return + if (!data) $this.data('bs.tooltip', (data = new Tooltip(this, options))) + if (typeof option == 'string') data[option]() + }) + } + + var old = $.fn.tooltip + + $.fn.tooltip = Plugin + $.fn.tooltip.Constructor = Tooltip + + + // TOOLTIP NO CONFLICT + // =================== + + $.fn.tooltip.noConflict = function () { + $.fn.tooltip = old + return this + } + +}(jQuery); + +/* ======================================================================== + * Bootstrap: popover.js v3.3.7 + * http://getbootstrap.com/javascript/#popovers + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. 
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // POPOVER PUBLIC CLASS DEFINITION + // =============================== + + var Popover = function (element, options) { + this.init('popover', element, options) + } + + if (!$.fn.tooltip) throw new Error('Popover requires tooltip.js') + + Popover.VERSION = '3.3.7' + + Popover.DEFAULTS = $.extend({}, $.fn.tooltip.Constructor.DEFAULTS, { + placement: 'right', + trigger: 'click', + content: '', + template: '<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>' + }) + + + // NOTE: POPOVER EXTENDS tooltip.js + // ================================ + + Popover.prototype = $.extend({}, $.fn.tooltip.Constructor.prototype) + + Popover.prototype.constructor = Popover + + Popover.prototype.getDefaults = function () { + return Popover.DEFAULTS + } + + Popover.prototype.setContent = function () { + var $tip = this.tip() + var title = this.getTitle() + var content = this.getContent() + + $tip.find('.popover-title')[this.options.html ? 'html' : 'text'](title) + $tip.find('.popover-content').children().detach().end()[ // we use append for html objects to maintain js events + this.options.html ? (typeof content == 'string' ? 'html' : 'append') : 'text' + ](content) + + $tip.removeClass('fade top bottom left right in') + + // IE8 doesn't accept hiding via the `:empty` pseudo selector, we have to do + // this manually by checking the contents. + if (!$tip.find('.popover-title').html()) $tip.find('.popover-title').hide() + } + + Popover.prototype.hasContent = function () { + return this.getTitle() || this.getContent() + } + + Popover.prototype.getContent = function () { + var $e = this.$element + var o = this.options + + return $e.attr('data-content') + || (typeof o.content == 'function' ? + o.content.call($e[0]) : + o.content) + } + + Popover.prototype.arrow = function () { + return (this.$arrow = this.$arrow || this.tip().find('.arrow')) + } + + + // POPOVER PLUGIN DEFINITION + // ========================= + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.popover') + var options = typeof option == 'object' && option + + if (!data && /destroy|hide/.test(option)) return + if (!data) $this.data('bs.popover', (data = new Popover(this, options))) + if (typeof option == 'string') data[option]() + }) + } + + var old = $.fn.popover + + $.fn.popover = Plugin + $.fn.popover.Constructor = Popover + + + // POPOVER NO CONFLICT + // =================== + + $.fn.popover.noConflict = function () { + $.fn.popover = old + return this + } + +}(jQuery); + +/* ======================================================================== + * Bootstrap: scrollspy.js v3.3.7 + * http://getbootstrap.com/javascript/#scrollspy + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // SCROLLSPY CLASS DEFINITION + // ========================== + + function ScrollSpy(element, options) { + this.$body = $(document.body) + this.$scrollElement = $(element).is(document.body) ?
$(window) : $(element) + this.options = $.extend({}, ScrollSpy.DEFAULTS, options) + this.selector = (this.options.target || '') + ' .nav li > a' + this.offsets = [] + this.targets = [] + this.activeTarget = null + this.scrollHeight = 0 + + this.$scrollElement.on('scroll.bs.scrollspy', $.proxy(this.process, this)) + this.refresh() + this.process() + } + + ScrollSpy.VERSION = '3.3.7' + + ScrollSpy.DEFAULTS = { + offset: 10 + } + + ScrollSpy.prototype.getScrollHeight = function () { + return this.$scrollElement[0].scrollHeight || Math.max(this.$body[0].scrollHeight, document.documentElement.scrollHeight) + } + + ScrollSpy.prototype.refresh = function () { + var that = this + var offsetMethod = 'offset' + var offsetBase = 0 + + this.offsets = [] + this.targets = [] + this.scrollHeight = this.getScrollHeight() + + if (!$.isWindow(this.$scrollElement[0])) { + offsetMethod = 'position' + offsetBase = this.$scrollElement.scrollTop() + } + + this.$body + .find(this.selector) + .map(function () { + var $el = $(this) + var href = $el.data('target') || $el.attr('href') + var $href = /^#./.test(href) && $(href) + + return ($href + && $href.length + && $href.is(':visible') + && [[$href[offsetMethod]().top + offsetBase, href]]) || null + }) + .sort(function (a, b) { return a[0] - b[0] }) + .each(function () { + that.offsets.push(this[0]) + that.targets.push(this[1]) + }) + } + + ScrollSpy.prototype.process = function () { + var scrollTop = this.$scrollElement.scrollTop() + this.options.offset + var scrollHeight = this.getScrollHeight() + var maxScroll = this.options.offset + scrollHeight - this.$scrollElement.height() + var offsets = this.offsets + var targets = this.targets + var activeTarget = this.activeTarget + var i + + if (this.scrollHeight != scrollHeight) { + this.refresh() + } + + if (scrollTop >= maxScroll) { + return activeTarget != (i = targets[targets.length - 1]) && this.activate(i) + } + + if (activeTarget && scrollTop < offsets[0]) { + this.activeTarget = null + return this.clear() + } + + for (i = offsets.length; i--;) { + activeTarget != targets[i] + && scrollTop >= offsets[i] + && (offsets[i + 1] === undefined || scrollTop < offsets[i + 1]) + && this.activate(targets[i]) + } + } + + ScrollSpy.prototype.activate = function (target) { + this.activeTarget = target + + this.clear() + + var selector = this.selector + + '[data-target="' + target + '"],' + + this.selector + '[href="' + target + '"]' + + var active = $(selector) + .parents('li') + .addClass('active') + + if (active.parent('.dropdown-menu').length) { + active = active + .closest('li.dropdown') + .addClass('active') + } + + active.trigger('activate.bs.scrollspy') + } + + ScrollSpy.prototype.clear = function () { + $(this.selector) + .parentsUntil(this.options.target, '.active') + .removeClass('active') + } + + + // SCROLLSPY PLUGIN DEFINITION + // =========================== + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.scrollspy') + var options = typeof option == 'object' && option + + if (!data) $this.data('bs.scrollspy', (data = new ScrollSpy(this, options))) + if (typeof option == 'string') data[option]() + }) + } + + var old = $.fn.scrollspy + + $.fn.scrollspy = Plugin + $.fn.scrollspy.Constructor = ScrollSpy + + + // SCROLLSPY NO CONFLICT + // ===================== + + $.fn.scrollspy.noConflict = function () { + $.fn.scrollspy = old + return this + } + + + // SCROLLSPY DATA-API + // ================== + + $(window).on('load.bs.scrollspy.data-api', function 
() { + $('[data-spy="scroll"]').each(function () { + var $spy = $(this) + Plugin.call($spy, $spy.data()) + }) + }) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: tab.js v3.3.7 + * http://getbootstrap.com/javascript/#tabs + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // TAB CLASS DEFINITION + // ==================== + + var Tab = function (element) { + // jscs:disable requireDollarBeforejQueryAssignment + this.element = $(element) + // jscs:enable requireDollarBeforejQueryAssignment + } + + Tab.VERSION = '3.3.7' + + Tab.TRANSITION_DURATION = 150 + + Tab.prototype.show = function () { + var $this = this.element + var $ul = $this.closest('ul:not(.dropdown-menu)') + var selector = $this.data('target') + + if (!selector) { + selector = $this.attr('href') + selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7 + } + + if ($this.parent('li').hasClass('active')) return + + var $previous = $ul.find('.active:last a') + var hideEvent = $.Event('hide.bs.tab', { + relatedTarget: $this[0] + }) + var showEvent = $.Event('show.bs.tab', { + relatedTarget: $previous[0] + }) + + $previous.trigger(hideEvent) + $this.trigger(showEvent) + + if (showEvent.isDefaultPrevented() || hideEvent.isDefaultPrevented()) return + + var $target = $(selector) + + this.activate($this.closest('li'), $ul) + this.activate($target, $target.parent(), function () { + $previous.trigger({ + type: 'hidden.bs.tab', + relatedTarget: $this[0] + }) + $this.trigger({ + type: 'shown.bs.tab', + relatedTarget: $previous[0] + }) + }) + } + + Tab.prototype.activate = function (element, container, callback) { + var $active = container.find('> .active') + var transition = callback + && $.support.transition + && ($active.length && $active.hasClass('fade') || !!container.find('> .fade').length) + + function next() { + $active + .removeClass('active') + .find('> .dropdown-menu > .active') + .removeClass('active') + .end() + .find('[data-toggle="tab"]') + .attr('aria-expanded', false) + + element + .addClass('active') + .find('[data-toggle="tab"]') + .attr('aria-expanded', true) + + if (transition) { + element[0].offsetWidth // reflow for transition + element.addClass('in') + } else { + element.removeClass('fade') + } + + if (element.parent('.dropdown-menu').length) { + element + .closest('li.dropdown') + .addClass('active') + .end() + .find('[data-toggle="tab"]') + .attr('aria-expanded', true) + } + + callback && callback() + } + + $active.length && transition ? 
+ $active + .one('bsTransitionEnd', next) + .emulateTransitionEnd(Tab.TRANSITION_DURATION) : + next() + + $active.removeClass('in') + } + + + // TAB PLUGIN DEFINITION + // ===================== + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.tab') + + if (!data) $this.data('bs.tab', (data = new Tab(this))) + if (typeof option == 'string') data[option]() + }) + } + + var old = $.fn.tab + + $.fn.tab = Plugin + $.fn.tab.Constructor = Tab + + + // TAB NO CONFLICT + // =============== + + $.fn.tab.noConflict = function () { + $.fn.tab = old + return this + } + + + // TAB DATA-API + // ============ + + var clickHandler = function (e) { + e.preventDefault() + Plugin.call($(this), 'show') + } + + $(document) + .on('click.bs.tab.data-api', '[data-toggle="tab"]', clickHandler) + .on('click.bs.tab.data-api', '[data-toggle="pill"]', clickHandler) + +}(jQuery); + +/* ======================================================================== + * Bootstrap: affix.js v3.3.7 + * http://getbootstrap.com/javascript/#affix + * ======================================================================== + * Copyright 2011-2016 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * ======================================================================== */ + + ++function ($) { + 'use strict'; + + // AFFIX CLASS DEFINITION + // ====================== + + var Affix = function (element, options) { + this.options = $.extend({}, Affix.DEFAULTS, options) + + this.$target = $(this.options.target) + .on('scroll.bs.affix.data-api', $.proxy(this.checkPosition, this)) + .on('click.bs.affix.data-api', $.proxy(this.checkPositionWithEventLoop, this)) + + this.$element = $(element) + this.affixed = null + this.unpin = null + this.pinnedOffset = null + + this.checkPosition() + } + + Affix.VERSION = '3.3.7' + + Affix.RESET = 'affix affix-top affix-bottom' + + Affix.DEFAULTS = { + offset: 0, + target: window + } + + Affix.prototype.getState = function (scrollHeight, height, offsetTop, offsetBottom) { + var scrollTop = this.$target.scrollTop() + var position = this.$element.offset() + var targetHeight = this.$target.height() + + if (offsetTop != null && this.affixed == 'top') return scrollTop < offsetTop ? 'top' : false + + if (this.affixed == 'bottom') { + if (offsetTop != null) return (scrollTop + this.unpin <= position.top) ? false : 'bottom' + return (scrollTop + targetHeight <= scrollHeight - offsetBottom) ? false : 'bottom' + } + + var initializing = this.affixed == null + var colliderTop = initializing ? scrollTop : position.top + var colliderHeight = initializing ? 
targetHeight : height + + if (offsetTop != null && scrollTop <= offsetTop) return 'top' + if (offsetBottom != null && (colliderTop + colliderHeight >= scrollHeight - offsetBottom)) return 'bottom' + + return false + } + + Affix.prototype.getPinnedOffset = function () { + if (this.pinnedOffset) return this.pinnedOffset + this.$element.removeClass(Affix.RESET).addClass('affix') + var scrollTop = this.$target.scrollTop() + var position = this.$element.offset() + return (this.pinnedOffset = position.top - scrollTop) + } + + Affix.prototype.checkPositionWithEventLoop = function () { + setTimeout($.proxy(this.checkPosition, this), 1) + } + + Affix.prototype.checkPosition = function () { + if (!this.$element.is(':visible')) return + + var height = this.$element.height() + var offset = this.options.offset + var offsetTop = offset.top + var offsetBottom = offset.bottom + var scrollHeight = Math.max($(document).height(), $(document.body).height()) + + if (typeof offset != 'object') offsetBottom = offsetTop = offset + if (typeof offsetTop == 'function') offsetTop = offset.top(this.$element) + if (typeof offsetBottom == 'function') offsetBottom = offset.bottom(this.$element) + + var affix = this.getState(scrollHeight, height, offsetTop, offsetBottom) + + if (this.affixed != affix) { + if (this.unpin != null) this.$element.css('top', '') + + var affixType = 'affix' + (affix ? '-' + affix : '') + var e = $.Event(affixType + '.bs.affix') + + this.$element.trigger(e) + + if (e.isDefaultPrevented()) return + + this.affixed = affix + this.unpin = affix == 'bottom' ? this.getPinnedOffset() : null + + this.$element + .removeClass(Affix.RESET) + .addClass(affixType) + .trigger(affixType.replace('affix', 'affixed') + '.bs.affix') + } + + if (affix == 'bottom') { + this.$element.offset({ + top: scrollHeight - height - offsetBottom + }) + } + } + + + // AFFIX PLUGIN DEFINITION + // ======================= + + function Plugin(option) { + return this.each(function () { + var $this = $(this) + var data = $this.data('bs.affix') + var options = typeof option == 'object' && option + + if (!data) $this.data('bs.affix', (data = new Affix(this, options))) + if (typeof option == 'string') data[option]() + }) + } + + var old = $.fn.affix + + $.fn.affix = Plugin + $.fn.affix.Constructor = Affix + + + // AFFIX NO CONFLICT + // ================= + + $.fn.affix.noConflict = function () { + $.fn.affix = old + return this + } + + + // AFFIX DATA-API + // ============== + + $(window).on('load', function () { + $('[data-spy="affix"]').each(function () { + var $spy = $(this) + var data = $spy.data() + + data.offset = data.offset || {} + + if (data.offsetBottom != null) data.offset.bottom = data.offsetBottom + if (data.offsetTop != null) data.offset.top = data.offsetTop + + Plugin.call($spy, data) + }) + }) + +}(jQuery); diff --git a/scripts/bootstrap.min.js b/scripts/bootstrap.min.js new file mode 100644 index 0000000..9bcd2fc --- /dev/null +++ b/scripts/bootstrap.min.js @@ -0,0 +1,7 @@ +/*! + * Bootstrap v3.3.7 (http://getbootstrap.com) + * Copyright 2011-2016 Twitter, Inc. 
+ * Licensed under the MIT license + */ +if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");+function(a){"use strict";var b=a.fn.jquery.split(" ")[0].split(".");if(b[0]<2&&b[1]<9||1==b[0]&&9==b[1]&&b[2]<1||b[0]>3)throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4")}(jQuery),+function(a){"use strict";function b(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var c in b)if(void 0!==a.style[c])return{end:b[c]};return!1}a.fn.emulateTransitionEnd=function(b){var c=!1,d=this;a(this).one("bsTransitionEnd",function(){c=!0});var e=function(){c||a(d).trigger(a.support.transition.end)};return setTimeout(e,b),this},a(function(){a.support.transition=b(),a.support.transition&&(a.event.special.bsTransitionEnd={bindType:a.support.transition.end,delegateType:a.support.transition.end,handle:function(b){if(a(b.target).is(this))return b.handleObj.handler.apply(this,arguments)}})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var c=a(this),e=c.data("bs.alert");e||c.data("bs.alert",e=new d(this)),"string"==typeof b&&e[b].call(c)})}var c='[data-dismiss="alert"]',d=function(b){a(b).on("click",c,this.close)};d.VERSION="3.3.7",d.TRANSITION_DURATION=150,d.prototype.close=function(b){function c(){g.detach().trigger("closed.bs.alert").remove()}var e=a(this),f=e.attr("data-target");f||(f=e.attr("href"),f=f&&f.replace(/.*(?=#[^\s]*$)/,""));var g=a("#"===f?[]:f);b&&b.preventDefault(),g.length||(g=e.closest(".alert")),g.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(g.removeClass("in"),a.support.transition&&g.hasClass("fade")?g.one("bsTransitionEnd",c).emulateTransitionEnd(d.TRANSITION_DURATION):c())};var e=a.fn.alert;a.fn.alert=b,a.fn.alert.Constructor=d,a.fn.alert.noConflict=function(){return a.fn.alert=e,this},a(document).on("click.bs.alert.data-api",c,d.prototype.close)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof b&&b;e||d.data("bs.button",e=new c(this,f)),"toggle"==b?e.toggle():b&&e.setState(b)})}var c=function(b,d){this.$element=a(b),this.options=a.extend({},c.DEFAULTS,d),this.isLoading=!1};c.VERSION="3.3.7",c.DEFAULTS={loadingText:"loading..."},c.prototype.setState=function(b){var c="disabled",d=this.$element,e=d.is("input")?"val":"html",f=d.data();b+="Text",null==f.resetText&&d.data("resetText",d[e]()),setTimeout(a.proxy(function(){d[e](null==f[b]?this.options[b]:f[b]),"loadingText"==b?(this.isLoading=!0,d.addClass(c).attr(c,c).prop(c,!0)):this.isLoading&&(this.isLoading=!1,d.removeClass(c).removeAttr(c).prop(c,!1))},this),0)},c.prototype.toggle=function(){var a=!0,b=this.$element.closest('[data-toggle="buttons"]');if(b.length){var c=this.$element.find("input");"radio"==c.prop("type")?(c.prop("checked")&&(a=!1),b.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==c.prop("type")&&(c.prop("checked")!==this.$element.hasClass("active")&&(a=!1),this.$element.toggleClass("active")),c.prop("checked",this.$element.hasClass("active")),a&&c.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var d=a.fn.button;a.fn.button=b,a.fn.button.Constructor=c,a.fn.button.noConflict=function(){return 
a.fn.button=d,this},a(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(c){var d=a(c.target).closest(".btn");b.call(d,"toggle"),a(c.target).is('input[type="radio"], input[type="checkbox"]')||(c.preventDefault(),d.is("input,button")?d.trigger("focus"):d.find("input:visible,button:visible").first().trigger("focus"))}).on("focus.bs.button.data-api blur.bs.button.data-api",'[data-toggle^="button"]',function(b){a(b.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(b.type))})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.carousel"),f=a.extend({},c.DEFAULTS,d.data(),"object"==typeof b&&b),g="string"==typeof b?b:f.slide;e||d.data("bs.carousel",e=new c(this,f)),"number"==typeof b?e.to(b):g?e[g]():f.interval&&e.pause().cycle()})}var c=function(b,c){this.$element=a(b),this.$indicators=this.$element.find(".carousel-indicators"),this.options=c,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",a.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",a.proxy(this.pause,this)).on("mouseleave.bs.carousel",a.proxy(this.cycle,this))};c.VERSION="3.3.7",c.TRANSITION_DURATION=600,c.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},c.prototype.keydown=function(a){if(!/input|textarea/i.test(a.target.tagName)){switch(a.which){case 37:this.prev();break;case 39:this.next();break;default:return}a.preventDefault()}},c.prototype.cycle=function(b){return b||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},c.prototype.getItemIndex=function(a){return this.$items=a.parent().children(".item"),this.$items.index(a||this.$active)},c.prototype.getItemForDirection=function(a,b){var c=this.getItemIndex(b),d="prev"==a&&0===c||"next"==a&&c==this.$items.length-1;if(d&&!this.options.wrap)return b;var e="prev"==a?-1:1,f=(c+e)%this.$items.length;return this.$items.eq(f)},c.prototype.to=function(a){var b=this,c=this.getItemIndex(this.$active=this.$element.find(".item.active"));if(!(a>this.$items.length-1||a<0))return this.sliding?this.$element.one("slid.bs.carousel",function(){b.to(a)}):c==a?this.pause().cycle():this.slide(a>c?"next":"prev",this.$items.eq(a))},c.prototype.pause=function(b){return b||(this.paused=!0),this.$element.find(".next, .prev").length&&a.support.transition&&(this.$element.trigger(a.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},c.prototype.next=function(){if(!this.sliding)return this.slide("next")},c.prototype.prev=function(){if(!this.sliding)return this.slide("prev")},c.prototype.slide=function(b,d){var e=this.$element.find(".item.active"),f=d||this.getItemForDirection(b,e),g=this.interval,h="next"==b?"left":"right",i=this;if(f.hasClass("active"))return this.sliding=!1;var j=f[0],k=a.Event("slide.bs.carousel",{relatedTarget:j,direction:h});if(this.$element.trigger(k),!k.isDefaultPrevented()){if(this.sliding=!0,g&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var l=a(this.$indicators.children()[this.getItemIndex(f)]);l&&l.addClass("active")}var m=a.Event("slid.bs.carousel",{relatedTarget:j,direction:h});return 
a.support.transition&&this.$element.hasClass("slide")?(f.addClass(b),f[0].offsetWidth,e.addClass(h),f.addClass(h),e.one("bsTransitionEnd",function(){f.removeClass([b,h].join(" ")).addClass("active"),e.removeClass(["active",h].join(" ")),i.sliding=!1,setTimeout(function(){i.$element.trigger(m)},0)}).emulateTransitionEnd(c.TRANSITION_DURATION)):(e.removeClass("active"),f.addClass("active"),this.sliding=!1,this.$element.trigger(m)),g&&this.cycle(),this}};var d=a.fn.carousel;a.fn.carousel=b,a.fn.carousel.Constructor=c,a.fn.carousel.noConflict=function(){return a.fn.carousel=d,this};var e=function(c){var d,e=a(this),f=a(e.attr("data-target")||(d=e.attr("href"))&&d.replace(/.*(?=#[^\s]+$)/,""));if(f.hasClass("carousel")){var g=a.extend({},f.data(),e.data()),h=e.attr("data-slide-to");h&&(g.interval=!1),b.call(f,g),h&&f.data("bs.carousel").to(h),c.preventDefault()}};a(document).on("click.bs.carousel.data-api","[data-slide]",e).on("click.bs.carousel.data-api","[data-slide-to]",e),a(window).on("load",function(){a('[data-ride="carousel"]').each(function(){var c=a(this);b.call(c,c.data())})})}(jQuery),+function(a){"use strict";function b(b){var c,d=b.attr("data-target")||(c=b.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,"");return a(d)}function c(b){return this.each(function(){var c=a(this),e=c.data("bs.collapse"),f=a.extend({},d.DEFAULTS,c.data(),"object"==typeof b&&b);!e&&f.toggle&&/show|hide/.test(b)&&(f.toggle=!1),e||c.data("bs.collapse",e=new d(this,f)),"string"==typeof b&&e[b]()})}var d=function(b,c){this.$element=a(b),this.options=a.extend({},d.DEFAULTS,c),this.$trigger=a('[data-toggle="collapse"][href="#'+b.id+'"],[data-toggle="collapse"][data-target="#'+b.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};d.VERSION="3.3.7",d.TRANSITION_DURATION=350,d.DEFAULTS={toggle:!0},d.prototype.dimension=function(){var a=this.$element.hasClass("width");return a?"width":"height"},d.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var b,e=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(e&&e.length&&(b=e.data("bs.collapse"),b&&b.transitioning))){var f=a.Event("show.bs.collapse");if(this.$element.trigger(f),!f.isDefaultPrevented()){e&&e.length&&(c.call(e,"hide"),b||e.data("bs.collapse",null));var g=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[g](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var h=function(){this.$element.removeClass("collapsing").addClass("collapse in")[g](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return h.call(this);var i=a.camelCase(["scroll",g].join("-"));this.$element.one("bsTransitionEnd",a.proxy(h,this)).emulateTransitionEnd(d.TRANSITION_DURATION)[g](this.$element[0][i])}}}},d.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var b=a.Event("hide.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.dimension();this.$element[c](this.$element[c]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var e=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};return a.support.transition?void 
this.$element[c](0).one("bsTransitionEnd",a.proxy(e,this)).emulateTransitionEnd(d.TRANSITION_DURATION):e.call(this)}}},d.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},d.prototype.getParent=function(){return a(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(a.proxy(function(c,d){var e=a(d);this.addAriaAndCollapsedClass(b(e),e)},this)).end()},d.prototype.addAriaAndCollapsedClass=function(a,b){var c=a.hasClass("in");a.attr("aria-expanded",c),b.toggleClass("collapsed",!c).attr("aria-expanded",c)};var e=a.fn.collapse;a.fn.collapse=c,a.fn.collapse.Constructor=d,a.fn.collapse.noConflict=function(){return a.fn.collapse=e,this},a(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(d){var e=a(this);e.attr("data-target")||d.preventDefault();var f=b(e),g=f.data("bs.collapse"),h=g?"toggle":e.data();c.call(f,h)})}(jQuery),+function(a){"use strict";function b(b){var c=b.attr("data-target");c||(c=b.attr("href"),c=c&&/#[A-Za-z]/.test(c)&&c.replace(/.*(?=#[^\s]*$)/,""));var d=c&&a(c);return d&&d.length?d:b.parent()}function c(c){c&&3===c.which||(a(e).remove(),a(f).each(function(){var d=a(this),e=b(d),f={relatedTarget:this};e.hasClass("open")&&(c&&"click"==c.type&&/input|textarea/i.test(c.target.tagName)&&a.contains(e[0],c.target)||(e.trigger(c=a.Event("hide.bs.dropdown",f)),c.isDefaultPrevented()||(d.attr("aria-expanded","false"),e.removeClass("open").trigger(a.Event("hidden.bs.dropdown",f)))))}))}function d(b){return this.each(function(){var c=a(this),d=c.data("bs.dropdown");d||c.data("bs.dropdown",d=new g(this)),"string"==typeof b&&d[b].call(c)})}var e=".dropdown-backdrop",f='[data-toggle="dropdown"]',g=function(b){a(b).on("click.bs.dropdown",this.toggle)};g.VERSION="3.3.7",g.prototype.toggle=function(d){var e=a(this);if(!e.is(".disabled, :disabled")){var f=b(e),g=f.hasClass("open");if(c(),!g){"ontouchstart"in document.documentElement&&!f.closest(".navbar-nav").length&&a(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(a(this)).on("click",c);var h={relatedTarget:this};if(f.trigger(d=a.Event("show.bs.dropdown",h)),d.isDefaultPrevented())return;e.trigger("focus").attr("aria-expanded","true"),f.toggleClass("open").trigger(a.Event("shown.bs.dropdown",h))}return!1}},g.prototype.keydown=function(c){if(/(38|40|27|32)/.test(c.which)&&!/input|textarea/i.test(c.target.tagName)){var d=a(this);if(c.preventDefault(),c.stopPropagation(),!d.is(".disabled, :disabled")){var e=b(d),g=e.hasClass("open");if(!g&&27!=c.which||g&&27==c.which)return 27==c.which&&e.find(f).trigger("focus"),d.trigger("click");var h=" li:not(.disabled):visible a",i=e.find(".dropdown-menu"+h);if(i.length){var j=i.index(c.target);38==c.which&&j>0&&j--,40==c.which&&jdocument.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&a?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!a?this.scrollbarWidth:""})},c.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},c.prototype.checkScrollbar=function(){var a=window.innerWidth;if(!a){var b=document.documentElement.getBoundingClientRect();a=b.right-Math.abs(b.left)}this.bodyIsOverflowing=document.body.clientWidth
',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0}},c.prototype.init=function(b,c,d){if(this.enabled=!0,this.type=b,this.$element=a(c),this.options=this.getOptions(d),this.$viewport=this.options.viewport&&a(a.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when initializing "+this.type+" on the window.document object!");for(var e=this.options.trigger.split(" "),f=e.length;f--;){var g=e[f];if("click"==g)this.$element.on("click."+this.type,this.options.selector,a.proxy(this.toggle,this));else if("manual"!=g){var h="hover"==g?"mouseenter":"focusin",i="hover"==g?"mouseleave":"focusout";this.$element.on(h+"."+this.type,this.options.selector,a.proxy(this.enter,this)),this.$element.on(i+"."+this.type,this.options.selector,a.proxy(this.leave,this))}}this.options.selector?this._options=a.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.getOptions=function(b){return b=a.extend({},this.getDefaults(),this.$element.data(),b),b.delay&&"number"==typeof b.delay&&(b.delay={show:b.delay,hide:b.delay}),b},c.prototype.getDelegateOptions=function(){var b={},c=this.getDefaults();return this._options&&a.each(this._options,function(a,d){c[a]!=d&&(b[a]=d)}),b},c.prototype.enter=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusin"==b.type?"focus":"hover"]=!0),c.tip().hasClass("in")||"in"==c.hoverState?void(c.hoverState="in"):(clearTimeout(c.timeout),c.hoverState="in",c.options.delay&&c.options.delay.show?void(c.timeout=setTimeout(function(){"in"==c.hoverState&&c.show()},c.options.delay.show)):c.show())},c.prototype.isInStateTrue=function(){for(var a in this.inState)if(this.inState[a])return!0;return!1},c.prototype.leave=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);if(c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusout"==b.type?"focus":"hover"]=!1),!c.isInStateTrue())return clearTimeout(c.timeout),c.hoverState="out",c.options.delay&&c.options.delay.hide?void(c.timeout=setTimeout(function(){"out"==c.hoverState&&c.hide()},c.options.delay.hide)):c.hide()},c.prototype.show=function(){var b=a.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(b);var d=a.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(b.isDefaultPrevented()||!d)return;var e=this,f=this.tip(),g=this.getUID(this.type);this.setContent(),f.attr("id",g),this.$element.attr("aria-describedby",g),this.options.animation&&f.addClass("fade");var h="function"==typeof this.options.placement?this.options.placement.call(this,f[0],this.$element[0]):this.options.placement,i=/\s?auto?\s?/i,j=i.test(h);j&&(h=h.replace(i,"")||"top"),f.detach().css({top:0,left:0,display:"block"}).addClass(h).data("bs."+this.type,this),this.options.container?f.appendTo(this.options.container):f.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var 
k=this.getPosition(),l=f[0].offsetWidth,m=f[0].offsetHeight;if(j){var n=h,o=this.getPosition(this.$viewport);h="bottom"==h&&k.bottom+m>o.bottom?"top":"top"==h&&k.top-mo.width?"left":"left"==h&&k.left-lg.top+g.height&&(e.top=g.top+g.height-i)}else{var j=b.left-f,k=b.left+f+c;jg.right&&(e.left=g.left+g.width-k)}return e},c.prototype.getTitle=function(){var a,b=this.$element,c=this.options;return a=b.attr("data-original-title")||("function"==typeof c.title?c.title.call(b[0]):c.title)},c.prototype.getUID=function(a){do a+=~~(1e6*Math.random());while(document.getElementById(a));return a},c.prototype.tip=function(){if(!this.$tip&&(this.$tip=a(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},c.prototype.enable=function(){this.enabled=!0},c.prototype.disable=function(){this.enabled=!1},c.prototype.toggleEnabled=function(){this.enabled=!this.enabled},c.prototype.toggle=function(b){var c=this;b&&(c=a(b.currentTarget).data("bs."+this.type),c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c))),b?(c.inState.click=!c.inState.click,c.isInStateTrue()?c.enter(c):c.leave(c)):c.tip().hasClass("in")?c.leave(c):c.enter(c)},c.prototype.destroy=function(){var a=this;clearTimeout(this.timeout),this.hide(function(){a.$element.off("."+a.type).removeData("bs."+a.type),a.$tip&&a.$tip.detach(),a.$tip=null,a.$arrow=null,a.$viewport=null,a.$element=null})};var d=a.fn.tooltip;a.fn.tooltip=b,a.fn.tooltip.Constructor=c,a.fn.tooltip.noConflict=function(){return a.fn.tooltip=d,this}}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.popover"),f="object"==typeof b&&b;!e&&/destroy|hide/.test(b)||(e||d.data("bs.popover",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.init("popover",a,b)};if(!a.fn.tooltip)throw new Error("Popover requires tooltip.js");c.VERSION="3.3.7",c.DEFAULTS=a.extend({},a.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:''}),c.prototype=a.extend({},a.fn.tooltip.Constructor.prototype),c.prototype.constructor=c,c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle(),c=this.getContent();a.find(".popover-title")[this.options.html?"html":"text"](b),a.find(".popover-content").children().detach().end()[this.options.html?"string"==typeof c?"html":"append":"text"](c),a.removeClass("fade top bottom left right in"),a.find(".popover-title").html()||a.find(".popover-title").hide()},c.prototype.hasContent=function(){return this.getTitle()||this.getContent()},c.prototype.getContent=function(){var a=this.$element,b=this.options;return a.attr("data-content")||("function"==typeof b.content?b.content.call(a[0]):b.content)},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var d=a.fn.popover;a.fn.popover=b,a.fn.popover.Constructor=c,a.fn.popover.noConflict=function(){return a.fn.popover=d,this}}(jQuery),+function(a){"use strict";function b(c,d){this.$body=a(document.body),this.$scrollElement=a(a(c).is(document.body)?window:c),this.options=a.extend({},b.DEFAULTS,d),this.selector=(this.options.target||"")+" .nav li > 
a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",a.proxy(this.process,this)),this.refresh(),this.process()}function c(c){return this.each(function(){var d=a(this),e=d.data("bs.scrollspy"),f="object"==typeof c&&c;e||d.data("bs.scrollspy",e=new b(this,f)),"string"==typeof c&&e[c]()})}b.VERSION="3.3.7",b.DEFAULTS={offset:10},b.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},b.prototype.refresh=function(){var b=this,c="offset",d=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),a.isWindow(this.$scrollElement[0])||(c="position",d=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var b=a(this),e=b.data("target")||b.attr("href"),f=/^#./.test(e)&&a(e);return f&&f.length&&f.is(":visible")&&[[f[c]().top+d,e]]||null}).sort(function(a,b){return a[0]-b[0]}).each(function(){b.offsets.push(this[0]),b.targets.push(this[1])})},b.prototype.process=function(){var a,b=this.$scrollElement.scrollTop()+this.options.offset,c=this.getScrollHeight(),d=this.options.offset+c-this.$scrollElement.height(),e=this.offsets,f=this.targets,g=this.activeTarget;if(this.scrollHeight!=c&&this.refresh(),b>=d)return g!=(a=f[f.length-1])&&this.activate(a);if(g&&b=e[a]&&(void 0===e[a+1]||b .dropdown-menu > .active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),b.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),h?(b[0].offsetWidth,b.addClass("in")):b.removeClass("fade"),b.parent(".dropdown-menu").length&&b.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),e&&e()}var g=d.find("> .active"),h=e&&a.support.transition&&(g.length&&g.hasClass("fade")||!!d.find("> .fade").length);g.length&&h?g.one("bsTransitionEnd",f).emulateTransitionEnd(c.TRANSITION_DURATION):f(),g.removeClass("in")};var d=a.fn.tab;a.fn.tab=b,a.fn.tab.Constructor=c,a.fn.tab.noConflict=function(){return a.fn.tab=d,this};var e=function(c){c.preventDefault(),b.call(a(this),"show")};a(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',e).on("click.bs.tab.data-api",'[data-toggle="pill"]',e)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.affix"),f="object"==typeof b&&b;e||d.data("bs.affix",e=new c(this,f)),"string"==typeof b&&e[b]()})}var c=function(b,d){this.options=a.extend({},c.DEFAULTS,d),this.$target=a(this.options.target).on("scroll.bs.affix.data-api",a.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",a.proxy(this.checkPositionWithEventLoop,this)),this.$element=a(b),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};c.VERSION="3.3.7",c.RESET="affix affix-top affix-bottom",c.DEFAULTS={offset:0,target:window},c.prototype.getState=function(a,b,c,d){var e=this.$target.scrollTop(),f=this.$element.offset(),g=this.$target.height();if(null!=c&&"top"==this.affixed)return e=a-d&&"bottom"},c.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(c.RESET).addClass("affix");var a=this.$target.scrollTop(),b=this.$element.offset();return this.pinnedOffset=b.top-a},c.prototype.checkPositionWithEventLoop=function(){setTimeout(a.proxy(this.checkPosition,this),1)},c.prototype.checkPosition=function(){if(this.$element.is(":visible")){var 
b=this.$element.height(),d=this.options.offset,e=d.top,f=d.bottom,g=Math.max(a(document).height(),a(document.body).height());"object"!=typeof d&&(f=e=d),"function"==typeof e&&(e=d.top(this.$element)),"function"==typeof f&&(f=d.bottom(this.$element));var h=this.getState(g,b,e,f);if(this.affixed!=h){null!=this.unpin&&this.$element.css("top","");var i="affix"+(h?"-"+h:""),j=a.Event(i+".bs.affix");if(this.$element.trigger(j),j.isDefaultPrevented())return;this.affixed=h,this.unpin="bottom"==h?this.getPinnedOffset():null,this.$element.removeClass(c.RESET).addClass(i).trigger(i.replace("affix","affixed")+".bs.affix")}"bottom"==h&&this.$element.offset({top:g-b-f})}};var d=a.fn.affix;a.fn.affix=b,a.fn.affix.Constructor=c,a.fn.affix.noConflict=function(){return a.fn.affix=d,this},a(window).on("load",function(){a('[data-spy="affix"]').each(function(){var c=a(this),d=c.data();d.offset=d.offset||{},null!=d.offsetBottom&&(d.offset.bottom=d.offsetBottom),null!=d.offsetTop&&(d.offset.top=d.offsetTop),b.call(c,d)})})}(jQuery);
\ No newline at end of file
diff --git a/scripts/npm.js b/scripts/npm.js
new file mode 100644
index 0000000..bf6aa80
--- /dev/null
+++ b/scripts/npm.js
@@ -0,0 +1,13 @@
+// This file is autogenerated via the `commonjs` Grunt task. You can require() this file in a CommonJS environment.
+require('../../js/transition.js')
+require('../../js/alert.js')
+require('../../js/button.js')
+require('../../js/carousel.js')
+require('../../js/collapse.js')
+require('../../js/dropdown.js')
+require('../../js/modal.js')
+require('../../js/tooltip.js')
+require('../../js/popover.js')
+require('../../js/scrollspy.js')
+require('../../js/tab.js')
+require('../../js/affix.js')
\ No newline at end of file
diff --git a/scripts/script.js b/scripts/script.js
new file mode 100644
index 0000000..d8508b1
--- /dev/null
+++ b/scripts/script.js
@@ -0,0 +1,125 @@
+(function() {
+  var dehighlightPubPills, highlightPubPills;
+
+  dehighlightPubPills = function() {
+    return $('.pub_badge').removeClass('active');
+  };
+
+  highlightPubPills = function(tags) {
+    return tags.forEach(function(t) {
+      return $('#pub_' + t).addClass('active');
+    });
+  };
+
+  this.util = {
+    hasField: function(obj, field, values) {
+      var ovs;
+      if (obj[field]) {
+        ovs = obj[field];
+        if (typeof ovs === 'string') {
+          ovs = ovs.split(',');
+        }
+        if (typeof values === 'string') {
+          values = values.split(',');
+        }
+        return !values.every(function(v) {
+          return ovs.indexOf(v) < 0;
+        });
+      }
+      return false;
+    },
+    hasAllFields: function(obj, field, values) {
+      var ovs;
+      if (obj[field]) {
+        ovs = obj[field];
+        if (typeof ovs === 'string') {
+          ovs = ovs.split(',');
+        }
+        if (typeof values === 'string') {
+          values = values.split(',');
+        }
+        return values.every(function(v) {
+          if (v.startsWith('!')) {
+            return ovs.indexOf(v.slice(1)) < 0;
+          } else {
+            return ovs.indexOf(v) >= 0;
+          }
+        });
+      }
+      return false;
+    },
+    hasSet: function(obj, field, sets) {
+      if (obj[field]) {
+        if (!Array.isArray(sets)) {
+          sets = [sets];
+        }
+        return !sets.every(function(vs) {
+          return !util.hasAllFields(obj, field, vs);
+        });
+      }
+      return false;
+    },
+    showPubs: function(field, values) {
+      if (!Array.isArray(values)) {
+        values = [values];
+      }
+      dehighlightPubPills();
+      highlightPubPills(values);
+      $('.year').each(function(x, i) {
+        return $(this).show();
+      });
+      $('.paper').each(function(x, i) {
+        var d;
+        d = $(this).data();
+        if (util.hasField(d, field, values)) {
+          return $(this).show();
+        } else {
+          return $(this).hide();
+        }
+      });
+      return $('.year').each(function(x, i) {
+        if ($(this).find('.paper:visible').size() > 0) {
+          return $(this).show();
+        } else {
+          return $(this).hide();
+        }
+      });
+    },
+    showPubsInTopic: function(topic, tagsets) {
+      if (!Array.isArray(tagsets)) {
+        tagsets = [tagsets];
+      }
+      dehighlightPubPills();
+      $('.year').each(function(x, i) {
+        return $(this).show();
+      });
+      $('.paper').each(function(x, i) {
+        var d;
+        d = $(this).data();
+        if (util.hasSet(d, 'tags', tagsets)) {
+          return $(this).show();
+        } else {
+          return $(this).hide();
+        }
+      });
+      return $('.year').each(function(x, i) {
+        if ($(this).find('.paper:visible').size() > 0) {
+          return $(this).show();
+        } else {
+          return $(this).hide();
+        }
+      });
+    },
+    showAllPubs: function() {
+      dehighlightPubPills();
+      highlightPubPills(['all']);
+      $('.paper').each(function(x, i) {
+        return $(this).show();
+      });
+      return $('.year').each(function(x, i) {
+        return $(this).show();
+      });
+    }
+  };
+
+}).call(this);
diff --git a/styles/style.css b/styles/style.css
new file mode 100644
index 0000000..464cd55
--- /dev/null
+++ b/styles/style.css
@@ -0,0 +1,2 @@
+@import url("https://fonts.googleapis.com/css?family=Lato");
+body{font-family:Lato}h1,h2{color:#8c1515;margin-top:10px}a,a:hover{color:#8c1515}h3,h4{font-weight:bold}.red{color:#8c1515}.vertical-center,.paper-img,.project-img{display:-webkit-box;display:-moz-box;display:-webkit-flex;display:-ms-flexbox;display:box;display:flex;-webkit-box-align:center;-moz-box-align:center;-o-box-align:center;-ms-flex-align:center;-webkit-align-items:center;align-items:center}.panel-body{font-size:12pt}.paper-img{min-height:150px}.project-img{min-height:200px}.selectable{cursor:pointer}.badge.active{background-color:#ffa500 !important;border-color:#ff0 !important}.oneline-li{display:inline-block;margin:0 10px}
\ No newline at end of file
diff --git a/styles/twitter-bootstrap.css b/styles/twitter-bootstrap.css
new file mode 100644
index 0000000..4cc888a
--- /dev/null
+++ b/styles/twitter-bootstrap.css
@@ -0,0 +1,5 @@
+/*!
+ * Bootstrap v3.3.7 (http://getbootstrap.com)
+ * Copyright 2011-2016 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ *//*!
normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */html{font-family:sans-serif;-ms-text-size-adjust:100%;-webkit-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:bold}dfn{font-style:italic}h1{font-size:2em;margin:.67em 0}mark{background:#ff0;color:#000}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-0.5em}sub{bottom:-0.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em 40px}hr{box-sizing:content-box;height:0}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace,monospace;font-size:1em}button,input,optgroup,select,textarea{color:inherit;font:inherit;margin:0}button{overflow:visible}button,select{text-transform:none}button,html input[type="button"],input[type="reset"],input[type="submit"]{-webkit-appearance:button;cursor:pointer}button[disabled],html input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}input{line-height:normal}input[type="checkbox"],input[type="radio"]{box-sizing:border-box;padding:0}input[type="number"]::-webkit-inner-spin-button,input[type="number"]::-webkit-outer-spin-button{height:auto}input[type="search"]{-webkit-appearance:textfield;box-sizing:content-box}input[type="search"]::-webkit-search-cancel-button,input[type="search"]::-webkit-search-decoration{-webkit-appearance:none}fieldset{border:1px solid #c0c0c0;margin:0 2px;padding:.35em .625em .75em}legend{border:0;padding:0}textarea{overflow:auto}optgroup{font-weight:bold}table{border-collapse:collapse;border-spacing:0}td,th{padding:0}/*! 
Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */@media print{*,*:before,*:after{background:transparent !important;color:#000 !important;box-shadow:none !important;text-shadow:none !important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}a[href^="#"]:after,a[href^="javascript:"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100% !important}p,h2,h3{orphans:3;widows:3}h2,h3{page-break-after:avoid}.navbar{display:none}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000 !important}.label{border:1px solid #000}.table{border-collapse:collapse !important}.table td,.table th{background-color:#fff !important}.table-bordered th,.table-bordered td{border:1px solid #ddd !important}}@font-face{font-family:'Glyphicons Halflings';src:url('../fonts/glyphicons-halflings-regular.eot');src:url('../fonts/glyphicons-halflings-regular.eot?#iefix') format('embedded-opentype'),url('../fonts/glyphicons-halflings-regular.woff2') format('woff2'),url('../fonts/glyphicons-halflings-regular.woff') format('woff'),url('../fonts/glyphicons-halflings-regular.ttf') format('truetype'),url('../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular') format('svg')}.glyphicon{position:relative;top:1px;display:inline-block;font-family:'Glyphicons Halflings';font-style:normal;font-weight:normal;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.glyphicon-asterisk:before{content:"\002a"}.glyphicon-plus:before{content:"\002b"}.glyphicon-euro:before,.glyphicon-eur:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\270f"}.glyphicon-glass:before{content:"\e001"}.glyphicon-music:before{content:"\e002"}.glyphicon-search:before{content:"\e003"}.glyphicon-heart:before{content:"\e005"}.glyphicon-star:before{content:"\e006"}.glyphicon-star-empty:before{content:"\e007"}.glyphicon-user:before{content:"\e008"}.glyphicon-film:before{content:"\e009"}.glyphicon-th-large:before{content:"\e010"}.glyphicon-th:before{content:"\e011"}.glyphicon-th-list:before{content:"\e012"}.glyphicon-ok:before{content:"\e013"}.glyphicon-remove:before{content:"\e014"}.glyphicon-zoom-in:before{content:"\e015"}.glyphicon-zoom-out:before{content:"\e016"}.glyphicon-off:before{content:"\e017"}.glyphicon-signal:before{content:"\e018"}.glyphicon-cog:before{content:"\e019"}.glyphicon-trash:before{content:"\e020"}.glyphicon-home:before{content:"\e021"}.glyphicon-file:before{content:"\e022"}.glyphicon-time:before{content:"\e023"}.glyphicon-road:before{content:"\e024"}.glyphicon-download-alt:before{content:"\e025"}.glyphicon-download:before{content:"\e026"}.glyphicon-upload:before{content:"\e027"}.glyphicon-inbox:before{content:"\e028"}.glyphicon-play-circle:before{content:"\e029"}.glyphicon-repeat:before{content:"\e030"}.glyphicon-refresh:before{content:"\e031"}.glyphicon-list-alt:before{content:"\e032"}.glyphicon-lock:before{content:"\e033"}.glyphicon-flag:before{content:"\e034"}.glyphicon-headphones:before{content:"\e035"}.glyphicon-volume-off:before{content:"\e036"}.glyphicon-volume-down:before{content:"\e037"}.glyphicon-volume-up:before{content:"\e038"}.glyphicon-qrcode:before{content:"\e039"}.glyphicon-barcode:before{content:"\e040"}.glyphicon-tag:before{content:"\e041"}.glyphicon
-tags:before{content:"\e042"}.glyphicon-book:before{content:"\e043"}.glyphicon-bookmark:before{content:"\e044"}.glyphicon-print:before{content:"\e045"}.glyphicon-camera:before{content:"\e046"}.glyphicon-font:before{content:"\e047"}.glyphicon-bold:before{content:"\e048"}.glyphicon-italic:before{content:"\e049"}.glyphicon-text-height:before{content:"\e050"}.glyphicon-text-width:before{content:"\e051"}.glyphicon-align-left:before{content:"\e052"}.glyphicon-align-center:before{content:"\e053"}.glyphicon-align-right:before{content:"\e054"}.glyphicon-align-justify:before{content:"\e055"}.glyphicon-list:before{content:"\e056"}.glyphicon-indent-left:before{content:"\e057"}.glyphicon-indent-right:before{content:"\e058"}.glyphicon-facetime-video:before{content:"\e059"}.glyphicon-picture:before{content:"\e060"}.glyphicon-map-marker:before{content:"\e062"}.glyphicon-adjust:before{content:"\e063"}.glyphicon-tint:before{content:"\e064"}.glyphicon-edit:before{content:"\e065"}.glyphicon-share:before{content:"\e066"}.glyphicon-check:before{content:"\e067"}.glyphicon-move:before{content:"\e068"}.glyphicon-step-backward:before{content:"\e069"}.glyphicon-fast-backward:before{content:"\e070"}.glyphicon-backward:before{content:"\e071"}.glyphicon-play:before{content:"\e072"}.glyphicon-pause:before{content:"\e073"}.glyphicon-stop:before{content:"\e074"}.glyphicon-forward:before{content:"\e075"}.glyphicon-fast-forward:before{content:"\e076"}.glyphicon-step-forward:before{content:"\e077"}.glyphicon-eject:before{content:"\e078"}.glyphicon-chevron-left:before{content:"\e079"}.glyphicon-chevron-right:before{content:"\e080"}.glyphicon-plus-sign:before{content:"\e081"}.glyphicon-minus-sign:before{content:"\e082"}.glyphicon-remove-sign:before{content:"\e083"}.glyphicon-ok-sign:before{content:"\e084"}.glyphicon-question-sign:before{content:"\e085"}.glyphicon-info-sign:before{content:"\e086"}.glyphicon-screenshot:before{content:"\e087"}.glyphicon-remove-circle:before{content:"\e088"}.glyphicon-ok-circle:before{content:"\e089"}.glyphicon-ban-circle:before{content:"\e090"}.glyphicon-arrow-left:before{content:"\e091"}.glyphicon-arrow-right:before{content:"\e092"}.glyphicon-arrow-up:before{content:"\e093"}.glyphicon-arrow-down:before{content:"\e094"}.glyphicon-share-alt:before{content:"\e095"}.glyphicon-resize-full:before{content:"\e096"}.glyphicon-resize-small:before{content:"\e097"}.glyphicon-exclamation-sign:before{content:"\e101"}.glyphicon-gift:before{content:"\e102"}.glyphicon-leaf:before{content:"\e103"}.glyphicon-fire:before{content:"\e104"}.glyphicon-eye-open:before{content:"\e105"}.glyphicon-eye-close:before{content:"\e106"}.glyphicon-warning-sign:before{content:"\e107"}.glyphicon-plane:before{content:"\e108"}.glyphicon-calendar:before{content:"\e109"}.glyphicon-random:before{content:"\e110"}.glyphicon-comment:before{content:"\e111"}.glyphicon-magnet:before{content:"\e112"}.glyphicon-chevron-up:before{content:"\e113"}.glyphicon-chevron-down:before{content:"\e114"}.glyphicon-retweet:before{content:"\e115"}.glyphicon-shopping-cart:before{content:"\e116"}.glyphicon-folder-close:before{content:"\e117"}.glyphicon-folder-open:before{content:"\e118"}.glyphicon-resize-vertical:before{content:"\e119"}.glyphicon-resize-horizontal:before{content:"\e120"}.glyphicon-hdd:before{content:"\e121"}.glyphicon-bullhorn:before{content:"\e122"}.glyphicon-bell:before{content:"\e123"}.glyphicon-certificate:before{content:"\e124"}.glyphicon-thumbs-up:before{content:"\e125"}.glyphicon-thumbs-down:before{content:"\e126"}.glyphicon-hand-right:be
fore{content:"\e127"}.glyphicon-hand-left:before{content:"\e128"}.glyphicon-hand-up:before{content:"\e129"}.glyphicon-hand-down:before{content:"\e130"}.glyphicon-circle-arrow-right:before{content:"\e131"}.glyphicon-circle-arrow-left:before{content:"\e132"}.glyphicon-circle-arrow-up:before{content:"\e133"}.glyphicon-circle-arrow-down:before{content:"\e134"}.glyphicon-globe:before{content:"\e135"}.glyphicon-wrench:before{content:"\e136"}.glyphicon-tasks:before{content:"\e137"}.glyphicon-filter:before{content:"\e138"}.glyphicon-briefcase:before{content:"\e139"}.glyphicon-fullscreen:before{content:"\e140"}.glyphicon-dashboard:before{content:"\e141"}.glyphicon-paperclip:before{content:"\e142"}.glyphicon-heart-empty:before{content:"\e143"}.glyphicon-link:before{content:"\e144"}.glyphicon-phone:before{content:"\e145"}.glyphicon-pushpin:before{content:"\e146"}.glyphicon-usd:before{content:"\e148"}.glyphicon-gbp:before{content:"\e149"}.glyphicon-sort:before{content:"\e150"}.glyphicon-sort-by-alphabet:before{content:"\e151"}.glyphicon-sort-by-alphabet-alt:before{content:"\e152"}.glyphicon-sort-by-order:before{content:"\e153"}.glyphicon-sort-by-order-alt:before{content:"\e154"}.glyphicon-sort-by-attributes:before{content:"\e155"}.glyphicon-sort-by-attributes-alt:before{content:"\e156"}.glyphicon-unchecked:before{content:"\e157"}.glyphicon-expand:before{content:"\e158"}.glyphicon-collapse-down:before{content:"\e159"}.glyphicon-collapse-up:before{content:"\e160"}.glyphicon-log-in:before{content:"\e161"}.glyphicon-flash:before{content:"\e162"}.glyphicon-log-out:before{content:"\e163"}.glyphicon-new-window:before{content:"\e164"}.glyphicon-record:before{content:"\e165"}.glyphicon-save:before{content:"\e166"}.glyphicon-open:before{content:"\e167"}.glyphicon-saved:before{content:"\e168"}.glyphicon-import:before{content:"\e169"}.glyphicon-export:before{content:"\e170"}.glyphicon-send:before{content:"\e171"}.glyphicon-floppy-disk:before{content:"\e172"}.glyphicon-floppy-saved:before{content:"\e173"}.glyphicon-floppy-remove:before{content:"\e174"}.glyphicon-floppy-save:before{content:"\e175"}.glyphicon-floppy-open:before{content:"\e176"}.glyphicon-credit-card:before{content:"\e177"}.glyphicon-transfer:before{content:"\e178"}.glyphicon-cutlery:before{content:"\e179"}.glyphicon-header:before{content:"\e180"}.glyphicon-compressed:before{content:"\e181"}.glyphicon-earphone:before{content:"\e182"}.glyphicon-phone-alt:before{content:"\e183"}.glyphicon-tower:before{content:"\e184"}.glyphicon-stats:before{content:"\e185"}.glyphicon-sd-video:before{content:"\e186"}.glyphicon-hd-video:before{content:"\e187"}.glyphicon-subtitles:before{content:"\e188"}.glyphicon-sound-stereo:before{content:"\e189"}.glyphicon-sound-dolby:before{content:"\e190"}.glyphicon-sound-5-1:before{content:"\e191"}.glyphicon-sound-6-1:before{content:"\e192"}.glyphicon-sound-7-1:before{content:"\e193"}.glyphicon-copyright-mark:before{content:"\e194"}.glyphicon-registration-mark:before{content:"\e195"}.glyphicon-cloud-download:before{content:"\e197"}.glyphicon-cloud-upload:before{content:"\e198"}.glyphicon-tree-conifer:before{content:"\e199"}.glyphicon-tree-deciduous:before{content:"\e200"}.glyphicon-cd:before{content:"\e201"}.glyphicon-save-file:before{content:"\e202"}.glyphicon-open-file:before{content:"\e203"}.glyphicon-level-up:before{content:"\e204"}.glyphicon-copy:before{content:"\e205"}.glyphicon-paste:before{content:"\e206"}.glyphicon-alert:before{content:"\e209"}.glyphicon-equalizer:before{content:"\e210"}.glyphicon-king:before{content:"\e211
"}.glyphicon-queen:before{content:"\e212"}.glyphicon-pawn:before{content:"\e213"}.glyphicon-bishop:before{content:"\e214"}.glyphicon-knight:before{content:"\e215"}.glyphicon-baby-formula:before{content:"\e216"}.glyphicon-tent:before{content:"\26fa"}.glyphicon-blackboard:before{content:"\e218"}.glyphicon-bed:before{content:"\e219"}.glyphicon-apple:before{content:"\f8ff"}.glyphicon-erase:before{content:"\e221"}.glyphicon-hourglass:before{content:"\231b"}.glyphicon-lamp:before{content:"\e223"}.glyphicon-duplicate:before{content:"\e224"}.glyphicon-piggy-bank:before{content:"\e225"}.glyphicon-scissors:before{content:"\e226"}.glyphicon-bitcoin:before{content:"\e227"}.glyphicon-btc:before{content:"\e227"}.glyphicon-xbt:before{content:"\e227"}.glyphicon-yen:before{content:"\00a5"}.glyphicon-jpy:before{content:"\00a5"}.glyphicon-ruble:before{content:"\20bd"}.glyphicon-rub:before{content:"\20bd"}.glyphicon-scale:before{content:"\e230"}.glyphicon-ice-lolly:before{content:"\e231"}.glyphicon-ice-lolly-tasted:before{content:"\e232"}.glyphicon-education:before{content:"\e233"}.glyphicon-option-horizontal:before{content:"\e234"}.glyphicon-option-vertical:before{content:"\e235"}.glyphicon-menu-hamburger:before{content:"\e236"}.glyphicon-modal-window:before{content:"\e237"}.glyphicon-oil:before{content:"\e238"}.glyphicon-grain:before{content:"\e239"}.glyphicon-sunglasses:before{content:"\e240"}.glyphicon-text-size:before{content:"\e241"}.glyphicon-text-color:before{content:"\e242"}.glyphicon-text-background:before{content:"\e243"}.glyphicon-object-align-top:before{content:"\e244"}.glyphicon-object-align-bottom:before{content:"\e245"}.glyphicon-object-align-horizontal:before{content:"\e246"}.glyphicon-object-align-left:before{content:"\e247"}.glyphicon-object-align-vertical:before{content:"\e248"}.glyphicon-object-align-right:before{content:"\e249"}.glyphicon-triangle-right:before{content:"\e250"}.glyphicon-triangle-left:before{content:"\e251"}.glyphicon-triangle-bottom:before{content:"\e252"}.glyphicon-triangle-top:before{content:"\e253"}.glyphicon-console:before{content:"\e254"}.glyphicon-superscript:before{content:"\e255"}.glyphicon-subscript:before{content:"\e256"}.glyphicon-menu-left:before{content:"\e257"}.glyphicon-menu-right:before{content:"\e258"}.glyphicon-menu-down:before{content:"\e259"}.glyphicon-menu-up:before{content:"\e260"}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}*:before,*:after{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:10px;-webkit-tap-highlight-color:rgba(0,0,0,0)}body{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:1.42857143;color:#333;background-color:#fff}input,button,select,textarea{font-family:inherit;font-size:inherit;line-height:inherit}a{color:#337ab7;text-decoration:none}a:hover,a:focus{color:#23527c;text-decoration:underline}a:focus{outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}figure{margin:0}img{vertical-align:middle}.img-responsive,.thumbnail>img,.thumbnail a>img,.carousel-inner>.item>img,.carousel-inner>.item>a>img{display:block;max-width:100%;height:auto}.img-rounded{border-radius:6px}.img-thumbnail{padding:4px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:all .2s ease-in-out;-o-transition:all .2s ease-in-out;transition:all .2s 
ease-in-out;display:inline-block;max-width:100%;height:auto}.img-circle{border-radius:50%}hr{margin-top:20px;margin-bottom:20px;border:0;border-top:1px solid #eee}.sr-only{position:absolute;width:1px;height:1px;margin:-1px;padding:0;overflow:hidden;clip:rect(0, 0, 0, 0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}[role="button"]{cursor:pointer}h1,h2,h3,h4,h5,h6,.h1,.h2,.h3,.h4,.h5,.h6{font-family:inherit;font-weight:500;line-height:1.1;color:inherit}h1 small,h2 small,h3 small,h4 small,h5 small,h6 small,.h1 small,.h2 small,.h3 small,.h4 small,.h5 small,.h6 small,h1 .small,h2 .small,h3 .small,h4 .small,h5 .small,h6 .small,.h1 .small,.h2 .small,.h3 .small,.h4 .small,.h5 .small,.h6 .small{font-weight:normal;line-height:1;color:#777}h1,.h1,h2,.h2,h3,.h3{margin-top:20px;margin-bottom:10px}h1 small,.h1 small,h2 small,.h2 small,h3 small,.h3 small,h1 .small,.h1 .small,h2 .small,.h2 .small,h3 .small,.h3 .small{font-size:65%}h4,.h4,h5,.h5,h6,.h6{margin-top:10px;margin-bottom:10px}h4 small,.h4 small,h5 small,.h5 small,h6 small,.h6 small,h4 .small,.h4 .small,h5 .small,.h5 .small,h6 .small,.h6 .small{font-size:75%}h1,.h1{font-size:36px}h2,.h2{font-size:30px}h3,.h3{font-size:24px}h4,.h4{font-size:18px}h5,.h5{font-size:14px}h6,.h6{font-size:12px}p{margin:0 0 10px}.lead{margin-bottom:20px;font-size:16px;font-weight:300;line-height:1.4}@media (min-width:768px){.lead{font-size:21px}}small,.small{font-size:85%}mark,.mark{background-color:#fcf8e3;padding:.2em}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}.text-justify{text-align:justify}.text-nowrap{white-space:nowrap}.text-lowercase{text-transform:lowercase}.text-uppercase{text-transform:uppercase}.text-capitalize{text-transform:capitalize}.text-muted{color:#777}.text-primary{color:#337ab7}a.text-primary:hover,a.text-primary:focus{color:#286090}.text-success{color:#3c763d}a.text-success:hover,a.text-success:focus{color:#2b542c}.text-info{color:#31708f}a.text-info:hover,a.text-info:focus{color:#245269}.text-warning{color:#8a6d3b}a.text-warning:hover,a.text-warning:focus{color:#66512c}.text-danger{color:#a94442}a.text-danger:hover,a.text-danger:focus{color:#843534}.bg-primary{color:#fff;background-color:#337ab7}a.bg-primary:hover,a.bg-primary:focus{background-color:#286090}.bg-success{background-color:#dff0d8}a.bg-success:hover,a.bg-success:focus{background-color:#c1e2b3}.bg-info{background-color:#d9edf7}a.bg-info:hover,a.bg-info:focus{background-color:#afd9ee}.bg-warning{background-color:#fcf8e3}a.bg-warning:hover,a.bg-warning:focus{background-color:#f7ecb5}.bg-danger{background-color:#f2dede}a.bg-danger:hover,a.bg-danger:focus{background-color:#e4b9b9}.page-header{padding-bottom:9px;margin:40px 0 20px;border-bottom:1px solid #eee}ul,ol{margin-top:0;margin-bottom:10px}ul ul,ol ul,ul ol,ol ol{margin-bottom:0}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;list-style:none;margin-left:-5px}.list-inline>li{display:inline-block;padding-left:5px;padding-right:5px}dl{margin-top:0;margin-bottom:20px}dt,dd{line-height:1.42857143}dt{font-weight:bold}dd{margin-left:0}@media (min-width:768px){.dl-horizontal dt{float:left;width:160px;clear:left;text-align:right;overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}}abbr[title],abbr[data-original-title]{cursor:help;border-bottom:1px dotted #777}.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:10px 
20px;margin:0 0 20px;font-size:17.5px;border-left:5px solid #eee}blockquote p:last-child,blockquote ul:last-child,blockquote ol:last-child{margin-bottom:0}blockquote footer,blockquote small,blockquote .small{display:block;font-size:80%;line-height:1.42857143;color:#777}blockquote footer:before,blockquote small:before,blockquote .small:before{content:'\2014 \00A0'}.blockquote-reverse,blockquote.pull-right{padding-right:15px;padding-left:0;border-right:5px solid #eee;border-left:0;text-align:right}.blockquote-reverse footer:before,blockquote.pull-right footer:before,.blockquote-reverse small:before,blockquote.pull-right small:before,.blockquote-reverse .small:before,blockquote.pull-right .small:before{content:''}.blockquote-reverse footer:after,blockquote.pull-right footer:after,.blockquote-reverse small:after,blockquote.pull-right small:after,.blockquote-reverse .small:after,blockquote.pull-right .small:after{content:'\00A0 \2014'}address{margin-bottom:20px;font-style:normal;line-height:1.42857143}code,kbd,pre,samp{font-family:Menlo,Monaco,Consolas,"Courier New",monospace}code{padding:2px 4px;font-size:90%;color:#c7254e;background-color:#f9f2f4;border-radius:4px}kbd{padding:2px 4px;font-size:90%;color:#fff;background-color:#333;border-radius:3px;box-shadow:inset 0 -1px 0 rgba(0,0,0,0.25)}kbd kbd{padding:0;font-size:100%;font-weight:bold;box-shadow:none}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:1.42857143;word-break:break-all;word-wrap:break-word;color:#333;background-color:#f5f5f5;border:1px solid #ccc;border-radius:4px}pre code{padding:0;font-size:inherit;color:inherit;white-space:pre-wrap;background-color:transparent;border-radius:0}.pre-scrollable{max-height:340px;overflow-y:scroll}.container{margin-right:auto;margin-left:auto;padding-left:15px;padding-right:15px}@media (min-width:768px){.container{width:750px}}@media (min-width:992px){.container{width:970px}}@media (min-width:1200px){.container{width:1170px}}.container-fluid{margin-right:auto;margin-left:auto;padding-left:15px;padding-right:15px}.row{margin-left:-15px;margin-right:-15px}.col-xs-1, .col-sm-1, .col-md-1, .col-lg-1, .col-xs-2, .col-sm-2, .col-md-2, .col-lg-2, .col-xs-3, .col-sm-3, .col-md-3, .col-lg-3, .col-xs-4, .col-sm-4, .col-md-4, .col-lg-4, .col-xs-5, .col-sm-5, .col-md-5, .col-lg-5, .col-xs-6, .col-sm-6, .col-md-6, .col-lg-6, .col-xs-7, .col-sm-7, .col-md-7, .col-lg-7, .col-xs-8, .col-sm-8, .col-md-8, .col-lg-8, .col-xs-9, .col-sm-9, .col-md-9, .col-lg-9, .col-xs-10, .col-sm-10, .col-md-10, .col-lg-10, .col-xs-11, .col-sm-11, .col-md-11, .col-lg-11, .col-xs-12, .col-sm-12, .col-md-12, .col-lg-12{position:relative;min-height:1px;padding-left:15px;padding-right:15px}.col-xs-1, .col-xs-2, .col-xs-3, .col-xs-4, .col-xs-5, .col-xs-6, .col-xs-7, .col-xs-8, .col-xs-9, .col-xs-10, .col-xs-11, 
.col-xs-12{float:left}.col-xs-12{width:100%}.col-xs-11{width:91.66666667%}.col-xs-10{width:83.33333333%}.col-xs-9{width:75%}.col-xs-8{width:66.66666667%}.col-xs-7{width:58.33333333%}.col-xs-6{width:50%}.col-xs-5{width:41.66666667%}.col-xs-4{width:33.33333333%}.col-xs-3{width:25%}.col-xs-2{width:16.66666667%}.col-xs-1{width:8.33333333%}.col-xs-pull-12{right:100%}.col-xs-pull-11{right:91.66666667%}.col-xs-pull-10{right:83.33333333%}.col-xs-pull-9{right:75%}.col-xs-pull-8{right:66.66666667%}.col-xs-pull-7{right:58.33333333%}.col-xs-pull-6{right:50%}.col-xs-pull-5{right:41.66666667%}.col-xs-pull-4{right:33.33333333%}.col-xs-pull-3{right:25%}.col-xs-pull-2{right:16.66666667%}.col-xs-pull-1{right:8.33333333%}.col-xs-pull-0{right:auto}.col-xs-push-12{left:100%}.col-xs-push-11{left:91.66666667%}.col-xs-push-10{left:83.33333333%}.col-xs-push-9{left:75%}.col-xs-push-8{left:66.66666667%}.col-xs-push-7{left:58.33333333%}.col-xs-push-6{left:50%}.col-xs-push-5{left:41.66666667%}.col-xs-push-4{left:33.33333333%}.col-xs-push-3{left:25%}.col-xs-push-2{left:16.66666667%}.col-xs-push-1{left:8.33333333%}.col-xs-push-0{left:auto}.col-xs-offset-12{margin-left:100%}.col-xs-offset-11{margin-left:91.66666667%}.col-xs-offset-10{margin-left:83.33333333%}.col-xs-offset-9{margin-left:75%}.col-xs-offset-8{margin-left:66.66666667%}.col-xs-offset-7{margin-left:58.33333333%}.col-xs-offset-6{margin-left:50%}.col-xs-offset-5{margin-left:41.66666667%}.col-xs-offset-4{margin-left:33.33333333%}.col-xs-offset-3{margin-left:25%}.col-xs-offset-2{margin-left:16.66666667%}.col-xs-offset-1{margin-left:8.33333333%}.col-xs-offset-0{margin-left:0}@media (min-width:768px){.col-sm-1, .col-sm-2, .col-sm-3, .col-sm-4, .col-sm-5, .col-sm-6, .col-sm-7, .col-sm-8, .col-sm-9, .col-sm-10, .col-sm-11, .col-sm-12{float:left}.col-sm-12{width:100%}.col-sm-11{width:91.66666667%}.col-sm-10{width:83.33333333%}.col-sm-9{width:75%}.col-sm-8{width:66.66666667%}.col-sm-7{width:58.33333333%}.col-sm-6{width:50%}.col-sm-5{width:41.66666667%}.col-sm-4{width:33.33333333%}.col-sm-3{width:25%}.col-sm-2{width:16.66666667%}.col-sm-1{width:8.33333333%}.col-sm-pull-12{right:100%}.col-sm-pull-11{right:91.66666667%}.col-sm-pull-10{right:83.33333333%}.col-sm-pull-9{right:75%}.col-sm-pull-8{right:66.66666667%}.col-sm-pull-7{right:58.33333333%}.col-sm-pull-6{right:50%}.col-sm-pull-5{right:41.66666667%}.col-sm-pull-4{right:33.33333333%}.col-sm-pull-3{right:25%}.col-sm-pull-2{right:16.66666667%}.col-sm-pull-1{right:8.33333333%}.col-sm-pull-0{right:auto}.col-sm-push-12{left:100%}.col-sm-push-11{left:91.66666667%}.col-sm-push-10{left:83.33333333%}.col-sm-push-9{left:75%}.col-sm-push-8{left:66.66666667%}.col-sm-push-7{left:58.33333333%}.col-sm-push-6{left:50%}.col-sm-push-5{left:41.66666667%}.col-sm-push-4{left:33.33333333%}.col-sm-push-3{left:25%}.col-sm-push-2{left:16.66666667%}.col-sm-push-1{left:8.33333333%}.col-sm-push-0{left:auto}.col-sm-offset-12{margin-left:100%}.col-sm-offset-11{margin-left:91.66666667%}.col-sm-offset-10{margin-left:83.33333333%}.col-sm-offset-9{margin-left:75%}.col-sm-offset-8{margin-left:66.66666667%}.col-sm-offset-7{margin-left:58.33333333%}.col-sm-offset-6{margin-left:50%}.col-sm-offset-5{margin-left:41.66666667%}.col-sm-offset-4{margin-left:33.33333333%}.col-sm-offset-3{margin-left:25%}.col-sm-offset-2{margin-left:16.66666667%}.col-sm-offset-1{margin-left:8.33333333%}.col-sm-offset-0{margin-left:0}}@media (min-width:992px){.col-md-1, .col-md-2, .col-md-3, .col-md-4, .col-md-5, .col-md-6, .col-md-7, .col-md-8, .col-md-9, .col-md-10, .col-md-11, 
.col-md-12{float:left}.col-md-12{width:100%}.col-md-11{width:91.66666667%}.col-md-10{width:83.33333333%}.col-md-9{width:75%}.col-md-8{width:66.66666667%}.col-md-7{width:58.33333333%}.col-md-6{width:50%}.col-md-5{width:41.66666667%}.col-md-4{width:33.33333333%}.col-md-3{width:25%}.col-md-2{width:16.66666667%}.col-md-1{width:8.33333333%}.col-md-pull-12{right:100%}.col-md-pull-11{right:91.66666667%}.col-md-pull-10{right:83.33333333%}.col-md-pull-9{right:75%}.col-md-pull-8{right:66.66666667%}.col-md-pull-7{right:58.33333333%}.col-md-pull-6{right:50%}.col-md-pull-5{right:41.66666667%}.col-md-pull-4{right:33.33333333%}.col-md-pull-3{right:25%}.col-md-pull-2{right:16.66666667%}.col-md-pull-1{right:8.33333333%}.col-md-pull-0{right:auto}.col-md-push-12{left:100%}.col-md-push-11{left:91.66666667%}.col-md-push-10{left:83.33333333%}.col-md-push-9{left:75%}.col-md-push-8{left:66.66666667%}.col-md-push-7{left:58.33333333%}.col-md-push-6{left:50%}.col-md-push-5{left:41.66666667%}.col-md-push-4{left:33.33333333%}.col-md-push-3{left:25%}.col-md-push-2{left:16.66666667%}.col-md-push-1{left:8.33333333%}.col-md-push-0{left:auto}.col-md-offset-12{margin-left:100%}.col-md-offset-11{margin-left:91.66666667%}.col-md-offset-10{margin-left:83.33333333%}.col-md-offset-9{margin-left:75%}.col-md-offset-8{margin-left:66.66666667%}.col-md-offset-7{margin-left:58.33333333%}.col-md-offset-6{margin-left:50%}.col-md-offset-5{margin-left:41.66666667%}.col-md-offset-4{margin-left:33.33333333%}.col-md-offset-3{margin-left:25%}.col-md-offset-2{margin-left:16.66666667%}.col-md-offset-1{margin-left:8.33333333%}.col-md-offset-0{margin-left:0}}@media (min-width:1200px){.col-lg-1, .col-lg-2, .col-lg-3, .col-lg-4, .col-lg-5, .col-lg-6, .col-lg-7, .col-lg-8, .col-lg-9, .col-lg-10, .col-lg-11, .col-lg-12{float:left}.col-lg-12{width:100%}.col-lg-11{width:91.66666667%}.col-lg-10{width:83.33333333%}.col-lg-9{width:75%}.col-lg-8{width:66.66666667%}.col-lg-7{width:58.33333333%}.col-lg-6{width:50%}.col-lg-5{width:41.66666667%}.col-lg-4{width:33.33333333%}.col-lg-3{width:25%}.col-lg-2{width:16.66666667%}.col-lg-1{width:8.33333333%}.col-lg-pull-12{right:100%}.col-lg-pull-11{right:91.66666667%}.col-lg-pull-10{right:83.33333333%}.col-lg-pull-9{right:75%}.col-lg-pull-8{right:66.66666667%}.col-lg-pull-7{right:58.33333333%}.col-lg-pull-6{right:50%}.col-lg-pull-5{right:41.66666667%}.col-lg-pull-4{right:33.33333333%}.col-lg-pull-3{right:25%}.col-lg-pull-2{right:16.66666667%}.col-lg-pull-1{right:8.33333333%}.col-lg-pull-0{right:auto}.col-lg-push-12{left:100%}.col-lg-push-11{left:91.66666667%}.col-lg-push-10{left:83.33333333%}.col-lg-push-9{left:75%}.col-lg-push-8{left:66.66666667%}.col-lg-push-7{left:58.33333333%}.col-lg-push-6{left:50%}.col-lg-push-5{left:41.66666667%}.col-lg-push-4{left:33.33333333%}.col-lg-push-3{left:25%}.col-lg-push-2{left:16.66666667%}.col-lg-push-1{left:8.33333333%}.col-lg-push-0{left:auto}.col-lg-offset-12{margin-left:100%}.col-lg-offset-11{margin-left:91.66666667%}.col-lg-offset-10{margin-left:83.33333333%}.col-lg-offset-9{margin-left:75%}.col-lg-offset-8{margin-left:66.66666667%}.col-lg-offset-7{margin-left:58.33333333%}.col-lg-offset-6{margin-left:50%}.col-lg-offset-5{margin-left:41.66666667%}.col-lg-offset-4{margin-left:33.33333333%}.col-lg-offset-3{margin-left:25%}.col-lg-offset-2{margin-left:16.66666667%}.col-lg-offset-1{margin-left:8.33333333%}.col-lg-offset-0{margin-left:0}}table{background-color:transparent}caption{padding-top:8px;padding-bottom:8px;color:#777;text-align:left}th{text-align:left}.table{width:100%;max-wi
dth:100%;margin-bottom:20px}.table>thead>tr>th,.table>tbody>tr>th,.table>tfoot>tr>th,.table>thead>tr>td,.table>tbody>tr>td,.table>tfoot>tr>td{padding:8px;line-height:1.42857143;vertical-align:top;border-top:1px solid #ddd}.table>thead>tr>th{vertical-align:bottom;border-bottom:2px solid #ddd}.table>caption+thead>tr:first-child>th,.table>colgroup+thead>tr:first-child>th,.table>thead:first-child>tr:first-child>th,.table>caption+thead>tr:first-child>td,.table>colgroup+thead>tr:first-child>td,.table>thead:first-child>tr:first-child>td{border-top:0}.table>tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#fff}.table-condensed>thead>tr>th,.table-condensed>tbody>tr>th,.table-condensed>tfoot>tr>th,.table-condensed>thead>tr>td,.table-condensed>tbody>tr>td,.table-condensed>tfoot>tr>td{padding:5px}.table-bordered{border:1px solid #ddd}.table-bordered>thead>tr>th,.table-bordered>tbody>tr>th,.table-bordered>tfoot>tr>th,.table-bordered>thead>tr>td,.table-bordered>tbody>tr>td,.table-bordered>tfoot>tr>td{border:1px solid #ddd}.table-bordered>thead>tr>th,.table-bordered>thead>tr>td{border-bottom-width:2px}.table-striped>tbody>tr:nth-of-type(odd){background-color:#f9f9f9}.table-hover>tbody>tr:hover{background-color:#f5f5f5}table col[class*="col-"]{position:static;float:none;display:table-column}table td[class*="col-"],table th[class*="col-"]{position:static;float:none;display:table-cell}.table>thead>tr>td.active,.table>tbody>tr>td.active,.table>tfoot>tr>td.active,.table>thead>tr>th.active,.table>tbody>tr>th.active,.table>tfoot>tr>th.active,.table>thead>tr.active>td,.table>tbody>tr.active>td,.table>tfoot>tr.active>td,.table>thead>tr.active>th,.table>tbody>tr.active>th,.table>tfoot>tr.active>th{background-color:#f5f5f5}.table-hover>tbody>tr>td.active:hover,.table-hover>tbody>tr>th.active:hover,.table-hover>tbody>tr.active:hover>td,.table-hover>tbody>tr:hover>.active,.table-hover>tbody>tr.active:hover>th{background-color:#e8e8e8}.table>thead>tr>td.success,.table>tbody>tr>td.success,.table>tfoot>tr>td.success,.table>thead>tr>th.success,.table>tbody>tr>th.success,.table>tfoot>tr>th.success,.table>thead>tr.success>td,.table>tbody>tr.success>td,.table>tfoot>tr.success>td,.table>thead>tr.success>th,.table>tbody>tr.success>th,.table>tfoot>tr.success>th{background-color:#dff0d8}.table-hover>tbody>tr>td.success:hover,.table-hover>tbody>tr>th.success:hover,.table-hover>tbody>tr.success:hover>td,.table-hover>tbody>tr:hover>.success,.table-hover>tbody>tr.success:hover>th{background-color:#d0e9c6}.table>thead>tr>td.info,.table>tbody>tr>td.info,.table>tfoot>tr>td.info,.table>thead>tr>th.info,.table>tbody>tr>th.info,.table>tfoot>tr>th.info,.table>thead>tr.info>td,.table>tbody>tr.info>td,.table>tfoot>tr.info>td,.table>thead>tr.info>th,.table>tbody>tr.info>th,.table>tfoot>tr.info>th{background-color:#d9edf7}.table-hover>tbody>tr>td.info:hover,.table-hover>tbody>tr>th.info:hover,.table-hover>tbody>tr.info:hover>td,.table-hover>tbody>tr:hover>.info,.table-hover>tbody>tr.info:hover>th{background-color:#c4e3f3}.table>thead>tr>td.warning,.table>tbody>tr>td.warning,.table>tfoot>tr>td.warning,.table>thead>tr>th.warning,.table>tbody>tr>th.warning,.table>tfoot>tr>th.warning,.table>thead>tr.warning>td,.table>tbody>tr.warning>td,.table>tfoot>tr.warning>td,.table>thead>tr.warning>th,.table>tbody>tr.warning>th,.table>tfoot>tr.warning>th{background-color:#fcf8e3}.table-hover>tbody>tr>td.warning:hover,.table-hover>tbody>tr>th.warning:hover,.table-hover>tbody>tr.warning:hover>td,.table-hover>tbody>tr:hover>.warning,.table-ho
ver>tbody>tr.warning:hover>th{background-color:#faf2cc}.table>thead>tr>td.danger,.table>tbody>tr>td.danger,.table>tfoot>tr>td.danger,.table>thead>tr>th.danger,.table>tbody>tr>th.danger,.table>tfoot>tr>th.danger,.table>thead>tr.danger>td,.table>tbody>tr.danger>td,.table>tfoot>tr.danger>td,.table>thead>tr.danger>th,.table>tbody>tr.danger>th,.table>tfoot>tr.danger>th{background-color:#f2dede}.table-hover>tbody>tr>td.danger:hover,.table-hover>tbody>tr>th.danger:hover,.table-hover>tbody>tr.danger:hover>td,.table-hover>tbody>tr:hover>.danger,.table-hover>tbody>tr.danger:hover>th{background-color:#ebcccc}.table-responsive{overflow-x:auto;min-height:.01%}@media screen and (max-width:767px){.table-responsive{width:100%;margin-bottom:15px;overflow-y:hidden;-ms-overflow-style:-ms-autohiding-scrollbar;border:1px solid #ddd}.table-responsive>.table{margin-bottom:0}.table-responsive>.table>thead>tr>th,.table-responsive>.table>tbody>tr>th,.table-responsive>.table>tfoot>tr>th,.table-responsive>.table>thead>tr>td,.table-responsive>.table>tbody>tr>td,.table-responsive>.table>tfoot>tr>td{white-space:nowrap}.table-responsive>.table-bordered{border:0}.table-responsive>.table-bordered>thead>tr>th:first-child,.table-responsive>.table-bordered>tbody>tr>th:first-child,.table-responsive>.table-bordered>tfoot>tr>th:first-child,.table-responsive>.table-bordered>thead>tr>td:first-child,.table-responsive>.table-bordered>tbody>tr>td:first-child,.table-responsive>.table-bordered>tfoot>tr>td:first-child{border-left:0}.table-responsive>.table-bordered>thead>tr>th:last-child,.table-responsive>.table-bordered>tbody>tr>th:last-child,.table-responsive>.table-bordered>tfoot>tr>th:last-child,.table-responsive>.table-bordered>thead>tr>td:last-child,.table-responsive>.table-bordered>tbody>tr>td:last-child,.table-responsive>.table-bordered>tfoot>tr>td:last-child{border-right:0}.table-responsive>.table-bordered>tbody>tr:last-child>th,.table-responsive>.table-bordered>tfoot>tr:last-child>th,.table-responsive>.table-bordered>tbody>tr:last-child>td,.table-responsive>.table-bordered>tfoot>tr:last-child>td{border-bottom:0}}fieldset{padding:0;margin:0;border:0;min-width:0}legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;line-height:inherit;color:#333;border:0;border-bottom:1px solid #e5e5e5}label{display:inline-block;max-width:100%;margin-bottom:5px;font-weight:bold}input[type="search"]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type="radio"],input[type="checkbox"]{margin:4px 0 0;margin-top:1px \9;line-height:normal}input[type="file"]{display:block}input[type="range"]{display:block;width:100%}select[multiple],select[size]{height:auto}input[type="file"]:focus,input[type="radio"]:focus,input[type="checkbox"]:focus{outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}output{display:block;padding-top:7px;font-size:14px;line-height:1.42857143;color:#555}.form-control{display:block;width:100%;height:34px;padding:6px 12px;font-size:14px;line-height:1.42857143;color:#555;background-color:#fff;background-image:none;border:1px solid #ccc;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-webkit-transition:border-color ease-in-out .15s, box-shadow ease-in-out .15s;-o-transition:border-color ease-in-out .15s, box-shadow ease-in-out .15s;transition:border-color ease-in-out .15s, box-shadow ease-in-out .15s}.form-control:focus{border-color:#66afe9;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075), 0 
0 8px rgba(102, 175, 233, 0.6);box-shadow:inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6)}.form-control::-moz-placeholder{color:#999;opacity:1}.form-control:-ms-input-placeholder{color:#999}.form-control::-webkit-input-placeholder{color:#999}.form-control::-ms-expand{border:0;background-color:transparent}.form-control[disabled],.form-control[readonly],fieldset[disabled] .form-control{background-color:#eee;opacity:1}.form-control[disabled],fieldset[disabled] .form-control{cursor:not-allowed}textarea.form-control{height:auto}input[type="search"]{-webkit-appearance:none}@media screen and (-webkit-min-device-pixel-ratio:0){input[type="date"].form-control,input[type="time"].form-control,input[type="datetime-local"].form-control,input[type="month"].form-control{line-height:34px}input[type="date"].input-sm,input[type="time"].input-sm,input[type="datetime-local"].input-sm,input[type="month"].input-sm,.input-group-sm input[type="date"],.input-group-sm input[type="time"],.input-group-sm input[type="datetime-local"],.input-group-sm input[type="month"]{line-height:30px}input[type="date"].input-lg,input[type="time"].input-lg,input[type="datetime-local"].input-lg,input[type="month"].input-lg,.input-group-lg input[type="date"],.input-group-lg input[type="time"],.input-group-lg input[type="datetime-local"],.input-group-lg input[type="month"]{line-height:46px}}.form-group{margin-bottom:15px}.radio,.checkbox{position:relative;display:block;margin-top:10px;margin-bottom:10px}.radio label,.checkbox label{min-height:20px;padding-left:20px;margin-bottom:0;font-weight:normal;cursor:pointer}.radio input[type="radio"],.radio-inline input[type="radio"],.checkbox input[type="checkbox"],.checkbox-inline input[type="checkbox"]{position:absolute;margin-left:-20px;margin-top:4px \9}.radio+.radio,.checkbox+.checkbox{margin-top:-5px}.radio-inline,.checkbox-inline{position:relative;display:inline-block;padding-left:20px;margin-bottom:0;vertical-align:middle;font-weight:normal;cursor:pointer}.radio-inline+.radio-inline,.checkbox-inline+.checkbox-inline{margin-top:0;margin-left:10px}input[type="radio"][disabled],input[type="checkbox"][disabled],input[type="radio"].disabled,input[type="checkbox"].disabled,fieldset[disabled] input[type="radio"],fieldset[disabled] input[type="checkbox"]{cursor:not-allowed}.radio-inline.disabled,.checkbox-inline.disabled,fieldset[disabled] .radio-inline,fieldset[disabled] .checkbox-inline{cursor:not-allowed}.radio.disabled label,.checkbox.disabled label,fieldset[disabled] .radio label,fieldset[disabled] .checkbox label{cursor:not-allowed}.form-control-static{padding-top:7px;padding-bottom:7px;margin-bottom:0;min-height:34px}.form-control-static.input-lg,.form-control-static.input-sm{padding-left:0;padding-right:0}.input-sm{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-sm{height:30px;line-height:30px}textarea.input-sm,select[multiple].input-sm{height:auto}.form-group-sm .form-control{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.form-group-sm select.form-control{height:30px;line-height:30px}.form-group-sm textarea.form-control,.form-group-sm select[multiple].form-control{height:auto}.form-group-sm .form-control-static{height:30px;min-height:32px;padding:6px 10px;font-size:12px;line-height:1.5}.input-lg{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}select.input-lg{height:46px;line-height:46px}textarea.input-lg,select[multiple].input-lg{height:auto}.form-group-lg 
.form-control{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}.form-group-lg select.form-control{height:46px;line-height:46px}.form-group-lg textarea.form-control,.form-group-lg select[multiple].form-control{height:auto}.form-group-lg .form-control-static{height:46px;min-height:38px;padding:11px 16px;font-size:18px;line-height:1.3333333}.has-feedback{position:relative}.has-feedback .form-control{padding-right:42.5px}.form-control-feedback{position:absolute;top:0;right:0;z-index:2;display:block;width:34px;height:34px;line-height:34px;text-align:center;pointer-events:none}.input-lg+.form-control-feedback,.input-group-lg+.form-control-feedback,.form-group-lg .form-control+.form-control-feedback{width:46px;height:46px;line-height:46px}.input-sm+.form-control-feedback,.input-group-sm+.form-control-feedback,.form-group-sm .form-control+.form-control-feedback{width:30px;height:30px;line-height:30px}.has-success .help-block,.has-success .control-label,.has-success .radio,.has-success .checkbox,.has-success .radio-inline,.has-success .checkbox-inline,.has-success.radio label,.has-success.checkbox label,.has-success.radio-inline label,.has-success.checkbox-inline label{color:#3c763d}.has-success .form-control{border-color:#3c763d;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.has-success .form-control:focus{border-color:#2b542c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #67b168;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #67b168}.has-success .input-group-addon{color:#3c763d;border-color:#3c763d;background-color:#dff0d8}.has-success .form-control-feedback{color:#3c763d}.has-warning .help-block,.has-warning .control-label,.has-warning .radio,.has-warning .checkbox,.has-warning .radio-inline,.has-warning .checkbox-inline,.has-warning.radio label,.has-warning.checkbox label,.has-warning.radio-inline label,.has-warning.checkbox-inline label{color:#8a6d3b}.has-warning .form-control{border-color:#8a6d3b;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.has-warning .form-control:focus{border-color:#66512c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #c0a16b;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #c0a16b}.has-warning .input-group-addon{color:#8a6d3b;border-color:#8a6d3b;background-color:#fcf8e3}.has-warning .form-control-feedback{color:#8a6d3b}.has-error .help-block,.has-error .control-label,.has-error .radio,.has-error .checkbox,.has-error .radio-inline,.has-error .checkbox-inline,.has-error.radio label,.has-error.checkbox label,.has-error.radio-inline label,.has-error.checkbox-inline label{color:#a94442}.has-error .form-control{border-color:#a94442;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.has-error .form-control:focus{border-color:#843534;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #ce8483;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #ce8483}.has-error .input-group-addon{color:#a94442;border-color:#a94442;background-color:#f2dede}.has-error .form-control-feedback{color:#a94442}.has-feedback label~.form-control-feedback{top:25px}.has-feedback label.sr-only~.form-control-feedback{top:0}.help-block{display:block;margin-top:5px;margin-bottom:10px;color:#737373}@media (min-width:768px){.form-inline .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.form-inline 
.form-control{display:inline-block;width:auto;vertical-align:middle}.form-inline .form-control-static{display:inline-block}.form-inline .input-group{display:inline-table;vertical-align:middle}.form-inline .input-group .input-group-addon,.form-inline .input-group .input-group-btn,.form-inline .input-group .form-control{width:auto}.form-inline .input-group>.form-control{width:100%}.form-inline .control-label{margin-bottom:0;vertical-align:middle}.form-inline .radio,.form-inline .checkbox{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.form-inline .radio label,.form-inline .checkbox label{padding-left:0}.form-inline .radio input[type="radio"],.form-inline .checkbox input[type="checkbox"]{position:relative;margin-left:0}.form-inline .has-feedback .form-control-feedback{top:0}}.form-horizontal .radio,.form-horizontal .checkbox,.form-horizontal .radio-inline,.form-horizontal .checkbox-inline{margin-top:0;margin-bottom:0;padding-top:7px}.form-horizontal .radio,.form-horizontal .checkbox{min-height:27px}.form-horizontal .form-group{margin-left:-15px;margin-right:-15px}@media (min-width:768px){.form-horizontal .control-label{text-align:right;margin-bottom:0;padding-top:7px}}.form-horizontal .has-feedback .form-control-feedback{right:15px}@media (min-width:768px){.form-horizontal .form-group-lg .control-label{padding-top:11px;font-size:18px}}@media (min-width:768px){.form-horizontal .form-group-sm .control-label{padding-top:6px;font-size:12px}}.btn{display:inline-block;margin-bottom:0;font-weight:normal;text-align:center;vertical-align:middle;touch-action:manipulation;cursor:pointer;background-image:none;border:1px solid transparent;white-space:nowrap;padding:6px 12px;font-size:14px;line-height:1.42857143;border-radius:4px;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.btn:focus,.btn:active:focus,.btn.active:focus,.btn.focus,.btn:active.focus,.btn.active.focus{outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.btn:hover,.btn:focus,.btn.focus{color:#333;text-decoration:none}.btn:active,.btn.active{outline:0;background-image:none;-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,0.125);box-shadow:inset 0 3px 5px rgba(0,0,0,0.125)}.btn.disabled,.btn[disabled],fieldset[disabled] .btn{cursor:not-allowed;opacity:.65;filter:alpha(opacity=65);-webkit-box-shadow:none;box-shadow:none}a.btn.disabled,fieldset[disabled] a.btn{pointer-events:none}.btn-default{color:#333;background-color:#fff;border-color:#ccc}.btn-default:focus,.btn-default.focus{color:#333;background-color:#e6e6e6;border-color:#8c8c8c}.btn-default:hover{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default:active,.btn-default.active,.open>.dropdown-toggle.btn-default{color:#333;background-color:#e6e6e6;border-color:#adadad}.btn-default:active:hover,.btn-default.active:hover,.open>.dropdown-toggle.btn-default:hover,.btn-default:active:focus,.btn-default.active:focus,.open>.dropdown-toggle.btn-default:focus,.btn-default:active.focus,.btn-default.active.focus,.open>.dropdown-toggle.btn-default.focus{color:#333;background-color:#d4d4d4;border-color:#8c8c8c}.btn-default:active,.btn-default.active,.open>.dropdown-toggle.btn-default{background-image:none}.btn-default.disabled:hover,.btn-default[disabled]:hover,fieldset[disabled] .btn-default:hover,.btn-default.disabled:focus,.btn-default[disabled]:focus,fieldset[disabled] .btn-default:focus,.btn-default.disabled.focus,.btn-default[disabled].focus,fieldset[disabled] 
.btn-default.focus{background-color:#fff;border-color:#ccc}.btn-default .badge{color:#fff;background-color:#333}.btn-primary{color:#fff;background-color:#337ab7;border-color:#2e6da4}.btn-primary:focus,.btn-primary.focus{color:#fff;background-color:#286090;border-color:#122b40}.btn-primary:hover{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary:active,.btn-primary.active,.open>.dropdown-toggle.btn-primary{color:#fff;background-color:#286090;border-color:#204d74}.btn-primary:active:hover,.btn-primary.active:hover,.open>.dropdown-toggle.btn-primary:hover,.btn-primary:active:focus,.btn-primary.active:focus,.open>.dropdown-toggle.btn-primary:focus,.btn-primary:active.focus,.btn-primary.active.focus,.open>.dropdown-toggle.btn-primary.focus{color:#fff;background-color:#204d74;border-color:#122b40}.btn-primary:active,.btn-primary.active,.open>.dropdown-toggle.btn-primary{background-image:none}.btn-primary.disabled:hover,.btn-primary[disabled]:hover,fieldset[disabled] .btn-primary:hover,.btn-primary.disabled:focus,.btn-primary[disabled]:focus,fieldset[disabled] .btn-primary:focus,.btn-primary.disabled.focus,.btn-primary[disabled].focus,fieldset[disabled] .btn-primary.focus{background-color:#337ab7;border-color:#2e6da4}.btn-primary .badge{color:#337ab7;background-color:#fff}.btn-success{color:#fff;background-color:#5cb85c;border-color:#4cae4c}.btn-success:focus,.btn-success.focus{color:#fff;background-color:#449d44;border-color:#255625}.btn-success:hover{color:#fff;background-color:#449d44;border-color:#398439}.btn-success:active,.btn-success.active,.open>.dropdown-toggle.btn-success{color:#fff;background-color:#449d44;border-color:#398439}.btn-success:active:hover,.btn-success.active:hover,.open>.dropdown-toggle.btn-success:hover,.btn-success:active:focus,.btn-success.active:focus,.open>.dropdown-toggle.btn-success:focus,.btn-success:active.focus,.btn-success.active.focus,.open>.dropdown-toggle.btn-success.focus{color:#fff;background-color:#398439;border-color:#255625}.btn-success:active,.btn-success.active,.open>.dropdown-toggle.btn-success{background-image:none}.btn-success.disabled:hover,.btn-success[disabled]:hover,fieldset[disabled] .btn-success:hover,.btn-success.disabled:focus,.btn-success[disabled]:focus,fieldset[disabled] .btn-success:focus,.btn-success.disabled.focus,.btn-success[disabled].focus,fieldset[disabled] .btn-success.focus{background-color:#5cb85c;border-color:#4cae4c}.btn-success .badge{color:#5cb85c;background-color:#fff}.btn-info{color:#fff;background-color:#5bc0de;border-color:#46b8da}.btn-info:focus,.btn-info.focus{color:#fff;background-color:#31b0d5;border-color:#1b6d85}.btn-info:hover{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info:active,.btn-info.active,.open>.dropdown-toggle.btn-info{color:#fff;background-color:#31b0d5;border-color:#269abc}.btn-info:active:hover,.btn-info.active:hover,.open>.dropdown-toggle.btn-info:hover,.btn-info:active:focus,.btn-info.active:focus,.open>.dropdown-toggle.btn-info:focus,.btn-info:active.focus,.btn-info.active.focus,.open>.dropdown-toggle.btn-info.focus{color:#fff;background-color:#269abc;border-color:#1b6d85}.btn-info:active,.btn-info.active,.open>.dropdown-toggle.btn-info{background-image:none}.btn-info.disabled:hover,.btn-info[disabled]:hover,fieldset[disabled] .btn-info:hover,.btn-info.disabled:focus,.btn-info[disabled]:focus,fieldset[disabled] .btn-info:focus,.btn-info.disabled.focus,.btn-info[disabled].focus,fieldset[disabled] 
.btn-info.focus{background-color:#5bc0de;border-color:#46b8da}.btn-info .badge{color:#5bc0de;background-color:#fff}.btn-warning{color:#fff;background-color:#f0ad4e;border-color:#eea236}.btn-warning:focus,.btn-warning.focus{color:#fff;background-color:#ec971f;border-color:#985f0d}.btn-warning:hover{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning:active,.btn-warning.active,.open>.dropdown-toggle.btn-warning{color:#fff;background-color:#ec971f;border-color:#d58512}.btn-warning:active:hover,.btn-warning.active:hover,.open>.dropdown-toggle.btn-warning:hover,.btn-warning:active:focus,.btn-warning.active:focus,.open>.dropdown-toggle.btn-warning:focus,.btn-warning:active.focus,.btn-warning.active.focus,.open>.dropdown-toggle.btn-warning.focus{color:#fff;background-color:#d58512;border-color:#985f0d}.btn-warning:active,.btn-warning.active,.open>.dropdown-toggle.btn-warning{background-image:none}.btn-warning.disabled:hover,.btn-warning[disabled]:hover,fieldset[disabled] .btn-warning:hover,.btn-warning.disabled:focus,.btn-warning[disabled]:focus,fieldset[disabled] .btn-warning:focus,.btn-warning.disabled.focus,.btn-warning[disabled].focus,fieldset[disabled] .btn-warning.focus{background-color:#f0ad4e;border-color:#eea236}.btn-warning .badge{color:#f0ad4e;background-color:#fff}.btn-danger{color:#fff;background-color:#d9534f;border-color:#d43f3a}.btn-danger:focus,.btn-danger.focus{color:#fff;background-color:#c9302c;border-color:#761c19}.btn-danger:hover{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger:active,.btn-danger.active,.open>.dropdown-toggle.btn-danger{color:#fff;background-color:#c9302c;border-color:#ac2925}.btn-danger:active:hover,.btn-danger.active:hover,.open>.dropdown-toggle.btn-danger:hover,.btn-danger:active:focus,.btn-danger.active:focus,.open>.dropdown-toggle.btn-danger:focus,.btn-danger:active.focus,.btn-danger.active.focus,.open>.dropdown-toggle.btn-danger.focus{color:#fff;background-color:#ac2925;border-color:#761c19}.btn-danger:active,.btn-danger.active,.open>.dropdown-toggle.btn-danger{background-image:none}.btn-danger.disabled:hover,.btn-danger[disabled]:hover,fieldset[disabled] .btn-danger:hover,.btn-danger.disabled:focus,.btn-danger[disabled]:focus,fieldset[disabled] .btn-danger:focus,.btn-danger.disabled.focus,.btn-danger[disabled].focus,fieldset[disabled] .btn-danger.focus{background-color:#d9534f;border-color:#d43f3a}.btn-danger .badge{color:#d9534f;background-color:#fff}.btn-link{color:#337ab7;font-weight:normal;border-radius:0}.btn-link,.btn-link:active,.btn-link.active,.btn-link[disabled],fieldset[disabled] .btn-link{background-color:transparent;-webkit-box-shadow:none;box-shadow:none}.btn-link,.btn-link:hover,.btn-link:focus,.btn-link:active{border-color:transparent}.btn-link:hover,.btn-link:focus{color:#23527c;text-decoration:underline;background-color:transparent}.btn-link[disabled]:hover,fieldset[disabled] .btn-link:hover,.btn-link[disabled]:focus,fieldset[disabled] .btn-link:focus{color:#777;text-decoration:none}.btn-lg,.btn-group-lg>.btn{padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}.btn-sm,.btn-group-sm>.btn{padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}.btn-xs,.btn-group-xs>.btn{padding:1px 5px;font-size:12px;line-height:1.5;border-radius:3px}.btn-block{display:block;width:100%}.btn-block+.btn-block{margin-top:5px}input[type="submit"].btn-block,input[type="reset"].btn-block,input[type="button"].btn-block{width:100%}.fade{opacity:0;-webkit-transition:opacity .15s 
linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{display:none}.collapse.in{display:block}tr.collapse.in{display:table-row}tbody.collapse.in{display:table-row-group}.collapsing{position:relative;height:0;overflow:hidden;-webkit-transition-property:height, visibility;transition-property:height, visibility;-webkit-transition-duration:.35s;transition-duration:.35s;-webkit-transition-timing-function:ease;transition-timing-function:ease}.caret{display:inline-block;width:0;height:0;margin-left:2px;vertical-align:middle;border-top:4px dashed;border-top:4px solid \9;border-right:4px solid transparent;border-left:4px solid transparent}.dropup,.dropdown{position:relative}.dropdown-toggle:focus{outline:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;list-style:none;font-size:14px;text-align:left;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.15);border-radius:4px;-webkit-box-shadow:0 6px 12px rgba(0,0,0,0.175);box-shadow:0 6px 12px rgba(0,0,0,0.175);background-clip:padding-box}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.dropdown-menu>li>a{display:block;padding:3px 20px;clear:both;font-weight:normal;line-height:1.42857143;color:#333;white-space:nowrap}.dropdown-menu>li>a:hover,.dropdown-menu>li>a:focus{text-decoration:none;color:#262626;background-color:#f5f5f5}.dropdown-menu>.active>a,.dropdown-menu>.active>a:hover,.dropdown-menu>.active>a:focus{color:#fff;text-decoration:none;outline:0;background-color:#337ab7}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:hover,.dropdown-menu>.disabled>a:focus{color:#777}.dropdown-menu>.disabled>a:hover,.dropdown-menu>.disabled>a:focus{text-decoration:none;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);cursor:not-allowed}.open>.dropdown-menu{display:block}.open>a{outline:0}.dropdown-menu-right{left:auto;right:0}.dropdown-menu-left{left:0;right:auto}.dropdown-header{display:block;padding:3px 20px;font-size:12px;line-height:1.42857143;color:#777;white-space:nowrap}.dropdown-backdrop{position:fixed;left:0;right:0;bottom:0;top:0;z-index:990}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{border-top:0;border-bottom:4px dashed;border-bottom:4px solid \9;content:""}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:2px}@media (min-width:768px){.navbar-right .dropdown-menu{left:auto;right:0}.navbar-right .dropdown-menu-left{left:0;right:auto}}.btn-group,.btn-group-vertical{position:relative;display:inline-block;vertical-align:middle}.btn-group>.btn,.btn-group-vertical>.btn{position:relative;float:left}.btn-group>.btn:hover,.btn-group-vertical>.btn:hover,.btn-group>.btn:focus,.btn-group-vertical>.btn:focus,.btn-group>.btn:active,.btn-group-vertical>.btn:active,.btn-group>.btn.active,.btn-group-vertical>.btn.active{z-index:2}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group{margin-left:-1px}.btn-toolbar{margin-left:-5px}.btn-toolbar .btn,.btn-toolbar .btn-group,.btn-toolbar 
.input-group{float:left}.btn-toolbar>.btn,.btn-toolbar>.btn-group,.btn-toolbar>.input-group{margin-left:5px}.btn-group>.btn:not(:first-child):not(:last-child):not(.dropdown-toggle){border-radius:0}.btn-group>.btn:first-child{margin-left:0}.btn-group>.btn:first-child:not(:last-child):not(.dropdown-toggle){border-bottom-right-radius:0;border-top-right-radius:0}.btn-group>.btn:last-child:not(:first-child),.btn-group>.dropdown-toggle:not(:first-child){border-bottom-left-radius:0;border-top-left-radius:0}.btn-group>.btn-group{float:left}.btn-group>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-top-right-radius:0}.btn-group>.btn-group:last-child:not(:first-child)>.btn:first-child{border-bottom-left-radius:0;border-top-left-radius:0}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{padding-left:8px;padding-right:8px}.btn-group>.btn-lg+.dropdown-toggle{padding-left:12px;padding-right:12px}.btn-group.open .dropdown-toggle{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,0.125);box-shadow:inset 0 3px 5px rgba(0,0,0,0.125)}.btn-group.open .dropdown-toggle.btn-link{-webkit-box-shadow:none;box-shadow:none}.btn .caret{margin-left:0}.btn-lg .caret{border-width:5px 5px 0;border-bottom-width:0}.dropup .btn-lg .caret{border-width:0 5px 5px}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group,.btn-group-vertical>.btn-group>.btn{display:block;float:none;width:100%;max-width:100%}.btn-group-vertical>.btn-group>.btn{float:none}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:not(:first-child):not(:last-child){border-radius:0}.btn-group-vertical>.btn:first-child:not(:last-child){border-top-right-radius:4px;border-top-left-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:last-child:not(:first-child){border-top-right-radius:0;border-top-left-radius:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}.btn-group-vertical>.btn-group:not(:first-child):not(:last-child)>.btn{border-radius:0}.btn-group-vertical>.btn-group:first-child:not(:last-child)>.btn:last-child,.btn-group-vertical>.btn-group:first-child:not(:last-child)>.dropdown-toggle{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn-group:last-child:not(:first-child)>.btn:first-child{border-top-right-radius:0;border-top-left-radius:0}.btn-group-justified{display:table;width:100%;table-layout:fixed;border-collapse:separate}.btn-group-justified>.btn,.btn-group-justified>.btn-group{float:none;display:table-cell;width:1%}.btn-group-justified>.btn-group .btn{width:100%}.btn-group-justified>.btn-group .dropdown-menu{left:auto}[data-toggle="buttons"]>.btn input[type="radio"],[data-toggle="buttons"]>.btn-group>.btn input[type="radio"],[data-toggle="buttons"]>.btn input[type="checkbox"],[data-toggle="buttons"]>.btn-group>.btn input[type="checkbox"]{position:absolute;clip:rect(0, 0, 0, 0);pointer-events:none}.input-group{position:relative;display:table;border-collapse:separate}.input-group[class*="col-"]{float:none;padding-left:0;padding-right:0}.input-group .form-control{position:relative;z-index:2;float:left;width:100%;margin-bottom:0}.input-group 
.form-control:focus{z-index:3}.input-group-lg>.form-control,.input-group-lg>.input-group-addon,.input-group-lg>.input-group-btn>.btn{height:46px;padding:10px 16px;font-size:18px;line-height:1.3333333;border-radius:6px}select.input-group-lg>.form-control,select.input-group-lg>.input-group-addon,select.input-group-lg>.input-group-btn>.btn{height:46px;line-height:46px}textarea.input-group-lg>.form-control,textarea.input-group-lg>.input-group-addon,textarea.input-group-lg>.input-group-btn>.btn,select[multiple].input-group-lg>.form-control,select[multiple].input-group-lg>.input-group-addon,select[multiple].input-group-lg>.input-group-btn>.btn{height:auto}.input-group-sm>.form-control,.input-group-sm>.input-group-addon,.input-group-sm>.input-group-btn>.btn{height:30px;padding:5px 10px;font-size:12px;line-height:1.5;border-radius:3px}select.input-group-sm>.form-control,select.input-group-sm>.input-group-addon,select.input-group-sm>.input-group-btn>.btn{height:30px;line-height:30px}textarea.input-group-sm>.form-control,textarea.input-group-sm>.input-group-addon,textarea.input-group-sm>.input-group-btn>.btn,select[multiple].input-group-sm>.form-control,select[multiple].input-group-sm>.input-group-addon,select[multiple].input-group-sm>.input-group-btn>.btn{height:auto}.input-group-addon,.input-group-btn,.input-group .form-control{display:table-cell}.input-group-addon:not(:first-child):not(:last-child),.input-group-btn:not(:first-child):not(:last-child),.input-group .form-control:not(:first-child):not(:last-child){border-radius:0}.input-group-addon,.input-group-btn{width:1%;white-space:nowrap;vertical-align:middle}.input-group-addon{padding:6px 12px;font-size:14px;font-weight:normal;line-height:1;color:#555;text-align:center;background-color:#eee;border:1px solid #ccc;border-radius:4px}.input-group-addon.input-sm{padding:5px 10px;font-size:12px;border-radius:3px}.input-group-addon.input-lg{padding:10px 16px;font-size:18px;border-radius:6px}.input-group-addon input[type="radio"],.input-group-addon input[type="checkbox"]{margin-top:0}.input-group .form-control:first-child,.input-group-addon:first-child,.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group>.btn,.input-group-btn:first-child>.dropdown-toggle,.input-group-btn:last-child>.btn:not(:last-child):not(.dropdown-toggle),.input-group-btn:last-child>.btn-group:not(:last-child)>.btn{border-bottom-right-radius:0;border-top-right-radius:0}.input-group-addon:first-child{border-right:0}.input-group .form-control:last-child,.input-group-addon:last-child,.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group>.btn,.input-group-btn:last-child>.dropdown-toggle,.input-group-btn:first-child>.btn:not(:first-child),.input-group-btn:first-child>.btn-group:not(:first-child)>.btn{border-bottom-left-radius:0;border-top-left-radius:0}.input-group-addon:last-child{border-left:0}.input-group-btn{position:relative;font-size:0;white-space:nowrap}.input-group-btn>.btn{position:relative}.input-group-btn>.btn+.btn{margin-left:-1px}.input-group-btn>.btn:hover,.input-group-btn>.btn:focus,.input-group-btn>.btn:active{z-index:2}.input-group-btn:first-child>.btn,.input-group-btn:first-child>.btn-group{margin-right:-1px}.input-group-btn:last-child>.btn,.input-group-btn:last-child>.btn-group{z-index:2;margin-left:-1px}.nav{margin-bottom:0;padding-left:0;list-style:none}.nav>li{position:relative;display:block}.nav>li>a{position:relative;display:block;padding:10px 
15px}.nav>li>a:hover,.nav>li>a:focus{text-decoration:none;background-color:#eee}.nav>li.disabled>a{color:#777}.nav>li.disabled>a:hover,.nav>li.disabled>a:focus{color:#777;text-decoration:none;background-color:transparent;cursor:not-allowed}.nav .open>a,.nav .open>a:hover,.nav .open>a:focus{background-color:#eee;border-color:#337ab7}.nav .nav-divider{height:1px;margin:9px 0;overflow:hidden;background-color:#e5e5e5}.nav>li>a>img{max-width:none}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{float:left;margin-bottom:-1px}.nav-tabs>li>a{margin-right:2px;line-height:1.42857143;border:1px solid transparent;border-radius:4px 4px 0 0}.nav-tabs>li>a:hover{border-color:#eee #eee #ddd}.nav-tabs>li.active>a,.nav-tabs>li.active>a:hover,.nav-tabs>li.active>a:focus{color:#555;background-color:#fff;border:1px solid #ddd;border-bottom-color:transparent;cursor:default}.nav-tabs.nav-justified{width:100%;border-bottom:0}.nav-tabs.nav-justified>li{float:none}.nav-tabs.nav-justified>li>a{text-align:center;margin-bottom:5px}.nav-tabs.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media (min-width:768px){.nav-tabs.nav-justified>li{display:table-cell;width:1%}.nav-tabs.nav-justified>li>a{margin-bottom:0}}.nav-tabs.nav-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:hover,.nav-tabs.nav-justified>.active>a:focus{border:1px solid #ddd}@media (min-width:768px){.nav-tabs.nav-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs.nav-justified>.active>a,.nav-tabs.nav-justified>.active>a:hover,.nav-tabs.nav-justified>.active>a:focus{border-bottom-color:#fff}}.nav-pills>li{float:left}.nav-pills>li>a{border-radius:4px}.nav-pills>li+li{margin-left:2px}.nav-pills>li.active>a,.nav-pills>li.active>a:hover,.nav-pills>li.active>a:focus{color:#fff;background-color:#337ab7}.nav-stacked>li{float:none}.nav-stacked>li+li{margin-top:2px;margin-left:0}.nav-justified{width:100%}.nav-justified>li{float:none}.nav-justified>li>a{text-align:center;margin-bottom:5px}.nav-justified>.dropdown .dropdown-menu{top:auto;left:auto}@media (min-width:768px){.nav-justified>li{display:table-cell;width:1%}.nav-justified>li>a{margin-bottom:0}}.nav-tabs-justified{border-bottom:0}.nav-tabs-justified>li>a{margin-right:0;border-radius:4px}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:hover,.nav-tabs-justified>.active>a:focus{border:1px solid #ddd}@media (min-width:768px){.nav-tabs-justified>li>a{border-bottom:1px solid #ddd;border-radius:4px 4px 0 0}.nav-tabs-justified>.active>a,.nav-tabs-justified>.active>a:hover,.nav-tabs-justified>.active>a:focus{border-bottom-color:#fff}}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-right-radius:0;border-top-left-radius:0}.navbar{position:relative;min-height:50px;margin-bottom:20px;border:1px solid transparent}@media (min-width:768px){.navbar{border-radius:4px}}@media (min-width:768px){.navbar-header{float:left}}.navbar-collapse{overflow-x:visible;padding-right:15px;padding-left:15px;border-top:1px solid transparent;box-shadow:inset 0 1px 0 rgba(255,255,255,0.1);-webkit-overflow-scrolling:touch}.navbar-collapse.in{overflow-y:auto}@media (min-width:768px){.navbar-collapse{width:auto;border-top:0;box-shadow:none}.navbar-collapse.collapse{display:block !important;height:auto !important;padding-bottom:0;overflow:visible !important}.navbar-collapse.in{overflow-y:visible}.navbar-fixed-top .navbar-collapse,.navbar-static-top 
.navbar-collapse,.navbar-fixed-bottom .navbar-collapse{padding-left:0;padding-right:0}}.navbar-fixed-top .navbar-collapse,.navbar-fixed-bottom .navbar-collapse{max-height:340px}@media (max-device-width:480px) and (orientation:landscape){.navbar-fixed-top .navbar-collapse,.navbar-fixed-bottom .navbar-collapse{max-height:200px}}.container>.navbar-header,.container-fluid>.navbar-header,.container>.navbar-collapse,.container-fluid>.navbar-collapse{margin-right:-15px;margin-left:-15px}@media (min-width:768px){.container>.navbar-header,.container-fluid>.navbar-header,.container>.navbar-collapse,.container-fluid>.navbar-collapse{margin-right:0;margin-left:0}}.navbar-static-top{z-index:1000;border-width:0 0 1px}@media (min-width:768px){.navbar-static-top{border-radius:0}}.navbar-fixed-top,.navbar-fixed-bottom{position:fixed;right:0;left:0;z-index:1030}@media (min-width:768px){.navbar-fixed-top,.navbar-fixed-bottom{border-radius:0}}.navbar-fixed-top{top:0;border-width:0 0 1px}.navbar-fixed-bottom{bottom:0;margin-bottom:0;border-width:1px 0 0}.navbar-brand{float:left;padding:15px 15px;font-size:18px;line-height:20px;height:50px}.navbar-brand:hover,.navbar-brand:focus{text-decoration:none}.navbar-brand>img{display:block}@media (min-width:768px){.navbar>.container .navbar-brand,.navbar>.container-fluid .navbar-brand{margin-left:-15px}}.navbar-toggle{position:relative;float:right;margin-right:15px;padding:9px 10px;margin-top:8px;margin-bottom:8px;background-color:transparent;background-image:none;border:1px solid transparent;border-radius:4px}.navbar-toggle:focus{outline:0}.navbar-toggle .icon-bar{display:block;width:22px;height:2px;border-radius:1px}.navbar-toggle .icon-bar+.icon-bar{margin-top:4px}@media (min-width:768px){.navbar-toggle{display:none}}.navbar-nav{margin:7.5px -15px}.navbar-nav>li>a{padding-top:10px;padding-bottom:10px;line-height:20px}@media (max-width:767px){.navbar-nav .open .dropdown-menu{position:static;float:none;width:auto;margin-top:0;background-color:transparent;border:0;box-shadow:none}.navbar-nav .open .dropdown-menu>li>a,.navbar-nav .open .dropdown-menu .dropdown-header{padding:5px 15px 5px 25px}.navbar-nav .open .dropdown-menu>li>a{line-height:20px}.navbar-nav .open .dropdown-menu>li>a:hover,.navbar-nav .open .dropdown-menu>li>a:focus{background-image:none}}@media (min-width:768px){.navbar-nav{float:left;margin:0}.navbar-nav>li{float:left}.navbar-nav>li>a{padding-top:15px;padding-bottom:15px}}.navbar-form{margin-left:-15px;margin-right:-15px;padding:10px 15px;border-top:1px solid transparent;border-bottom:1px solid transparent;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.1);box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.1);margin-top:8px;margin-bottom:8px}@media (min-width:768px){.navbar-form .form-group{display:inline-block;margin-bottom:0;vertical-align:middle}.navbar-form .form-control{display:inline-block;width:auto;vertical-align:middle}.navbar-form .form-control-static{display:inline-block}.navbar-form .input-group{display:inline-table;vertical-align:middle}.navbar-form .input-group .input-group-addon,.navbar-form .input-group .input-group-btn,.navbar-form .input-group .form-control{width:auto}.navbar-form .input-group>.form-control{width:100%}.navbar-form .control-label{margin-bottom:0;vertical-align:middle}.navbar-form .radio,.navbar-form .checkbox{display:inline-block;margin-top:0;margin-bottom:0;vertical-align:middle}.navbar-form .radio label,.navbar-form .checkbox label{padding-left:0}.navbar-form 
.radio input[type="radio"],.navbar-form .checkbox input[type="checkbox"]{position:relative;margin-left:0}.navbar-form .has-feedback .form-control-feedback{top:0}}@media (max-width:767px){.navbar-form .form-group{margin-bottom:5px}.navbar-form .form-group:last-child{margin-bottom:0}}@media (min-width:768px){.navbar-form{width:auto;border:0;margin-left:0;margin-right:0;padding-top:0;padding-bottom:0;-webkit-box-shadow:none;box-shadow:none}}.navbar-nav>li>.dropdown-menu{margin-top:0;border-top-right-radius:0;border-top-left-radius:0}.navbar-fixed-bottom .navbar-nav>li>.dropdown-menu{margin-bottom:0;border-top-right-radius:4px;border-top-left-radius:4px;border-bottom-right-radius:0;border-bottom-left-radius:0}.navbar-btn{margin-top:8px;margin-bottom:8px}.navbar-btn.btn-sm{margin-top:10px;margin-bottom:10px}.navbar-btn.btn-xs{margin-top:14px;margin-bottom:14px}.navbar-text{margin-top:15px;margin-bottom:15px}@media (min-width:768px){.navbar-text{float:left;margin-left:15px;margin-right:15px}}@media (min-width:768px){.navbar-left{float:left !important}.navbar-right{float:right !important;margin-right:-15px}.navbar-right~.navbar-right{margin-right:0}}.navbar-default{background-color:#f8f8f8;border-color:#e7e7e7}.navbar-default .navbar-brand{color:#777}.navbar-default .navbar-brand:hover,.navbar-default .navbar-brand:focus{color:#5e5e5e;background-color:transparent}.navbar-default .navbar-text{color:#777}.navbar-default .navbar-nav>li>a{color:#777}.navbar-default .navbar-nav>li>a:hover,.navbar-default .navbar-nav>li>a:focus{color:#333;background-color:transparent}.navbar-default .navbar-nav>.active>a,.navbar-default .navbar-nav>.active>a:hover,.navbar-default .navbar-nav>.active>a:focus{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav>.disabled>a,.navbar-default .navbar-nav>.disabled>a:hover,.navbar-default .navbar-nav>.disabled>a:focus{color:#ccc;background-color:transparent}.navbar-default .navbar-toggle{border-color:#ddd}.navbar-default .navbar-toggle:hover,.navbar-default .navbar-toggle:focus{background-color:#ddd}.navbar-default .navbar-toggle .icon-bar{background-color:#888}.navbar-default .navbar-collapse,.navbar-default .navbar-form{border-color:#e7e7e7}.navbar-default .navbar-nav>.open>a,.navbar-default .navbar-nav>.open>a:hover,.navbar-default .navbar-nav>.open>a:focus{background-color:#e7e7e7;color:#555}@media (max-width:767px){.navbar-default .navbar-nav .open .dropdown-menu>li>a{color:#777}.navbar-default .navbar-nav .open .dropdown-menu>li>a:hover,.navbar-default .navbar-nav .open .dropdown-menu>li>a:focus{color:#333;background-color:transparent}.navbar-default .navbar-nav .open .dropdown-menu>.active>a,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:hover,.navbar-default .navbar-nav .open .dropdown-menu>.active>a:focus{color:#555;background-color:#e7e7e7}.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:hover,.navbar-default .navbar-nav .open .dropdown-menu>.disabled>a:focus{color:#ccc;background-color:transparent}}.navbar-default .navbar-link{color:#777}.navbar-default .navbar-link:hover{color:#333}.navbar-default .btn-link{color:#777}.navbar-default .btn-link:hover,.navbar-default .btn-link:focus{color:#333}.navbar-default .btn-link[disabled]:hover,fieldset[disabled] .navbar-default .btn-link:hover,.navbar-default .btn-link[disabled]:focus,fieldset[disabled] .navbar-default .btn-link:focus{color:#ccc}.navbar-inverse{background-color:#222;border-color:#080808}.navbar-inverse 
.navbar-brand{color:#9d9d9d}.navbar-inverse .navbar-brand:hover,.navbar-inverse .navbar-brand:focus{color:#fff;background-color:transparent}.navbar-inverse .navbar-text{color:#9d9d9d}.navbar-inverse .navbar-nav>li>a{color:#9d9d9d}.navbar-inverse .navbar-nav>li>a:hover,.navbar-inverse .navbar-nav>li>a:focus{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav>.active>a,.navbar-inverse .navbar-nav>.active>a:hover,.navbar-inverse .navbar-nav>.active>a:focus{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav>.disabled>a,.navbar-inverse .navbar-nav>.disabled>a:hover,.navbar-inverse .navbar-nav>.disabled>a:focus{color:#444;background-color:transparent}.navbar-inverse .navbar-toggle{border-color:#333}.navbar-inverse .navbar-toggle:hover,.navbar-inverse .navbar-toggle:focus{background-color:#333}.navbar-inverse .navbar-toggle .icon-bar{background-color:#fff}.navbar-inverse .navbar-collapse,.navbar-inverse .navbar-form{border-color:#101010}.navbar-inverse .navbar-nav>.open>a,.navbar-inverse .navbar-nav>.open>a:hover,.navbar-inverse .navbar-nav>.open>a:focus{background-color:#080808;color:#fff}@media (max-width:767px){.navbar-inverse .navbar-nav .open .dropdown-menu>.dropdown-header{border-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu .divider{background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a{color:#9d9d9d}.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:hover,.navbar-inverse .navbar-nav .open .dropdown-menu>li>a:focus{color:#fff;background-color:transparent}.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:hover,.navbar-inverse .navbar-nav .open .dropdown-menu>.active>a:focus{color:#fff;background-color:#080808}.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:hover,.navbar-inverse .navbar-nav .open .dropdown-menu>.disabled>a:focus{color:#444;background-color:transparent}}.navbar-inverse .navbar-link{color:#9d9d9d}.navbar-inverse .navbar-link:hover{color:#fff}.navbar-inverse .btn-link{color:#9d9d9d}.navbar-inverse .btn-link:hover,.navbar-inverse .btn-link:focus{color:#fff}.navbar-inverse .btn-link[disabled]:hover,fieldset[disabled] .navbar-inverse .btn-link:hover,.navbar-inverse .btn-link[disabled]:focus,fieldset[disabled] .navbar-inverse .btn-link:focus{color:#444}.breadcrumb{padding:8px 15px;margin-bottom:20px;list-style:none;background-color:#f5f5f5;border-radius:4px}.breadcrumb>li{display:inline-block}.breadcrumb>li+li:before{content:"/\00a0";padding:0 5px;color:#ccc}.breadcrumb>.active{color:#777}.pagination{display:inline-block;padding-left:0;margin:20px 0;border-radius:4px}.pagination>li{display:inline}.pagination>li>a,.pagination>li>span{position:relative;float:left;padding:6px 12px;line-height:1.42857143;text-decoration:none;color:#337ab7;background-color:#fff;border:1px solid 
#ddd;margin-left:-1px}.pagination>li:first-child>a,.pagination>li:first-child>span{margin-left:0;border-bottom-left-radius:4px;border-top-left-radius:4px}.pagination>li:last-child>a,.pagination>li:last-child>span{border-bottom-right-radius:4px;border-top-right-radius:4px}.pagination>li>a:hover,.pagination>li>span:hover,.pagination>li>a:focus,.pagination>li>span:focus{z-index:2;color:#23527c;background-color:#eee;border-color:#ddd}.pagination>.active>a,.pagination>.active>span,.pagination>.active>a:hover,.pagination>.active>span:hover,.pagination>.active>a:focus,.pagination>.active>span:focus{z-index:3;color:#fff;background-color:#337ab7;border-color:#337ab7;cursor:default}.pagination>.disabled>span,.pagination>.disabled>span:hover,.pagination>.disabled>span:focus,.pagination>.disabled>a,.pagination>.disabled>a:hover,.pagination>.disabled>a:focus{color:#777;background-color:#fff;border-color:#ddd;cursor:not-allowed}.pagination-lg>li>a,.pagination-lg>li>span{padding:10px 16px;font-size:18px;line-height:1.3333333}.pagination-lg>li:first-child>a,.pagination-lg>li:first-child>span{border-bottom-left-radius:6px;border-top-left-radius:6px}.pagination-lg>li:last-child>a,.pagination-lg>li:last-child>span{border-bottom-right-radius:6px;border-top-right-radius:6px}.pagination-sm>li>a,.pagination-sm>li>span{padding:5px 10px;font-size:12px;line-height:1.5}.pagination-sm>li:first-child>a,.pagination-sm>li:first-child>span{border-bottom-left-radius:3px;border-top-left-radius:3px}.pagination-sm>li:last-child>a,.pagination-sm>li:last-child>span{border-bottom-right-radius:3px;border-top-right-radius:3px}.pager{padding-left:0;margin:20px 0;list-style:none;text-align:center}.pager li{display:inline}.pager li>a,.pager li>span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;border-radius:15px}.pager li>a:hover,.pager li>a:focus{text-decoration:none;background-color:#eee}.pager .next>a,.pager .next>span{float:right}.pager .previous>a,.pager .previous>span{float:left}.pager .disabled>a,.pager .disabled>a:hover,.pager .disabled>a:focus,.pager .disabled>span{color:#777;background-color:#fff;cursor:not-allowed}.label{display:inline;padding:.2em .6em .3em;font-size:75%;font-weight:bold;line-height:1;color:#fff;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25em}a.label:hover,a.label:focus{color:#fff;text-decoration:none;cursor:pointer}.label:empty{display:none}.btn .label{position:relative;top:-1px}.label-default{background-color:#777}.label-default[href]:hover,.label-default[href]:focus{background-color:#5e5e5e}.label-primary{background-color:#337ab7}.label-primary[href]:hover,.label-primary[href]:focus{background-color:#286090}.label-success{background-color:#5cb85c}.label-success[href]:hover,.label-success[href]:focus{background-color:#449d44}.label-info{background-color:#5bc0de}.label-info[href]:hover,.label-info[href]:focus{background-color:#31b0d5}.label-warning{background-color:#f0ad4e}.label-warning[href]:hover,.label-warning[href]:focus{background-color:#ec971f}.label-danger{background-color:#d9534f}.label-danger[href]:hover,.label-danger[href]:focus{background-color:#c9302c}.badge{display:inline-block;min-width:10px;padding:3px 7px;font-size:12px;font-weight:bold;color:#fff;line-height:1;vertical-align:middle;white-space:nowrap;text-align:center;background-color:#777;border-radius:10px}.badge:empty{display:none}.btn .badge{position:relative;top:-1px}.btn-xs .badge,.btn-group-xs>.btn .badge{top:0;padding:1px 
5px}a.badge:hover,a.badge:focus{color:#fff;text-decoration:none;cursor:pointer}.list-group-item.active>.badge,.nav-pills>.active>a>.badge{color:#337ab7;background-color:#fff}.list-group-item>.badge{float:right}.list-group-item>.badge+.badge{margin-right:5px}.nav-pills>li>a>.badge{margin-left:3px}.jumbotron{padding-top:30px;padding-bottom:30px;margin-bottom:30px;color:inherit;background-color:#eee}.jumbotron h1,.jumbotron .h1{color:inherit}.jumbotron p{margin-bottom:15px;font-size:21px;font-weight:200}.jumbotron>hr{border-top-color:#d5d5d5}.container .jumbotron,.container-fluid .jumbotron{border-radius:6px;padding-left:15px;padding-right:15px}.jumbotron .container{max-width:100%}@media screen and (min-width:768px){.jumbotron{padding-top:48px;padding-bottom:48px}.container .jumbotron,.container-fluid .jumbotron{padding-left:60px;padding-right:60px}.jumbotron h1,.jumbotron .h1{font-size:63px}}.thumbnail{display:block;padding:4px;margin-bottom:20px;line-height:1.42857143;background-color:#fff;border:1px solid #ddd;border-radius:4px;-webkit-transition:border .2s ease-in-out;-o-transition:border .2s ease-in-out;transition:border .2s ease-in-out}.thumbnail>img,.thumbnail a>img{margin-left:auto;margin-right:auto}a.thumbnail:hover,a.thumbnail:focus,a.thumbnail.active{border-color:#337ab7}.thumbnail .caption{padding:9px;color:#333}.alert{padding:15px;margin-bottom:20px;border:1px solid transparent;border-radius:4px}.alert h4{margin-top:0;color:inherit}.alert .alert-link{font-weight:bold}.alert>p,.alert>ul{margin-bottom:0}.alert>p+p{margin-top:5px}.alert-dismissable,.alert-dismissible{padding-right:35px}.alert-dismissable .close,.alert-dismissible .close{position:relative;top:-2px;right:-21px;color:inherit}.alert-success{background-color:#dff0d8;border-color:#d6e9c6;color:#3c763d}.alert-success hr{border-top-color:#c9e2b3}.alert-success .alert-link{color:#2b542c}.alert-info{background-color:#d9edf7;border-color:#bce8f1;color:#31708f}.alert-info hr{border-top-color:#a6e1ec}.alert-info .alert-link{color:#245269}.alert-warning{background-color:#fcf8e3;border-color:#faebcc;color:#8a6d3b}.alert-warning hr{border-top-color:#f7e1b5}.alert-warning .alert-link{color:#66512c}.alert-danger{background-color:#f2dede;border-color:#ebccd1;color:#a94442}.alert-danger hr{border-top-color:#e4b9c0}.alert-danger .alert-link{color:#843534}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}.progress{overflow:hidden;height:20px;margin-bottom:20px;background-color:#f5f5f5;border-radius:4px;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1);box-shadow:inset 0 1px 2px rgba(0,0,0,0.1)}.progress-bar{float:left;width:0;height:100%;font-size:12px;line-height:20px;color:#fff;text-align:center;background-color:#337ab7;-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);-webkit-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}.progress-striped .progress-bar,.progress-bar-striped{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 
25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-size:40px 40px}.progress.active .progress-bar,.progress-bar.active{-webkit-animation:progress-bar-stripes 2s linear infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-bar-success{background-color:#5cb85c}.progress-striped .progress-bar-success{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent)}.progress-bar-info{background-color:#5bc0de}.progress-striped .progress-bar-info{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent)}.progress-bar-warning{background-color:#f0ad4e}.progress-striped .progress-bar-warning{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent)}.progress-bar-danger{background-color:#d9534f}.progress-striped .progress-bar-danger{background-image:-webkit-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(45deg, rgba(255,255,255,0.15) 25%, transparent 25%, transparent 50%, rgba(255,255,255,0.15) 50%, rgba(255,255,255,0.15) 75%, transparent 75%, 
transparent)}.media{margin-top:15px}.media:first-child{margin-top:0}.media,.media-body{zoom:1;overflow:hidden}.media-body{width:10000px}.media-object{display:block}.media-object.img-thumbnail{max-width:none}.media-right,.media>.pull-right{padding-left:10px}.media-left,.media>.pull-left{padding-right:10px}.media-left,.media-right,.media-body{display:table-cell;vertical-align:top}.media-middle{vertical-align:middle}.media-bottom{vertical-align:bottom}.media-heading{margin-top:0;margin-bottom:5px}.media-list{padding-left:0;list-style:none}.list-group{margin-bottom:20px;padding-left:0}.list-group-item{position:relative;display:block;padding:10px 15px;margin-bottom:-1px;background-color:#fff;border:1px solid #ddd}.list-group-item:first-child{border-top-right-radius:4px;border-top-left-radius:4px}.list-group-item:last-child{margin-bottom:0;border-bottom-right-radius:4px;border-bottom-left-radius:4px}a.list-group-item,button.list-group-item{color:#555}a.list-group-item .list-group-item-heading,button.list-group-item .list-group-item-heading{color:#333}a.list-group-item:hover,button.list-group-item:hover,a.list-group-item:focus,button.list-group-item:focus{text-decoration:none;color:#555;background-color:#f5f5f5}button.list-group-item{width:100%;text-align:left}.list-group-item.disabled,.list-group-item.disabled:hover,.list-group-item.disabled:focus{background-color:#eee;color:#777;cursor:not-allowed}.list-group-item.disabled .list-group-item-heading,.list-group-item.disabled:hover .list-group-item-heading,.list-group-item.disabled:focus .list-group-item-heading{color:inherit}.list-group-item.disabled .list-group-item-text,.list-group-item.disabled:hover .list-group-item-text,.list-group-item.disabled:focus .list-group-item-text{color:#777}.list-group-item.active,.list-group-item.active:hover,.list-group-item.active:focus{z-index:2;color:#fff;background-color:#337ab7;border-color:#337ab7}.list-group-item.active .list-group-item-heading,.list-group-item.active:hover .list-group-item-heading,.list-group-item.active:focus .list-group-item-heading,.list-group-item.active .list-group-item-heading>small,.list-group-item.active:hover .list-group-item-heading>small,.list-group-item.active:focus .list-group-item-heading>small,.list-group-item.active .list-group-item-heading>.small,.list-group-item.active:hover .list-group-item-heading>.small,.list-group-item.active:focus .list-group-item-heading>.small{color:inherit}.list-group-item.active .list-group-item-text,.list-group-item.active:hover .list-group-item-text,.list-group-item.active:focus .list-group-item-text{color:#c7ddef}.list-group-item-success{color:#3c763d;background-color:#dff0d8}a.list-group-item-success,button.list-group-item-success{color:#3c763d}a.list-group-item-success .list-group-item-heading,button.list-group-item-success .list-group-item-heading{color:inherit}a.list-group-item-success:hover,button.list-group-item-success:hover,a.list-group-item-success:focus,button.list-group-item-success:focus{color:#3c763d;background-color:#d0e9c6}a.list-group-item-success.active,button.list-group-item-success.active,a.list-group-item-success.active:hover,button.list-group-item-success.active:hover,a.list-group-item-success.active:focus,button.list-group-item-success.active:focus{color:#fff;background-color:#3c763d;border-color:#3c763d}.list-group-item-info{color:#31708f;background-color:#d9edf7}a.list-group-item-info,button.list-group-item-info{color:#31708f}a.list-group-item-info .list-group-item-heading,button.list-group-item-info 
.list-group-item-heading{color:inherit}a.list-group-item-info:hover,button.list-group-item-info:hover,a.list-group-item-info:focus,button.list-group-item-info:focus{color:#31708f;background-color:#c4e3f3}a.list-group-item-info.active,button.list-group-item-info.active,a.list-group-item-info.active:hover,button.list-group-item-info.active:hover,a.list-group-item-info.active:focus,button.list-group-item-info.active:focus{color:#fff;background-color:#31708f;border-color:#31708f}.list-group-item-warning{color:#8a6d3b;background-color:#fcf8e3}a.list-group-item-warning,button.list-group-item-warning{color:#8a6d3b}a.list-group-item-warning .list-group-item-heading,button.list-group-item-warning .list-group-item-heading{color:inherit}a.list-group-item-warning:hover,button.list-group-item-warning:hover,a.list-group-item-warning:focus,button.list-group-item-warning:focus{color:#8a6d3b;background-color:#faf2cc}a.list-group-item-warning.active,button.list-group-item-warning.active,a.list-group-item-warning.active:hover,button.list-group-item-warning.active:hover,a.list-group-item-warning.active:focus,button.list-group-item-warning.active:focus{color:#fff;background-color:#8a6d3b;border-color:#8a6d3b}.list-group-item-danger{color:#a94442;background-color:#f2dede}a.list-group-item-danger,button.list-group-item-danger{color:#a94442}a.list-group-item-danger .list-group-item-heading,button.list-group-item-danger .list-group-item-heading{color:inherit}a.list-group-item-danger:hover,button.list-group-item-danger:hover,a.list-group-item-danger:focus,button.list-group-item-danger:focus{color:#a94442;background-color:#ebcccc}a.list-group-item-danger.active,button.list-group-item-danger.active,a.list-group-item-danger.active:hover,button.list-group-item-danger.active:hover,a.list-group-item-danger.active:focus,button.list-group-item-danger.active:focus{color:#fff;background-color:#a94442;border-color:#a94442}.list-group-item-heading{margin-top:0;margin-bottom:5px}.list-group-item-text{margin-bottom:0;line-height:1.3}.panel{margin-bottom:20px;background-color:#fff;border:1px solid transparent;border-radius:4px;-webkit-box-shadow:0 1px 1px rgba(0,0,0,0.05);box-shadow:0 1px 1px rgba(0,0,0,0.05)}.panel-body{padding:15px}.panel-heading{padding:10px 15px;border-bottom:1px solid transparent;border-top-right-radius:3px;border-top-left-radius:3px}.panel-heading>.dropdown .dropdown-toggle{color:inherit}.panel-title{margin-top:0;margin-bottom:0;font-size:16px;color:inherit}.panel-title>a,.panel-title>small,.panel-title>.small,.panel-title>small>a,.panel-title>.small>a{color:inherit}.panel-footer{padding:10px 15px;background-color:#f5f5f5;border-top:1px solid #ddd;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.list-group,.panel>.panel-collapse>.list-group{margin-bottom:0}.panel>.list-group .list-group-item,.panel>.panel-collapse>.list-group .list-group-item{border-width:1px 0;border-radius:0}.panel>.list-group:first-child .list-group-item:first-child,.panel>.panel-collapse>.list-group:first-child .list-group-item:first-child{border-top:0;border-top-right-radius:3px;border-top-left-radius:3px}.panel>.list-group:last-child .list-group-item:last-child,.panel>.panel-collapse>.list-group:last-child .list-group-item:last-child{border-bottom:0;border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.panel-heading+.panel-collapse>.list-group .list-group-item:first-child{border-top-right-radius:0;border-top-left-radius:0}.panel-heading+.list-group 
.list-group-item:first-child{border-top-width:0}.list-group+.panel-footer{border-top-width:0}.panel>.table,.panel>.table-responsive>.table,.panel>.panel-collapse>.table{margin-bottom:0}.panel>.table caption,.panel>.table-responsive>.table caption,.panel>.panel-collapse>.table caption{padding-left:15px;padding-right:15px}.panel>.table:first-child,.panel>.table-responsive:first-child>.table:first-child{border-top-right-radius:3px;border-top-left-radius:3px}.panel>.table:first-child>thead:first-child>tr:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child{border-top-left-radius:3px;border-top-right-radius:3px}.panel>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:first-child,.panel>.table:first-child>thead:first-child>tr:first-child th:first-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:first-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:first-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:first-child{border-top-left-radius:3px}.panel>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child td:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child td:last-child,.panel>.table:first-child>thead:first-child>tr:first-child th:last-child,.panel>.table-responsive:first-child>.table:first-child>thead:first-child>tr:first-child th:last-child,.panel>.table:first-child>tbody:first-child>tr:first-child th:last-child,.panel>.table-responsive:first-child>.table:first-child>tbody:first-child>tr:first-child th:last-child{border-top-right-radius:3px}.panel>.table:last-child,.panel>.table-responsive:last-child>.table:last-child{border-bottom-right-radius:3px;border-bottom-left-radius:3px}.panel>.table:last-child>tbody:last-child>tr:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child{border-bottom-left-radius:3px;border-bottom-right-radius:3px}.panel>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:first-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:first-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:first-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:first-child{border-bottom-left-radius:3px}.panel>.table:last-child>tbody:last-child>tr:last-child 
td:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child td:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child td:last-child,.panel>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table-responsive:last-child>.table:last-child>tbody:last-child>tr:last-child th:last-child,.panel>.table:last-child>tfoot:last-child>tr:last-child th:last-child,.panel>.table-responsive:last-child>.table:last-child>tfoot:last-child>tr:last-child th:last-child{border-bottom-right-radius:3px}.panel>.panel-body+.table,.panel>.panel-body+.table-responsive,.panel>.table+.panel-body,.panel>.table-responsive+.panel-body{border-top:1px solid #ddd}.panel>.table>tbody:first-child>tr:first-child th,.panel>.table>tbody:first-child>tr:first-child td{border-top:0}.panel>.table-bordered,.panel>.table-responsive>.table-bordered{border:0}.panel>.table-bordered>thead>tr>th:first-child,.panel>.table-responsive>.table-bordered>thead>tr>th:first-child,.panel>.table-bordered>tbody>tr>th:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:first-child,.panel>.table-bordered>tfoot>tr>th:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:first-child,.panel>.table-bordered>thead>tr>td:first-child,.panel>.table-responsive>.table-bordered>thead>tr>td:first-child,.panel>.table-bordered>tbody>tr>td:first-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:first-child,.panel>.table-bordered>tfoot>tr>td:first-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:first-child{border-left:0}.panel>.table-bordered>thead>tr>th:last-child,.panel>.table-responsive>.table-bordered>thead>tr>th:last-child,.panel>.table-bordered>tbody>tr>th:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>th:last-child,.panel>.table-bordered>tfoot>tr>th:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>th:last-child,.panel>.table-bordered>thead>tr>td:last-child,.panel>.table-responsive>.table-bordered>thead>tr>td:last-child,.panel>.table-bordered>tbody>tr>td:last-child,.panel>.table-responsive>.table-bordered>tbody>tr>td:last-child,.panel>.table-bordered>tfoot>tr>td:last-child,.panel>.table-responsive>.table-bordered>tfoot>tr>td:last-child{border-right:0}.panel>.table-bordered>thead>tr:first-child>td,.panel>.table-responsive>.table-bordered>thead>tr:first-child>td,.panel>.table-bordered>tbody>tr:first-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>td,.panel>.table-bordered>thead>tr:first-child>th,.panel>.table-responsive>.table-bordered>thead>tr:first-child>th,.panel>.table-bordered>tbody>tr:first-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:first-child>th{border-bottom:0}.panel>.table-bordered>tbody>tr:last-child>td,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>td,.panel>.table-bordered>tfoot>tr:last-child>td,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>td,.panel>.table-bordered>tbody>tr:last-child>th,.panel>.table-responsive>.table-bordered>tbody>tr:last-child>th,.panel>.table-bordered>tfoot>tr:last-child>th,.panel>.table-responsive>.table-bordered>tfoot>tr:last-child>th{border-bottom:0}.panel>.table-responsive{border:0;margin-bottom:0}.panel-group{margin-bottom:20px}.panel-group .panel{margin-bottom:0;border-radius:4px}.panel-group .panel+.panel{margin-top:5px}.panel-group .panel-heading{border-bottom:0}.panel-group 
.panel-heading+.panel-collapse>.panel-body,.panel-group .panel-heading+.panel-collapse>.list-group{border-top:1px solid #ddd}.panel-group .panel-footer{border-top:0}.panel-group .panel-footer+.panel-collapse .panel-body{border-bottom:1px solid #ddd}.panel-default{border-color:#ddd}.panel-default>.panel-heading{color:#333;background-color:#f5f5f5;border-color:#ddd}.panel-default>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ddd}.panel-default>.panel-heading .badge{color:#f5f5f5;background-color:#333}.panel-default>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ddd}.panel-primary{border-color:#337ab7}.panel-primary>.panel-heading{color:#fff;background-color:#337ab7;border-color:#337ab7}.panel-primary>.panel-heading+.panel-collapse>.panel-body{border-top-color:#337ab7}.panel-primary>.panel-heading .badge{color:#337ab7;background-color:#fff}.panel-primary>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#337ab7}.panel-success{border-color:#d6e9c6}.panel-success>.panel-heading{color:#3c763d;background-color:#dff0d8;border-color:#d6e9c6}.panel-success>.panel-heading+.panel-collapse>.panel-body{border-top-color:#d6e9c6}.panel-success>.panel-heading .badge{color:#dff0d8;background-color:#3c763d}.panel-success>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#d6e9c6}.panel-info{border-color:#bce8f1}.panel-info>.panel-heading{color:#31708f;background-color:#d9edf7;border-color:#bce8f1}.panel-info>.panel-heading+.panel-collapse>.panel-body{border-top-color:#bce8f1}.panel-info>.panel-heading .badge{color:#d9edf7;background-color:#31708f}.panel-info>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#bce8f1}.panel-warning{border-color:#faebcc}.panel-warning>.panel-heading{color:#8a6d3b;background-color:#fcf8e3;border-color:#faebcc}.panel-warning>.panel-heading+.panel-collapse>.panel-body{border-top-color:#faebcc}.panel-warning>.panel-heading .badge{color:#fcf8e3;background-color:#8a6d3b}.panel-warning>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#faebcc}.panel-danger{border-color:#ebccd1}.panel-danger>.panel-heading{color:#a94442;background-color:#f2dede;border-color:#ebccd1}.panel-danger>.panel-heading+.panel-collapse>.panel-body{border-top-color:#ebccd1}.panel-danger>.panel-heading .badge{color:#f2dede;background-color:#a94442}.panel-danger>.panel-footer+.panel-collapse>.panel-body{border-bottom-color:#ebccd1}.embed-responsive{position:relative;display:block;height:0;padding:0;overflow:hidden}.embed-responsive .embed-responsive-item,.embed-responsive iframe,.embed-responsive embed,.embed-responsive object,.embed-responsive video{position:absolute;top:0;left:0;bottom:0;height:100%;width:100%;border:0}.embed-responsive-16by9{padding-bottom:56.25%}.embed-responsive-4by3{padding-bottom:75%}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.05);box-shadow:inset 0 1px 1px rgba(0,0,0,0.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,0.15)}.well-lg{padding:24px;border-radius:6px}.well-sm{padding:9px;border-radius:3px}.close{float:right;font-size:21px;font-weight:bold;line-height:1;color:#000;text-shadow:0 1px 0 
#fff;opacity:.2;filter:alpha(opacity=20)}.close:hover,.close:focus{color:#000;text-decoration:none;cursor:pointer;opacity:.5;filter:alpha(opacity=50)}button.close{padding:0;cursor:pointer;background:transparent;border:0;-webkit-appearance:none}.modal-open{overflow:hidden}.modal{display:none;overflow:hidden;position:fixed;top:0;right:0;bottom:0;left:0;z-index:1050;-webkit-overflow-scrolling:touch;outline:0}.modal.fade .modal-dialog{-webkit-transform:translate(0, -25%);-ms-transform:translate(0, -25%);-o-transform:translate(0, -25%);transform:translate(0, -25%);-webkit-transition:-webkit-transform 0.3s ease-out;-moz-transition:-moz-transform 0.3s ease-out;-o-transition:-o-transform 0.3s ease-out;transition:transform 0.3s ease-out}.modal.in .modal-dialog{-webkit-transform:translate(0, 0);-ms-transform:translate(0, 0);-o-transform:translate(0, 0);transform:translate(0, 0)}.modal-open .modal{overflow-x:hidden;overflow-y:auto}.modal-dialog{position:relative;width:auto;margin:10px}.modal-content{position:relative;background-color:#fff;border:1px solid #999;border:1px solid rgba(0,0,0,0.2);border-radius:6px;-webkit-box-shadow:0 3px 9px rgba(0,0,0,0.5);box-shadow:0 3px 9px rgba(0,0,0,0.5);background-clip:padding-box;outline:0}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{opacity:0;filter:alpha(opacity=0)}.modal-backdrop.in{opacity:.5;filter:alpha(opacity=50)}.modal-header{padding:15px;border-bottom:1px solid #e5e5e5}.modal-header .close{margin-top:-2px}.modal-title{margin:0;line-height:1.42857143}.modal-body{position:relative;padding:15px}.modal-footer{padding:15px;text-align:right;border-top:1px solid #e5e5e5}.modal-footer .btn+.btn{margin-left:5px;margin-bottom:0}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer .btn-block+.btn-block{margin-left:0}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media (min-width:768px){.modal-dialog{width:600px;margin:30px auto}.modal-content{-webkit-box-shadow:0 5px 15px rgba(0,0,0,0.5);box-shadow:0 5px 15px rgba(0,0,0,0.5)}.modal-sm{width:300px}}@media (min-width:992px){.modal-lg{width:900px}}.tooltip{position:absolute;z-index:1070;display:block;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-style:normal;font-weight:normal;letter-spacing:normal;line-break:auto;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;white-space:normal;word-break:normal;word-spacing:normal;word-wrap:normal;font-size:12px;opacity:0;filter:alpha(opacity=0)}.tooltip.in{opacity:.9;filter:alpha(opacity=90)}.tooltip.top{margin-top:-3px;padding:5px 0}.tooltip.right{margin-left:3px;padding:0 5px}.tooltip.bottom{margin-top:3px;padding:5px 0}.tooltip.left{margin-left:-3px;padding:0 5px}.tooltip-inner{max-width:200px;padding:3px 8px;color:#fff;text-align:center;background-color:#000;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-left .tooltip-arrow{bottom:0;right:5px;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.top-right .tooltip-arrow{bottom:0;left:5px;margin-bottom:-5px;border-width:5px 5px 0;border-top-color:#000}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-width:5px 5px 5px 0;border-right-color:#000}.tooltip.left 
.tooltip-arrow{top:50%;right:0;margin-top:-5px;border-width:5px 0 5px 5px;border-left-color:#000}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-left .tooltip-arrow{top:0;right:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.tooltip.bottom-right .tooltip-arrow{top:0;left:5px;margin-top:-5px;border-width:0 5px 5px;border-bottom-color:#000}.popover{position:absolute;top:0;left:0;z-index:1060;display:none;max-width:276px;padding:1px;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-style:normal;font-weight:normal;letter-spacing:normal;line-break:auto;line-height:1.42857143;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;white-space:normal;word-break:normal;word-spacing:normal;word-wrap:normal;font-size:14px;background-color:#fff;background-clip:padding-box;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,0.2);box-shadow:0 5px 10px rgba(0,0,0,0.2)}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover-title{margin:0;padding:8px 14px;font-size:14px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;border-radius:5px 5px 0 0}.popover-content{padding:9px 14px}.popover>.arrow,.popover>.arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.popover>.arrow{border-width:11px}.popover>.arrow:after{border-width:10px;content:""}.popover.top>.arrow{left:50%;margin-left:-11px;border-bottom-width:0;border-top-color:#999;border-top-color:rgba(0,0,0,0.25);bottom:-11px}.popover.top>.arrow:after{content:" ";bottom:1px;margin-left:-10px;border-bottom-width:0;border-top-color:#fff}.popover.right>.arrow{top:50%;left:-11px;margin-top:-11px;border-left-width:0;border-right-color:#999;border-right-color:rgba(0,0,0,0.25)}.popover.right>.arrow:after{content:" ";left:1px;bottom:-10px;border-left-width:0;border-right-color:#fff}.popover.bottom>.arrow{left:50%;margin-left:-11px;border-top-width:0;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,0.25);top:-11px}.popover.bottom>.arrow:after{content:" ";top:1px;margin-left:-10px;border-top-width:0;border-bottom-color:#fff}.popover.left>.arrow{top:50%;right:-11px;margin-top:-11px;border-right-width:0;border-left-color:#999;border-left-color:rgba(0,0,0,0.25)}.popover.left>.arrow:after{content:" ";right:1px;border-right-width:0;border-left-color:#fff;bottom:-10px}.carousel{position:relative}.carousel-inner{position:relative;overflow:hidden;width:100%}.carousel-inner>.item{display:none;position:relative;-webkit-transition:.6s ease-in-out left;-o-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel-inner>.item>img,.carousel-inner>.item>a>img{line-height:1}@media all and (transform-3d),(-webkit-transform-3d){.carousel-inner>.item{-webkit-transition:-webkit-transform 0.6s ease-in-out;-moz-transition:-moz-transform 0.6s ease-in-out;-o-transition:-o-transform 0.6s ease-in-out;transition:transform 0.6s ease-in-out;-webkit-backface-visibility:hidden;-moz-backface-visibility:hidden;backface-visibility:hidden;-webkit-perspective:1000px;-moz-perspective:1000px;perspective:1000px}.carousel-inner>.item.next,.carousel-inner>.item.active.right{-webkit-transform:translate3d(100%, 0, 0);transform:translate3d(100%, 0, 
0);left:0}.carousel-inner>.item.prev,.carousel-inner>.item.active.left{-webkit-transform:translate3d(-100%, 0, 0);transform:translate3d(-100%, 0, 0);left:0}.carousel-inner>.item.next.left,.carousel-inner>.item.prev.right,.carousel-inner>.item.active{-webkit-transform:translate3d(0, 0, 0);transform:translate3d(0, 0, 0);left:0}}.carousel-inner>.active,.carousel-inner>.next,.carousel-inner>.prev{display:block}.carousel-inner>.active{left:0}.carousel-inner>.next,.carousel-inner>.prev{position:absolute;top:0;width:100%}.carousel-inner>.next{left:100%}.carousel-inner>.prev{left:-100%}.carousel-inner>.next.left,.carousel-inner>.prev.right{left:0}.carousel-inner>.active.left{left:-100%}.carousel-inner>.active.right{left:100%}.carousel-control{position:absolute;top:0;left:0;bottom:0;width:15%;opacity:.5;filter:alpha(opacity=50);font-size:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,0.6);background-color:rgba(0,0,0,0)}.carousel-control.left{background-image:-webkit-linear-gradient(left, rgba(0,0,0,0.5) 0, rgba(0,0,0,0.0001) 100%);background-image:-o-linear-gradient(left, rgba(0,0,0,0.5) 0, rgba(0,0,0,0.0001) 100%);background-image:linear-gradient(to right, rgba(0,0,0,0.5) 0, rgba(0,0,0,0.0001) 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#80000000', endColorstr='#00000000', GradientType=1)}.carousel-control.right{left:auto;right:0;background-image:-webkit-linear-gradient(left, rgba(0,0,0,0.0001) 0, rgba(0,0,0,0.5) 100%);background-image:-o-linear-gradient(left, rgba(0,0,0,0.0001) 0, rgba(0,0,0,0.5) 100%);background-image:linear-gradient(to right, rgba(0,0,0,0.0001) 0, rgba(0,0,0,0.5) 100%);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#00000000', endColorstr='#80000000', GradientType=1)}.carousel-control:hover,.carousel-control:focus{outline:0;color:#fff;text-decoration:none;opacity:.9;filter:alpha(opacity=90)}.carousel-control .icon-prev,.carousel-control .icon-next,.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right{position:absolute;top:50%;margin-top:-10px;z-index:5;display:inline-block}.carousel-control .icon-prev,.carousel-control .glyphicon-chevron-left{left:50%;margin-left:-10px}.carousel-control .icon-next,.carousel-control .glyphicon-chevron-right{right:50%;margin-right:-10px}.carousel-control .icon-prev,.carousel-control .icon-next{width:20px;height:20px;line-height:1;font-family:serif}.carousel-control .icon-prev:before{content:'\2039'}.carousel-control .icon-next:before{content:'\203a'}.carousel-indicators{position:absolute;bottom:10px;left:50%;z-index:15;width:60%;margin-left:-30%;padding-left:0;list-style:none;text-align:center}.carousel-indicators li{display:inline-block;width:10px;height:10px;margin:1px;text-indent:-999px;border:1px solid #fff;border-radius:10px;cursor:pointer;background-color:#000 \9;background-color:rgba(0,0,0,0)}.carousel-indicators .active{margin:0;width:12px;height:12px;background-color:#fff}.carousel-caption{position:absolute;left:15%;right:15%;bottom:20px;z-index:10;padding-top:20px;padding-bottom:20px;color:#fff;text-align:center;text-shadow:0 1px 2px rgba(0,0,0,0.6)}.carousel-caption .btn{text-shadow:none}@media screen and (min-width:768px){.carousel-control .glyphicon-chevron-left,.carousel-control .glyphicon-chevron-right,.carousel-control .icon-prev,.carousel-control .icon-next{width:30px;height:30px;margin-top:-10px;font-size:30px}.carousel-control .glyphicon-chevron-left,.carousel-control 
.icon-prev{margin-left:-10px}.carousel-control .glyphicon-chevron-right,.carousel-control .icon-next{margin-right:-10px}.carousel-caption{left:20%;right:20%;padding-bottom:30px}.carousel-indicators{bottom:20px}}.clearfix:before,.clearfix:after,.dl-horizontal dd:before,.dl-horizontal dd:after,.container:before,.container:after,.container-fluid:before,.container-fluid:after,.row:before,.row:after,.form-horizontal .form-group:before,.form-horizontal .form-group:after,.btn-toolbar:before,.btn-toolbar:after,.btn-group-vertical>.btn-group:before,.btn-group-vertical>.btn-group:after,.nav:before,.nav:after,.navbar:before,.navbar:after,.navbar-header:before,.navbar-header:after,.navbar-collapse:before,.navbar-collapse:after,.pager:before,.pager:after,.panel-body:before,.panel-body:after,.modal-header:before,.modal-header:after,.modal-footer:before,.modal-footer:after{content:" ";display:table}.clearfix:after,.dl-horizontal dd:after,.container:after,.container-fluid:after,.row:after,.form-horizontal .form-group:after,.btn-toolbar:after,.btn-group-vertical>.btn-group:after,.nav:after,.navbar:after,.navbar-header:after,.navbar-collapse:after,.pager:after,.panel-body:after,.modal-header:after,.modal-footer:after{clear:both}.center-block{display:block;margin-left:auto;margin-right:auto}.pull-right{float:right !important}.pull-left{float:left !important}.hide{display:none !important}.show{display:block !important}.invisible{visibility:hidden}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.hidden{display:none !important}.affix{position:fixed}@-ms-viewport{width:device-width}.visible-xs,.visible-sm,.visible-md,.visible-lg{display:none !important}.visible-xs-block,.visible-xs-inline,.visible-xs-inline-block,.visible-sm-block,.visible-sm-inline,.visible-sm-inline-block,.visible-md-block,.visible-md-inline,.visible-md-inline-block,.visible-lg-block,.visible-lg-inline,.visible-lg-inline-block{display:none !important}@media (max-width:767px){.visible-xs{display:block !important}table.visible-xs{display:table !important}tr.visible-xs{display:table-row !important}th.visible-xs,td.visible-xs{display:table-cell !important}}@media (max-width:767px){.visible-xs-block{display:block !important}}@media (max-width:767px){.visible-xs-inline{display:inline !important}}@media (max-width:767px){.visible-xs-inline-block{display:inline-block !important}}@media (min-width:768px) and (max-width:991px){.visible-sm{display:block !important}table.visible-sm{display:table !important}tr.visible-sm{display:table-row !important}th.visible-sm,td.visible-sm{display:table-cell !important}}@media (min-width:768px) and (max-width:991px){.visible-sm-block{display:block !important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline{display:inline !important}}@media (min-width:768px) and (max-width:991px){.visible-sm-inline-block{display:inline-block !important}}@media (min-width:992px) and (max-width:1199px){.visible-md{display:block !important}table.visible-md{display:table !important}tr.visible-md{display:table-row !important}th.visible-md,td.visible-md{display:table-cell !important}}@media (min-width:992px) and (max-width:1199px){.visible-md-block{display:block !important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline{display:inline !important}}@media (min-width:992px) and (max-width:1199px){.visible-md-inline-block{display:inline-block !important}}@media (min-width:1200px){.visible-lg{display:block !important}table.visible-lg{display:table 
!important}tr.visible-lg{display:table-row !important}th.visible-lg,td.visible-lg{display:table-cell !important}}@media (min-width:1200px){.visible-lg-block{display:block !important}}@media (min-width:1200px){.visible-lg-inline{display:inline !important}}@media (min-width:1200px){.visible-lg-inline-block{display:inline-block !important}}@media (max-width:767px){.hidden-xs{display:none !important}}@media (min-width:768px) and (max-width:991px){.hidden-sm{display:none !important}}@media (min-width:992px) and (max-width:1199px){.hidden-md{display:none !important}}@media (min-width:1200px){.hidden-lg{display:none !important}}.visible-print{display:none !important}@media print{.visible-print{display:block !important}table.visible-print{display:table !important}tr.visible-print{display:table-row !important}th.visible-print,td.visible-print{display:table-cell !important}}.visible-print-block{display:none !important}@media print{.visible-print-block{display:block !important}}.visible-print-inline{display:none !important}@media print{.visible-print-inline{display:inline !important}}.visible-print-inline-block{display:none !important}@media print{.visible-print-inline-block{display:inline-block !important}}@media print{.hidden-print{display:none !important}} \ No newline at end of file diff --git a/teaching.html b/teaching.html new file mode 100644 index 0000000..3f29638 --- /dev/null +++ b/teaching.html @@ -0,0 +1,172 @@ + + + + + + + + + + Teaching and Service | Angel Xuan Chang + + + + + + + + + + + + + +
+
+ + +

Angel Xuan Chang

+ +
+
+

+ I am an Associate Professor at Simon Fraser University. + Prior to this, I was a visiting research scientist at Facebook AI Research and a research scientist at Eloquent Labs working on dialogue. I received my Ph.D. in Computer Science from Stanford, where I was part of the Natural Language Processing Group and advised by Chris Manning. + My research focuses on connecting language to 3D representations of shapes and scenes and grounding of language for embodied agents in indoor environments. I have worked on methods for synthesizing 3D scenes and shapes from natural language, and various datasets for 3D scene understanding. In general, I am interested in the semantics of shapes and scenes, the representation and acquisition of common sense knowledge, and reasoning using probabilistic models. + Some of my other interests include drawing and dance. +

+

+ +

+
+
+ Angel Xuan Chang +
+ angelx-{at}-sfu-[dot]-ca +

+ Associate Professor
+ School of Computing Science
+ Simon Fraser University
+ 3DLG + | GrUVi + | SFU NatLang
+ SFU AI/ML + | VINCI
+ Canada CIFAR AI Chair (Amii)
+ TUM-IAS Hans Fischer Fellow (2018-2022)
+ Google Scholar +
+
+
+ + + +
+
+ +
+
+

Classes

+
+
+ Spring 2025 - CMPT 413/713 Natural Language Processing
+ Spring 2025 - CMPT 839 Advanced NLP
+ Spring 2024 - CMPT 413/713 Natural Language Processing
+ Spring 2023 - CMPT 983 Grounded Natural Language Understanding
+ Spring 2023 - CMPT 713 Natural Language Processing
+ Fall 2022 - CMPT 413/713 Natural Language Processing
+ Spring 2022 - CMPT 983 Grounded Natural Language Understanding
+ Fall 2021 - CMPT 413/713 Natural Language Processing
+ Spring 2021 - CMPT 983 Grounded Natural Language Understanding
+ Fall 2020 - CMPT 413/825 Natural Language Processing
+ Spring 2020 - CMPT 825 Natural Language Processing +
+
+ +
+
+

Service

+
+
+

Conference and workshop organization

+ Conferences + + Workshops + +

Area chair / senior program committee

+
    +
  • ICLR (2020-2021, 2023-2024), NeurIPS (2023-2024), NeurIPS Datasets and Benchmarks (2023-2024)
  • CVPR (2020-2021, 2023-2024), ICCV (2023), 3DV (2024)
  • SIGGRAPH Asia (2020)
  • IJCAI (2021), AAAI (2021, 2023)
+

Journal / rolling reviews editor

+
    +
  • ARR (ACL Rolling Review) action editor
  • CGF (Computer Graphics Forum) associate editor (2021-2024)
+

Reviewer

+
    +
  • ACL (outstanding reviewer 2018), NAACL, EMNLP, EACL, COLING, AKBC, SLSP
  • AAAI, ICML (top reviewer 2020, 2022), NeurIPS (top reviewer 2020), TMLR
  • SIGGRAPH, SIGGRAPH Asia, Eurographics, 3DV, UIST, GI, CRV
+
+
+ +
+
+
+
+

+ Last updated at 2025-01-04T18:18:50.725Z +

+
+
+
diff --git a/topics.json b/topics.json
new file mode 100644
index 0000000..ac7ec89
--- /dev/null
+++ b/topics.json
@@ -0,0 +1,44 @@
+[
+  {
+    "name": "Grounding language to 3D",
+    "description": "I'm interested in how we can create AI systems for describing and localizing 3D objects in scenes.",
+    "tagsets": [["vl", "3d"]],
+    "img": ""
+  },
+  {
+    "name": "Language based content creation",
+    "description": "I'm also interested in using language to assist with the creation of 3D content. 3D interfaces can be challenging to use and natural language interfaces can make it easier for anyone to create 3D objects and generate 3D scenes.",
+    "tagsets": [["text2shape"], ["text2scene"]],
+    "img": ""
+  },
+  {
+    "name": "Embodied AI",
+    "description": "In embodied AI, we want to study how an artificial agent can learn to act based on sensory perception.",
+    "tagsets": [["embodied", "!simulator", "!dataset"]],
+    "img": ""
+  },
+  {
+    "name": "Simulation platforms",
+    "description": "To foster research in the area of embodied AI, we need to have easy-to-use simulation platforms.",
+    "tagsets": [["simulator"]],
+    "img": ""
+  },
+  {
+    "name": "Articulated objects for interactive environments",
+    "description": "",
+    "tagsets": [["articulations", "!simulator"]],
+    "img": ""
+  },
+  {
+    "name": "Large-scale datasets for 3D deep learning",
+    "description": "Another focus of my work is the creation of large-scale 3D datasets. With data, we can develop new models and tasks for 3D understanding and generation. These assets are also essential for training embodied AI agents to learn to move and interact in 3D environments.",
+    "tagsets": [["3d", "dataset"]],
+    "img": ""
+  },
+  {
+    "name": "3D scene understanding and generation",
+    "description": "",
+    "tagsets": [["3d", "scene", "!dataset"]],
+    "img": ""
+  }
+]
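Note on topics.json: each topic carries a "tagsets" field, a list of tag queries, where a "!"-prefixed tag presumably means the tag must be absent. The sketch below is a minimal, hypothetical illustration of how such tagsets might be matched against tagged publications or projects; it is not the site's actual script, and the Topic/Project shapes, the projectsForTopic helper, and the negation semantics are assumptions.

// Hypothetical sketch (TypeScript), not the site's actual code.
// Assumption: a tagset matches a project when every plain tag is present
// and every "!"-prefixed tag is absent; a topic matches when any of its
// tagsets matches.

interface Topic {
  name: string;
  description: string;
  tagsets: string[][];
  img: string;
}

interface Project {
  title: string;
  tags: string[];
}

// Check a single tagset against a project's tags.
function tagsetMatches(tagset: string[], projectTags: Set<string>): boolean {
  return tagset.every((tag) =>
    tag.startsWith("!") ? !projectTags.has(tag.slice(1)) : projectTags.has(tag)
  );
}

// Select the projects that belong under a given topic.
function projectsForTopic(topic: Topic, projects: Project[]): Project[] {
  return projects.filter((p) => {
    const tags = new Set(p.tags);
    return topic.tagsets.some((tagset) => tagsetMatches(tagset, tags));
  });
}

// Example usage with made-up data:
const topic: Topic = {
  name: "Embodied AI",
  description: "",
  tagsets: [["embodied", "!simulator", "!dataset"]],
  img: "",
};
const projects: Project[] = [
  { title: "A navigation agent", tags: ["embodied", "vln"] },
  { title: "A simulation platform", tags: ["embodied", "simulator"] },
];
console.log(projectsForTopic(topic, projects).map((p) => p.title)); // ["A navigation agent"]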