{"data":{"featured":{"edges":[{"node":{"frontmatter":{"title":"VQA in Pathology","cover":{"childImageSharp":{"gatsbyImageData":{"layout":"constrained","placeholder":{"fallback":"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABQAAAAPCAYAAADkmO9VAAAACXBIWXMAABYlAAAWJQFJUiTwAAAEJUlEQVQ4yyWRbVDTBRzH/73orvP0urLe1HV2athpoRd6h4Et9VBL7EVpdYlyAqbHAHkYJhtoJj4wAQczJ0+D5RNsMB2wB/fA2BibYIGgNmCDDZXKTtFKLcX8dKwXn/u++H3u++L7Ezo6nYjFYk59fwL5gYOIszLJztlFTk4O9aoqVAoFUlkBB+QlZOZLOFxUREPxd5gq5RTIpOyTySgslJGalYW+1YBQUVHBqoULKfnycyRrE4meG4UgCMS+8y7b13/G8cJDZKakMHvOG8S/+goF8+exN2oeLRtikMUsQBI1j6Kli5n1/HOk7spCMOv1HEvZTLM0D9XOLAqTdxATvZidm7Yiz/8WfXUjNu0FVsXFIv8ogcPxcSjj42hP2oArOZH6tSJ0W75AvGwJuVIpQn2dmtyN63AqCmgsykYuTiMhbjm7tyVTszeXqv15NJ4sJ255LN+I4ildvRJlXCzWLevp2JrI6U8TKP9gBdvefos8mRRBd+409dIkLtXuobu2gPNlOSRtWIOmKIMzJTnoFBIaymRExbyHaO5cZMuWUit6n9bElVxIFFG3egUVqz7k9dkvkz1dWKmsZGvCMqyKbAyl2Xz1sYiZM2eQ/skaysRJ7NuxCdHypby5aCHCjBeY9dKLrJgzh43RS1i3KJo18xew4PXXIrtPP02wO+ykpaWhrVOhPHqEnenppIvT2ZOfx/HSEnZL8pDkSyjcv5/tGRl8nZVJam4+4kOVbCtWklx4mNTMDDanpnBWp0OYmJjAaDRhaDNiNFmwWCzYbDYuGFoxWS7SbjTh83rp7e2lrbUNk9GEqb0dp92Gt8uNx+3C5ezE4/EQDocRQqExzp75AX1LE826c2ibzmI0GrBZTTjsFizmNjq7OvH5PFjM7disZqxWEy6XA5+vC2+3G5/XjdvVQTAQRAjfvIP2vAuzrQ+zrR+z/QrdveN4esN4esJ09YQZ8P1MV7cfk60Ph9uP0zOC+1IIl3cUl28Mly9Ep8/P2M0bCIPBCY6oWzmhc6JqcUXQWPrRWPoiqM19mJodNBjcHFEbUDbZOXm+C83FfhosfRE0FwdQG51cvtaDEPz1CtVtB2h0VaDzKCPYhzQ4hjWRtA030NejwXy5lrr2YrTuYxj7q3AMqbH71ZF0BDQ4Lh/nxnUnwoPbP3LNVkjIK+dWz1EeB1QQruHpWBXPQtU8CZ7gYU8Vvw/U4O8o5lZvOfcGlTBex7PQtFfNVLCGPwMKJu/aEP4e/YnRcwcZ1ysYay5jpElOqLmMXwyVhFrKeWA9yZTrDJPOUwSajhJuURDUljGiLSWsV3DHrOKJS8Mjq4p//G6Eqft3mBy6wh/B69wPXGNyeJB7I1f5a/Q69wJXeTw+xL+/hXh0M8jdoYH/nZFB7g4PRO4PQ36e3gowNe1N3uY/a/ZDVTENq0sAAAAASUVORK5CYII="},"images":{"fallback":{"src":"/static/bd9ec1e23e2909dbce4863b0793dd082/16bc0/VQA.png","srcSet":"/static/bd9ec1e23e2909dbce4863b0793dd082/d8ee9/VQA.png 175w,\n/static/bd9ec1e23e2909dbce4863b0793dd082/5c755/VQA.png 350w,\n/static/bd9ec1e23e2909dbce4863b0793dd082/16bc0/VQA.png 
700w","sizes":"(min-width: 700px) 700px, 100vw"},"sources":[{"srcSet":"/static/bd9ec1e23e2909dbce4863b0793dd082/ea096/VQA.avif 175w,\n/static/bd9ec1e23e2909dbce4863b0793dd082/758bc/VQA.avif 350w,\n/static/bd9ec1e23e2909dbce4863b0793dd082/23e81/VQA.avif 700w","type":"image/avif","sizes":"(min-width: 700px) 700px, 100vw"},{"srcSet":"/static/bd9ec1e23e2909dbce4863b0793dd082/93fe5/VQA.webp 175w,\n/static/bd9ec1e23e2909dbce4863b0793dd082/a9c60/VQA.webp 350w,\n/static/bd9ec1e23e2909dbce4863b0793dd082/e1299/VQA.webp 700w","type":"image/webp","sizes":"(min-width: 700px) 700px, 100vw"}]},"width":700,"height":525}}},"tech":["Pytorch","Pathology Dataset","Python"],"github":"","external":"https://paperswithcode.com/paper/towards-visual-question-answering-on","cta":null},"html":"<p>In this paper, we aim to develop a pathological visual question answering framework to analyze pathology images and answer medical questions related to these images.\nTo build such a framework, we create PathVQA, a VQA dataset with 32,795 questions asked from 4,998 pathology images.</p>"}},{"node":{"frontmatter":{"title":"Code 
Graph","cover":{"childImageSharp":{"gatsbyImageData":{"layout":"constrained","placeholder":{"fallback":"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABQAAAALCAYAAAB/Ca1DAAAACXBIWXMAABYlAAAWJQFJUiTwAAAByElEQVQoz22STW8TMRCG9///EA4cEFy4IKpeEAWUCEEJERQ1KmnaJEt2s7bXH2Pvg+wlHwJGemXLmnnm9djVMAwcIu//VkrpqBACfd/je4vUj2WfZYyhaZqyVueg8+IY41EZlOWcR5mefrvATF6hzQh0zo2NvD8BM+Rfh4khpZwwOvQaGxN68w69fU9IYx6cav4LzAlHSIwYEXa6pV1d4iTgXUN7+wKlt1gZmx4Y1bmjQ6QBJEWcCCZ4THCEAfR2yn7xHJ8G9PqK5vsTbL8lFlPp5DBHjIKN0NXXdHcvi5O9UnRd96f7gBdP7w1BBBcskYT3CjW/IMWYL0VlVq9xfV0c2AFU39I1K9r9HqV1KY5ZMSJZ40AIgK0faS+e4duH4y+pnL5HOYM4Rbj/hI8RF4QkoUDKbEvyCNGLr9jFHA940+HXS4azb1elfN3cMYFq1kiU4iKflznGAfNwg2prWolsNxvqzTi3Q15orhlSLNAq05MYbD3FqruS6FYL9Oe35VG0bdjNnvJreUWfH6mZodsZbvWF0CzLAEL3ozxKARaronHrN4R2Xg7l5w1ucjn+nrDH7z4g3bdS7Ospvp7glx+R+pbzyFf+DZCEVeLnKYQrAAAAAElFTkSuQmCC"},"images":{"fallback":{"src":"/static/f0d3a5d0c54fd0a689eeca8a7761f3c4/10252/codegraph.png","srcSet":"/static/f0d3a5d0c54fd0a689eeca8a7761f3c4/68202/codegraph.png 175w,\n/static/f0d3a5d0c54fd0a689eeca8a7761f3c4/fe25d/codegraph.png 350w,\n/static/f0d3a5d0c54fd0a689eeca8a7761f3c4/10252/codegraph.png 700w,\n/static/f0d3a5d0c54fd0a689eeca8a7761f3c4/bdf03/codegraph.png 1400w","sizes":"(min-width: 700px) 700px, 100vw"},"sources":[{"srcSet":"/static/f0d3a5d0c54fd0a689eeca8a7761f3c4/1934d/codegraph.avif 175w,\n/static/f0d3a5d0c54fd0a689eeca8a7761f3c4/8f8aa/codegraph.avif 350w,\n/static/f0d3a5d0c54fd0a689eeca8a7761f3c4/62eae/codegraph.avif 700w,\n/static/f0d3a5d0c54fd0a689eeca8a7761f3c4/1d19c/codegraph.avif 1400w","type":"image/avif","sizes":"(min-width: 700px) 700px, 100vw"},{"srcSet":"/static/f0d3a5d0c54fd0a689eeca8a7761f3c4/745fa/codegraph.webp 175w,\n/static/f0d3a5d0c54fd0a689eeca8a7761f3c4/b0f73/codegraph.webp 350w,\n/static/f0d3a5d0c54fd0a689eeca8a7761f3c4/ea0a8/codegraph.webp 700w,\n/static/f0d3a5d0c54fd0a689eeca8a7761f3c4/5eadd/codegraph.webp 1400w","type":"image/webp","sizes":"(min-width: 700px) 700px, 
100vw"}]},"width":700,"height":399}}},"tech":["Graphviz","Networkx","Python","Tree sitter"],"github":"https://github.com/weiwenlan/PYSA","external":"https://weiwenlan.github.io/code-graph-test/","cta":""},"html":"<p>A toolkit to parse and draw a code graph. The inner structure of the project is represented by a graph.\nUse this tool to parse any code you want to learn and summarize.</p>"}},{"node":{"frontmatter":{"title":"Classification with Robotic Arm","cover":{"childImageSharp":{"gatsbyImageData":{"layout":"constrained","placeholder":{"fallback":"data:image/jpeg;base64,/9j/2wBDABALDA4MChAODQ4SERATGCgaGBYWGDEjJR0oOjM9PDkzODdASFxOQERXRTc4UG1RV19iZ2hnPk1xeXBkeFxlZ2P/2wBDARESEhgVGC8aGi9jQjhCY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2NjY2P/wgARCAAMABQDASIAAhEBAxEB/8QAFgABAQEAAAAAAAAAAAAAAAAAAAID/8QAFQEBAQAAAAAAAAAAAAAAAAAAAAH/2gAMAwEAAhADEAAAAWbGyFD/xAAcEAABAwUAAAAAAAAAAAAAAAACABESAQMTISL/2gAIAQEAAQUClkukXRltonUHUF//xAAYEQACAwAAAAAAAAAAAAAAAAAAEQESIf/aAAgBAwEBPwFvJLH/xAAYEQACAwAAAAAAAAAAAAAAAAAAAQIRIf/aAAgBAgEBPwFqtFA//8QAHhAAAgEDBQAAAAAAAAAAAAAAAAExAgMhESIyUXH/2gAIAQEABj8C3cV0zFTfpi6x6VOSWSz/xAAaEAEBAQEBAQEAAAAAAAAAAAABEQAxIUFR/9oACAEBAAE/Ia7BEaXJZ8AK81OXPxywl98dcrdwTH//2gAMAwEAAgADAAAAENMf/8QAGBEAAwEBAAAAAAAAAAAAAAAAAAERIaH/2gAIAQMBAT8QSVoOXJ0//8QAFxEBAQEBAAAAAAAAAAAAAAAAAQARIf/aAAgBAgEBPxDgECbt/8QAHhABAAICAQUAAAAAAAAAAAAAAREhADFBUXGBodH/2gAIAQEAAT8QgBG5NW7DRrJlgqKJUl8xggjgIeGsJgilFr2wmdasl+sC3T1j5n//2Q=="},"images":{"fallback":{"src":"/static/efdf89a8cbb878ece642f070d72d099e/fc169/ic.jpg","srcSet":"/static/efdf89a8cbb878ece642f070d72d099e/63d33/ic.jpg 175w,\n/static/efdf89a8cbb878ece642f070d72d099e/d4efe/ic.jpg 350w,\n/static/efdf89a8cbb878ece642f070d72d099e/fc169/ic.jpg 700w,\n/static/efdf89a8cbb878ece642f070d72d099e/dc68b/ic.jpg 1400w","sizes":"(min-width: 700px) 700px, 100vw"},"sources":[{"srcSet":"/static/efdf89a8cbb878ece642f070d72d099e/becf4/ic.avif 175w,\n/static/efdf89a8cbb878ece642f070d72d099e/e54d8/ic.avif 
350w,\n/static/efdf89a8cbb878ece642f070d72d099e/60e9e/ic.avif 700w,\n/static/efdf89a8cbb878ece642f070d72d099e/dbea9/ic.avif 1400w","type":"image/avif","sizes":"(min-width: 700px) 700px, 100vw"},{"srcSet":"/static/efdf89a8cbb878ece642f070d72d099e/d905c/ic.webp 175w,\n/static/efdf89a8cbb878ece642f070d72d099e/37212/ic.webp 350w,\n/static/efdf89a8cbb878ece642f070d72d099e/04f78/ic.webp 700w,\n/static/efdf89a8cbb878ece642f070d72d099e/7777e/ic.webp 1400w","type":"image/webp","sizes":"(min-width: 700px) 700px, 100vw"}]},"width":700,"height":409}}},"tech":["Robotic Arm","Arduino","Faster-RCNN","Calibration"],"github":"https://github.com/weiwenlan/Trash-classification-robotic-arm","external":"","cta":null},"html":"<p>Classification of trash with a 3D Camera and a Robotic Arm.\nUsing the Faster-RCNN model, the trash is classified and the arm is calibrated to move to the trash.</p>"}},{"node":{"frontmatter":{"title":"Lightweight Super Resolution","cover":{"childImageSharp":{"gatsbyImageData":{"layout":"constrained","placeholder":{"fallback":"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABQAAAALCAYAAAB/Ca1DAAAACXBIWXMAABYlAAAWJQFJUiTwAAACcUlEQVQoz2WSy2sUQRCH9z/x5sWbdy9eBP8OEU8eBEGUqBHRg4pvQRRFUfBBRFQ0Jj4iokajSVgTA9F9ZHezM7s72Z2d6cfMdM98Mru+raa6qovuH9X9dYEsI8tS0jxGEUYKjJRYpUhE+F+e70mtIctHlvGnpWlKIba/i1aECBEjw5CW4xBFKYHv4zouUWQJg4DA8xg08eNMcfYt42M3cKqlwbrw+s4ForBHsdTn1XSd5OsI7uwIX6setrSf1Y+jlGttzPJumsWTlL81mFtqMDZ/neVakanLozw7sYP7V04PBR9fOIDbqHBrosSbD1WCFxv48mQ71RUPf3I9C093Ui2vsDa+jsVne6lVelx68JKr00eZX5hj5uIeFk7t4MG1s0NBnViiOEZpDVlK1q6QrTbAbZHVlshaLnTa4Jah0QQhCUSfyGp63S53r13k6onDTD1/OhTMpyiKUEpircUkKdbkMcGYbABAypR3y4ssOm/JLAOIxqRonfwFJYdUyLvKPReVUqGk+OESIUJSI3lf7DEyNsp0+R42yhBS4PseKnSQfgfhNej6IcYYCmnGL2L88w1+Wj+G5U4F80dNOFN0Px+k9/AY7cnzvCx20UpRCFpztOYP03JdQhXhL76gPXGGtdlHtB8fx/v0iLXJswSzD/EmztGZG0fGKT3PYan0jq5bQYs+vX5IkiQUgspN6pNbqNYclAX38jbqx7fSvLmL2pHNNO+OsHJoE6u391Ddt5G1mftoy+BJPjdm6AZ9+kFIp9MmjmMKUgRorTDGDuEEXZQM0UoMo1Yo0UdrjQp9dJwMciklnXZncLNmc5V6vTEQ/A55vyNNiVJkzwAAAABJRU5E
rkJggg=="},"images":{"fallback":{"src":"/static/bda85394777dd03cf5bd0e4a9f4dfb7d/70e91/net.png","srcSet":"/static/bda85394777dd03cf5bd0e4a9f4dfb7d/a5943/net.png 175w,\n/static/bda85394777dd03cf5bd0e4a9f4dfb7d/692d7/net.png 350w,\n/static/bda85394777dd03cf5bd0e4a9f4dfb7d/70e91/net.png 700w,\n/static/bda85394777dd03cf5bd0e4a9f4dfb7d/1e881/net.png 1400w","sizes":"(min-width: 700px) 700px, 100vw"},"sources":[{"srcSet":"/static/bda85394777dd03cf5bd0e4a9f4dfb7d/09a8e/net.avif 175w,\n/static/bda85394777dd03cf5bd0e4a9f4dfb7d/4f9e8/net.avif 350w,\n/static/bda85394777dd03cf5bd0e4a9f4dfb7d/3dd9f/net.avif 700w,\n/static/bda85394777dd03cf5bd0e4a9f4dfb7d/273c0/net.avif 1400w","type":"image/avif","sizes":"(min-width: 700px) 700px, 100vw"},{"srcSet":"/static/bda85394777dd03cf5bd0e4a9f4dfb7d/60cd3/net.webp 175w,\n/static/bda85394777dd03cf5bd0e4a9f4dfb7d/8f9e7/net.webp 350w,\n/static/bda85394777dd03cf5bd0e4a9f4dfb7d/4c894/net.webp 700w,\n/static/bda85394777dd03cf5bd0e4a9f4dfb7d/fb4a5/net.webp 1400w","type":"image/webp","sizes":"(min-width: 700px) 700px, 100vw"}]},"width":700,"height":379}}},"tech":["Pytorch","Android","Lightweight Network","NCNN"],"github":"https://github.com/weiwenlan/Mobile-Lightweight-Super-Resolution-Construction-System","external":"https://ieeexplore.ieee.org/abstract/document/9045996","cta":null},"html":"<p>Use NCNN to implement a lightweight super resolution system on the Android platform.\nSuper resolution network is implemented by lightweight and modified versions of the <a href=\"https://arxiv.org/abs/1905.02244\" target=\"_blank\" rel=\"nofollow noopener noreferrer\">MobileNetV3</a> models.</p>"}}]}}}