<?xml version="1.0" encoding="utf-8" standalone="yes"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
    <channel>
        <title>Embodied AI on Producthunt daily</title>
        <link>https://producthunt.programnotes.cn/en/tags/embodied-ai/</link>
        <description>Recent content in Embodied AI on Producthunt daily</description>
        <generator>Hugo -- gohugo.io</generator>
        <language>en</language>
        <lastBuildDate>Wed, 10 Sep 2025 15:27:30 +0800</lastBuildDate><atom:link href="https://producthunt.programnotes.cn/en/tags/embodied-ai/index.xml" rel="self" type="application/rss+xml" /><item>
        <title>XLeRobot</title>
        <link>https://producthunt.programnotes.cn/en/p/xlerobot/</link>
        <pubDate>Wed, 10 Sep 2025 15:27:30 +0800</pubDate>
        
        <guid>https://producthunt.programnotes.cn/en/p/xlerobot/</guid>
        <description>&lt;img src="https://images.unsplash.com/photo-1721030019872-8fc1912887a1?ixid=M3w0NjAwMjJ8MHwxfHJhbmRvbXx8fHx8fHx8fDE3NTc0ODkyMzB8&amp;ixlib=rb-4.1.0" alt="Featured image of post XLeRobot" /&gt;&lt;h1 id=&#34;vector-wangelxlerobot&#34;&gt;&lt;a class=&#34;link&#34; href=&#34;https://github.com/Vector-Wangel/XLeRobot&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Vector-Wangel/XLeRobot&lt;/a&gt;
&lt;/h1&gt;&lt;h1 id=&#34;xlerobot-&#34;&gt;&lt;a class=&#34;link&#34; href=&#34;https://xlerobot.readthedocs.io/en/latest/index.html&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;XLeRobot 🤖&lt;/a&gt;
&lt;/h1&gt;&lt;p&gt;&lt;a class=&#34;link&#34; href=&#34;README.md&#34; &gt;&lt;img src=&#34;https://img.shields.io/badge/lang-en-blue.svg&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;en&#34;
	
	
&gt;&lt;/a&gt;
&lt;a class=&#34;link&#34; href=&#34;README_CN.md&#34; &gt;&lt;img src=&#34;https://img.shields.io/badge/lang-%e4%b8%ad%e6%96%87-brown.svg&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;中文&#34;
	
	
&gt;&lt;/a&gt;&lt;/p&gt;
&lt;a href=&#34;https://xlerobot.readthedocs.io/en/latest/index.html&#34;&gt;
  &lt;img width=&#34;1725&#34; height=&#34;1140&#34; alt=&#34;front&#34; src=&#34;https://github.com/user-attachments/assets/f9c454ee-2c46-42b4-a5d7-88834a1c95ab&#34; /&gt;
&lt;/a&gt;
&lt;h2 id=&#34;discord&#34;&gt;&lt;a class=&#34;link&#34; href=&#34;https://opensource.org/licenses/Apache-2.0&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;&lt;img src=&#34;https://img.shields.io/badge/License-Apache%202.0-blue.svg&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;Apache License&#34;
	
	
&gt;&lt;/a&gt;
&lt;a class=&#34;link&#34; href=&#34;https://twitter.com/VectorWang2&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;&lt;img src=&#34;https://img.shields.io/twitter/follow/VectorWang?style=social&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;Twitter/X&#34;
	
	
&gt;&lt;/a&gt;
&lt;a class=&#34;link&#34; href=&#34;https://xlerobot.readthedocs.io/en/latest/&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;&lt;img src=&#34;https://img.shields.io/badge/docs-passing-brightgreen.svg&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;Docs status&#34;
	
	
&gt;&lt;/a&gt;
&lt;a class=&#34;link&#34; href=&#34;https://discord.gg/bjZveEUh6F&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;&lt;img src=&#34;https://img.shields.io/badge/Discord-XLeRobot-7289da?style=flat&amp;amp;logo=discord&amp;amp;logoColor=white&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;Discord&#34;
	
	
&gt;&lt;/a&gt;
&lt;/h2&gt;&lt;p&gt;&lt;strong&gt;🚀 Bringing Embodied AI to Everyone - Cheaper Than an iPhone! 📱&lt;/strong&gt;&lt;br&gt;
&lt;strong&gt;💵 Starts from $660 cost and ⏰ &amp;lt;4hrs total assembly time!!&lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;&lt;em&gt;Built upon the giants: &lt;a class=&#34;link&#34; href=&#34;https://github.com/huggingface/lerobot&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;LeRobot&lt;/a&gt;, &lt;a class=&#34;link&#34; href=&#34;https://github.com/TheRobotStudio/SO-ARM100&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;SO-100/SO-101&lt;/a&gt;, &lt;a class=&#34;link&#34; href=&#34;https://github.com/SIGRobotics-UIUC/LeKiwi&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Lekiwi&lt;/a&gt;, &lt;a class=&#34;link&#34; href=&#34;https://github.com/timqian/bambot&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Bambot&lt;/a&gt;&lt;/em&gt;&lt;/p&gt;
&lt;table&gt;
  &lt;tr&gt;
    &lt;td&gt;&lt;img src=&#34;https://github.com/user-attachments/assets/17e31979-bd5e-4790-be70-566ea8bb181e&#34; width=&#34;250&#34;/&gt;&lt;/td&gt;
    &lt;td&gt;&lt;img src=&#34;https://github.com/user-attachments/assets/96ff4a3e-3402-47a2-bc6b-b45137ee3fdd&#34; width=&#34;250&#34;/&gt;&lt;/td&gt;
    &lt;td&gt;&lt;img src=&#34;https://github.com/user-attachments/assets/f6d52acc-bc8d-46f6-b3cd-8821f0306a7f&#34; width=&#34;250&#34;/&gt;&lt;/td&gt;
  &lt;/tr&gt;
  &lt;tr&gt;
    &lt;td&gt;&lt;img src=&#34;https://github.com/user-attachments/assets/59086300-3e6f-4a3c-b5e0-db893eeabc0c&#34; width=&#34;250&#34;/&gt;&lt;/td&gt;
    &lt;td&gt;&lt;img src=&#34;https://github.com/user-attachments/assets/4ddbc0ff-ca42-4ad0-94c6-4e0f4047fd01&#34; width=&#34;250&#34;/&gt;&lt;/td&gt;
    &lt;td&gt;&lt;img src=&#34;https://github.com/user-attachments/assets/7abc890e-9c9c-4983-8b25-122573028de5&#34; width=&#34;250&#34;/&gt;&lt;/td&gt;
  &lt;/tr&gt;
  &lt;tr&gt;
    &lt;td&gt;&lt;img src=&#34;https://github.com/user-attachments/assets/e74a602b-0146-49c4-953d-3fa3b038a7f7&#34; width=&#34;250&#34;/&gt;&lt;/td&gt;
    &lt;td&gt;&lt;img src=&#34;https://github.com/user-attachments/assets/d8090b15-97f3-4abc-98c8-208ae79894d5&#34; width=&#34;250&#34;/&gt;&lt;/td&gt;
    &lt;td&gt;&lt;img src=&#34;https://github.com/user-attachments/assets/8b54adc3-d61b-42a0-8985-ea28f2e8f64c&#34; width=&#34;250&#34;/&gt;&lt;/td&gt;
  &lt;/tr&gt;
&lt;/table&gt;
&lt;hr&gt;
&lt;h1 id=&#34;-news&#34;&gt;📰 News
&lt;/h1&gt;&lt;ul&gt;
&lt;li&gt;
&lt;p&gt;2025-09-09: &lt;strong&gt;Developer Assembly kit ready for purchase&lt;/strong&gt; &lt;a class=&#34;link&#34; href=&#34;https://e.tb.cn/h.SZFbBgZABZ8zRPe?tk=ba514rTBRjQ&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;in China&lt;/a&gt;. World-wide purchase link out in 2 days.&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Non-profit, only for more convenient accessibility. I personally don&amp;rsquo;t earn any from this.&lt;/li&gt;
&lt;/ul&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;p&gt;2025-09-09: Joined &lt;a class=&#34;link&#34; href=&#34;https://www.seeedstudio.com/embodied-ai-worldwide-hackathon-home-robot.html&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Embodied AI Home Robot Hackathon&lt;/a&gt; (Oct 25–26, Bay Area) held by &lt;strong&gt;SEEED x Nvidia x Huggingface&lt;/strong&gt; as mentor! &lt;a class=&#34;link&#34; href=&#34;https://docs.google.com/forms/d/e/1FAIpQLSdYYDegdgIypxuGJNLcoc8kbdmU4jKgl49zg4X-107LAmBN4g/viewform&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Register HERE&lt;/a&gt;.&lt;/p&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;img width=&#34;2400&#34; height=&#34;1256&#34; alt=&#34;image&#34; src=&#34;https://github.com/user-attachments/assets/4132c23b-5c86-4bb9-94b4-a6b12059685b&#34; /&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;p&gt;2025-08-30: XLeRobot 0.3.0 Release with final outfit touch up and household chores showcase demos.&lt;/p&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;p&gt;2025-07-30: &lt;a class=&#34;link&#34; href=&#34;https://xlerobot.readthedocs.io/en/latest/software/index.html&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Control XLeRobot in real life&lt;/a&gt; with &lt;strong&gt;keyboard/Xbox controller/Switch joycon&lt;/strong&gt; in the wild anywhere. All bluetooth, no wifi needed and zero latency.&lt;/p&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;p&gt;&lt;img src=&#34;https://github.com/user-attachments/assets/de8f50ad-a370-406c-97fb-fc01638d5624&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;rea&#34;
	
	
&gt;&lt;/p&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;p&gt;2025-07-08: &lt;a class=&#34;link&#34; href=&#34;https://xlerobot.readthedocs.io/en/latest/simulation/index.html&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;&lt;strong&gt;Simulation&lt;/strong&gt;&lt;/a&gt; with updated urdfs, control scripts (support Quest3 VR, keyboard, Xbox controller, switch joycon), support for new hardware and cameras, RL environment. Get started in 15 min.&lt;/p&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;p&gt;&lt;img src=&#34;https://github.com/user-attachments/assets/68b77bea-fdcf-4f42-9cf0-efcf1b188358&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;vr&#34;
	
	
&gt;&lt;/p&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;p&gt;2025-07-01: &lt;a class=&#34;link&#34; href=&#34;https://xlerobot.readthedocs.io/en/latest/index.html&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;&lt;strong&gt;Documentation&lt;/strong&gt; website&lt;/a&gt; out for more organized tutorials, demos and resources.&lt;/p&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;p&gt;2025-06-13: &lt;a class=&#34;link&#34; href=&#34;https://xlerobot.readthedocs.io&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;&lt;strong&gt;XLeRobot 0.2.0&lt;/strong&gt;&lt;/a&gt; hardware setup, the 1st version fully capable for autonomous household tasks, starts from 660$.&lt;/p&gt;
&lt;/li&gt;
&lt;/ul&gt;
&lt;hr&gt;
&lt;h2 id=&#34;-total-cost-&#34;&gt;💵 Total Cost 💵
&lt;/h2&gt;&lt;blockquote&gt;
&lt;p&gt;[!NOTE]
Cost excludes 3D printing, tools, shipping, and taxes.&lt;/p&gt;
&lt;/blockquote&gt;
&lt;table&gt;
  &lt;thead&gt;
      &lt;tr&gt;
          &lt;th&gt;Price (Buy all the parts yourself)&lt;/th&gt;
          &lt;th&gt;US&lt;/th&gt;
          &lt;th&gt;EU&lt;/th&gt;
          &lt;th&gt;CN&lt;/th&gt;
      &lt;/tr&gt;
  &lt;/thead&gt;
  &lt;tbody&gt;
      &lt;tr&gt;
          &lt;td&gt;&lt;strong&gt;Basic&lt;/strong&gt; (use your laptop, single RGB head cam)&lt;/td&gt;
          &lt;td&gt;&lt;strong&gt;~$660&lt;/strong&gt;&lt;/td&gt;
          &lt;td&gt;&lt;strong&gt;~€680&lt;/strong&gt;&lt;/td&gt;
          &lt;td&gt;&lt;strong&gt;~¥3999&lt;/strong&gt;&lt;/td&gt;
      &lt;/tr&gt;
      &lt;tr&gt;
          &lt;td&gt;↑ Stereo dual-eye RGB head cam&lt;/td&gt;
          &lt;td&gt;+$30&lt;/td&gt;
          &lt;td&gt;+€30&lt;/td&gt;
          &lt;td&gt;+¥199&lt;/td&gt;
      &lt;/tr&gt;
      &lt;tr&gt;
          &lt;td&gt;+ Raspberry Pi&lt;/td&gt;
          &lt;td&gt;+$79&lt;/td&gt;
          &lt;td&gt;+€79&lt;/td&gt;
          &lt;td&gt;+¥399&lt;/td&gt;
      &lt;/tr&gt;
      &lt;tr&gt;
          &lt;td&gt;↑ RealSense RGBD head cam&lt;/td&gt;
          &lt;td&gt;+$220&lt;/td&gt;
          &lt;td&gt;+€230&lt;/td&gt;
          &lt;td&gt;+¥1499&lt;/td&gt;
      &lt;/tr&gt;
  &lt;/tbody&gt;
&lt;/table&gt;
&lt;hr&gt;
&lt;h2 id=&#34;-get-started-&#34;&gt;🚀 Get Started 🚀
&lt;/h2&gt;&lt;blockquote&gt;
&lt;p&gt;[!NOTE]
If you are totally new to programming, please spend at least a day to get yourself familiar with basic Python, Ubuntu and Github (with the help of Google and AI). At least you should know how to set up an Ubuntu system, git clone, pip install, use interpreters (VS Code, Cursor, Pycharm, etc.) and directly run commands in the terminals.&lt;/p&gt;
&lt;/blockquote&gt;
&lt;ol&gt;
&lt;li&gt;💵 &lt;strong&gt;Buy your parts&lt;/strong&gt;: &lt;a class=&#34;link&#34; href=&#34;https://xlerobot.readthedocs.io/en/latest/hardware/getting_started/material.html&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Bill of Materials&lt;/a&gt;&lt;/li&gt;
&lt;li&gt;🖨️ &lt;strong&gt;Print your stuff&lt;/strong&gt;: &lt;a class=&#34;link&#34; href=&#34;https://xlerobot.readthedocs.io/en/latest/hardware/getting_started/3d.html&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;3D printing&lt;/a&gt;&lt;/li&gt;
&lt;li&gt;🔨 &lt;del&gt;Avengers&lt;/del&gt;: &lt;a class=&#34;link&#34; href=&#34;https://xlerobot.readthedocs.io/en/latest/hardware/getting_started/assemble.html&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;&lt;strong&gt;Assemble&lt;/strong&gt;!&lt;/a&gt;&lt;/li&gt;
&lt;li&gt;💻 &lt;strong&gt;Software&lt;/strong&gt;: &lt;a class=&#34;link&#34; href=&#34;https://xlerobot.readthedocs.io/en/latest/software/index.html&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Get your robot moving!&lt;/a&gt;&lt;/li&gt;
&lt;/ol&gt;
&lt;hr&gt;
&lt;h2 id=&#34;contribute&#34;&gt;Contribute
&lt;/h2&gt;&lt;p&gt;&lt;strong&gt;👋 Want to contribute to XLeRobot?&lt;/strong&gt;
Please refer to &lt;a class=&#34;link&#34; href=&#34;CONTRIBUTING.md&#34; &gt;CONTRIBUTING.md&lt;/a&gt; for guidance on how to get involved!&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;Main Contributors&lt;/strong&gt;&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;https://vector-wangel.github.io/&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Gaotian/Vector Wang&lt;/a&gt;&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;https://lzhuoyi.github.io/Zhuoyi_Lu.github.io/&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Zhuoyi Lu&lt;/a&gt;: RL sim2real deploy, teleop on real robot (Xbox, VR, Joycon)&lt;/li&gt;
&lt;li&gt;Nicole Yue: Documentation website setup&lt;/li&gt;
&lt;li&gt;Yuesong Wang: Mujoco simulation&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;This is just a small brick in the pyramid, made possible by &lt;a class=&#34;link&#34; href=&#34;https://github.com/huggingface/lerobot&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;LeRobot&lt;/a&gt;, &lt;a class=&#34;link&#34; href=&#34;https://github.com/TheRobotStudio/SO-ARM100&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;SO-100&lt;/a&gt;, &lt;a class=&#34;link&#34; href=&#34;https://github.com/SIGRobotics-UIUC/LeKiwi&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Lekiwi&lt;/a&gt;, and &lt;a class=&#34;link&#34; href=&#34;https://github.com/timqian/bambot&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Bambot&lt;/a&gt;. Thanks to all the talented contributors behind these detailed and professional projects.&lt;/p&gt;
&lt;p&gt;Looking forward to collaborating with anyone interested in contributing to this project!&lt;/p&gt;
&lt;h2 id=&#34;about-me&#34;&gt;About me
&lt;/h2&gt;&lt;p&gt;&lt;a class=&#34;link&#34; href=&#34;https://vector-wangel.github.io/&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Gaotian/Vector Wang&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;I am a CS graduate student at Rice University &lt;a class=&#34;link&#34; href=&#34;https://robotpilab.github.io/&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;RobotPi Lab&lt;/a&gt;, focusing on robust object manipulation, where we propose virtual cages and funnels and physics-aware world models to close the Sim2real gap and achieve robust manipulation under uncertainties. One of my papers, Caging in Time, has recently been accepted by International Journal of Robotics Research (IJRR).&lt;/p&gt;
&lt;p&gt;I built XLeRobot as a personal hobby to instantiate my research theory, also to provide a low-cost platform for people who are interested in robotics and embodied AI to work with.&lt;/p&gt;
&lt;h2 id=&#34;star-history-chart&#34;&gt;&lt;a class=&#34;link&#34; href=&#34;https://star-history.com/#Vector-Wangel/XLeRobot&amp;amp;Timeline&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;&lt;img src=&#34;https://api.star-history.com/svg?repos=Vector-Wangel/XLeRobot&amp;amp;type=Timeline&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;Star History Chart&#34;
	
	
&gt;&lt;/a&gt;
&lt;/h2&gt;&lt;h2 id=&#34;citation&#34;&gt;Citation
&lt;/h2&gt;&lt;p&gt;If you want, you can cite this work with:&lt;/p&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;div class=&#34;chroma&#34;&gt;
&lt;table class=&#34;lntable&#34;&gt;&lt;tr&gt;&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code&gt;&lt;span class=&#34;lnt&#34;&gt;1
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;2
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;3
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;4
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;5
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;6
&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;
&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-bibtex&#34; data-lang=&#34;bibtex&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;nc&#34;&gt;@misc&lt;/span&gt;&lt;span class=&#34;p&#34;&gt;{&lt;/span&gt;&lt;span class=&#34;nl&#34;&gt;wang2025xlerobot&lt;/span&gt;&lt;span class=&#34;p&#34;&gt;,&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;    &lt;span class=&#34;na&#34;&gt;author&lt;/span&gt; &lt;span class=&#34;p&#34;&gt;=&lt;/span&gt; &lt;span class=&#34;s&#34;&gt;{Wang, Gaotian and Lu, Zhuoyi}&lt;/span&gt;&lt;span class=&#34;p&#34;&gt;,&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;    &lt;span class=&#34;na&#34;&gt;title&lt;/span&gt; &lt;span class=&#34;p&#34;&gt;=&lt;/span&gt; &lt;span class=&#34;s&#34;&gt;{XLeRobot: A Practical Low-cost Household Dual-Arm Mobile Robot Design for General Manipulation}&lt;/span&gt;&lt;span class=&#34;p&#34;&gt;,&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;    &lt;span class=&#34;na&#34;&gt;howpublished&lt;/span&gt; &lt;span class=&#34;p&#34;&gt;=&lt;/span&gt; &lt;span class=&#34;s&#34;&gt;&amp;#34;\url{https://github.com/Vector-Wangel/XLeRobot}&amp;#34;&lt;/span&gt;&lt;span class=&#34;p&#34;&gt;,&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;    &lt;span class=&#34;na&#34;&gt;year&lt;/span&gt; &lt;span class=&#34;p&#34;&gt;=&lt;/span&gt; &lt;span class=&#34;s&#34;&gt;{2025}&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;p&#34;&gt;}&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;&lt;/tr&gt;&lt;/table&gt;
&lt;/div&gt;
&lt;/div&gt;&lt;p&gt;&amp;mdash;&lt;img src=&#34;https://github.com/user-attachments/assets/682ef049-bb42-4b50-bf98-74d6311e774d&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;Generated Image August 27, 2025 - 4_58PM&#34;
	
	
&gt;&lt;/p&gt;
&lt;h2 id=&#34;-disclaimer-&#34;&gt;🪧 Disclaimer 🪧
&lt;/h2&gt;&lt;blockquote&gt;
&lt;p&gt;[!NOTE]
If you build, buy, or develop a XLeRobot based on this repo, you will be fully responsible for all the physical and mental damages it does to you or others.&lt;/p&gt;
&lt;/blockquote&gt;
</description>
        </item>
        <item>
        <title>Genesis</title>
        <link>https://producthunt.programnotes.cn/en/p/genesis/</link>
        <pubDate>Tue, 29 Jul 2025 15:36:08 +0800</pubDate>
        
        <guid>https://producthunt.programnotes.cn/en/p/genesis/</guid>
        <description>&lt;img src="https://images.unsplash.com/photo-1666362132684-607fd1f81c6e?ixid=M3w0NjAwMjJ8MHwxfHJhbmRvbXx8fHx8fHx8fDE3NTM3NzQ1NTF8&amp;ixlib=rb-4.1.0" alt="Featured image of post Genesis" /&gt;&lt;h1 id=&#34;genesis-embodied-aigenesis&#34;&gt;&lt;a class=&#34;link&#34; href=&#34;https://github.com/Genesis-Embodied-AI/Genesis&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Genesis-Embodied-AI/Genesis&lt;/a&gt;
&lt;/h1&gt;&lt;p&gt;&lt;img src=&#34;https://producthunt.programnotes.cn/imgs/big_text.png&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;Genesis&#34;
	
	
&gt;&lt;/p&gt;
&lt;p&gt;&lt;img src=&#34;https://producthunt.programnotes.cn/imgs/teaser.png&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;Teaser&#34;
	
	
&gt;&lt;/p&gt;
&lt;p&gt;&lt;a class=&#34;link&#34; href=&#34;https://pypi.org/project/genesis-world/&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;&lt;img src=&#34;https://img.shields.io/pypi/v/genesis-world&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;PyPI - Version&#34;
	
	
&gt;&lt;/a&gt;
&lt;a class=&#34;link&#34; href=&#34;https://pepy.tech/projects/genesis-world&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;&lt;img src=&#34;https://static.pepy.tech/badge/genesis-world&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;PyPI Downloads&#34;
	
	
&gt;&lt;/a&gt;
&lt;a class=&#34;link&#34; href=&#34;https://github.com/Genesis-Embodied-AI/Genesis/issues&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;&lt;img src=&#34;https://img.shields.io/github/issues/Genesis-Embodied-AI/Genesis&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;GitHub Issues&#34;
	
	
&gt;&lt;/a&gt;
&lt;a class=&#34;link&#34; href=&#34;https://github.com/Genesis-Embodied-AI/Genesis/discussions&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;&lt;img src=&#34;https://img.shields.io/github/discussions/Genesis-Embodied-AI/Genesis&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;GitHub Discussions&#34;
	
	
&gt;&lt;/a&gt;
&lt;a class=&#34;link&#34; href=&#34;https://discord.gg/nukCuhB47p&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;&lt;img src=&#34;https://img.shields.io/discord/1322086972302430269?logo=discord&#34;
	
	
	
	loading=&#34;lazy&#34;
	
		alt=&#34;Discord&#34;
	
	
&gt;&lt;/a&gt;
&lt;a href=&#34;https://drive.google.com/uc?export=view&amp;id=1ZS9nnbQ-t1IwkzJlENBYqYIIOOZhXuBZ&#34;&gt;&lt;img src=&#34;https://img.shields.io/badge/WeChat-07C160?style=for-the-badge&amp;logo=wechat&amp;logoColor=white&#34; height=&#34;20&#34; style=&#34;display:inline&#34;&gt;&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;[
[
[
[
[&lt;/p&gt;
&lt;h1 id=&#34;genesis&#34;&gt;Genesis
&lt;/h1&gt;&lt;h2 id=&#34;-news&#34;&gt;🔥 News
&lt;/h2&gt;&lt;ul&gt;
&lt;li&gt;[2025-07-02] The development of Genesis is now officially supported by &lt;a class=&#34;link&#34; href=&#34;https://genesis-ai.company/&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Genesis AI&lt;/a&gt;.&lt;/li&gt;
&lt;li&gt;[2025-01-09] We released a &lt;a class=&#34;link&#34; href=&#34;https://github.com/zhouxian/genesis-speed-benchmark&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;detailed performance benchmarking and comparison report&lt;/a&gt; on Genesis, together with all the test scripts.&lt;/li&gt;
&lt;li&gt;[2025-01-08] Released v0.2.1 🎊 🎉&lt;/li&gt;
&lt;li&gt;[2025-01-08] Created &lt;a class=&#34;link&#34; href=&#34;https://discord.gg/nukCuhB47p&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Discord&lt;/a&gt; and &lt;a class=&#34;link&#34; href=&#34;https://drive.google.com/uc?export=view&amp;amp;id=1ZS9nnbQ-t1IwkzJlENBYqYIIOOZhXuBZ&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Wechat&lt;/a&gt; group.&lt;/li&gt;
&lt;li&gt;[2024-12-25] Added a &lt;a class=&#34;link&#34; href=&#34;#docker&#34; &gt;docker&lt;/a&gt; including support for the ray-tracing renderer&lt;/li&gt;
&lt;li&gt;[2024-12-24] Added guidelines for &lt;a class=&#34;link&#34; href=&#34;https://github.com/Genesis-Embodied-AI/Genesis/blob/main/.github/CONTRIBUTING.md&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;contributing to Genesis&lt;/a&gt;&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;table-of-contents&#34;&gt;Table of Contents
&lt;/h2&gt;&lt;ol&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;#what-is-genesis&#34; &gt;What is Genesis?&lt;/a&gt;&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;#key-features&#34; &gt;Key Features&lt;/a&gt;&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;#quick-installation&#34; &gt;Quick Installation&lt;/a&gt;&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;#docker&#34; &gt;Docker&lt;/a&gt;&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;#documentation&#34; &gt;Documentation&lt;/a&gt;&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;#contributing-to-genesis&#34; &gt;Contributing to Genesis&lt;/a&gt;&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;#support&#34; &gt;Support&lt;/a&gt;&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;#license-and-acknowledgments&#34; &gt;License and Acknowledgments&lt;/a&gt;&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;#associated-papers&#34; &gt;Associated Papers&lt;/a&gt;&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;#citation&#34; &gt;Citation&lt;/a&gt;&lt;/li&gt;
&lt;/ol&gt;
&lt;h2 id=&#34;what-is-genesis&#34;&gt;What is Genesis?
&lt;/h2&gt;&lt;p&gt;Genesis is a physics platform designed for general-purpose &lt;em&gt;Robotics/Embodied AI/Physical AI&lt;/em&gt; applications. It is simultaneously multiple things:&lt;/p&gt;
&lt;ol&gt;
&lt;li&gt;A &lt;strong&gt;universal physics engine&lt;/strong&gt; re-built from the ground up, capable of simulating a wide range of materials and physical phenomena.&lt;/li&gt;
&lt;li&gt;A &lt;strong&gt;lightweight&lt;/strong&gt;, &lt;strong&gt;ultra-fast&lt;/strong&gt;, &lt;strong&gt;pythonic&lt;/strong&gt;, and &lt;strong&gt;user-friendly&lt;/strong&gt; robotics simulation platform.&lt;/li&gt;
&lt;li&gt;A powerful and fast &lt;strong&gt;photo-realistic rendering system&lt;/strong&gt;.&lt;/li&gt;
&lt;li&gt;A &lt;strong&gt;generative data engine&lt;/strong&gt; that transforms user-prompted natural language description into various modalities of data.&lt;/li&gt;
&lt;/ol&gt;
&lt;p&gt;Powered by a universal physics engine re-designed and re-built from the ground up, Genesis integrates various physics solvers and their coupling into a unified framework. This core physics engine is further enhanced by a generative agent framework that operates at an upper level, aiming towards fully automated data generation for robotics and beyond.&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;Note&lt;/strong&gt;: Currently, we are open-sourcing the &lt;em&gt;underlying physics engine&lt;/em&gt; and the &lt;em&gt;simulation platform&lt;/em&gt;. Our &lt;em&gt;generative framework&lt;/em&gt; is a modular system that incorporates many different generative modules, each handling a certain range of data modalities, routed by a high level agent. Some of the modules integrated existing papers and some are still under submission. Access to our generative feature will be gradually rolled out in the near future. If you are interested, feel free to explore more in the &lt;a class=&#34;link&#34; href=&#34;#associated-papers&#34; &gt;paper list&lt;/a&gt; below.&lt;/p&gt;
&lt;p&gt;Genesis aims to:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Lower the barrier&lt;/strong&gt; to using physics simulations, making robotics research accessible to everyone. See our &lt;a class=&#34;link&#34; href=&#34;https://genesis-world.readthedocs.io/en/latest/user_guide/overview/mission.html&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;mission statement&lt;/a&gt;.&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Unify diverse physics solvers&lt;/strong&gt; into a single framework to recreate the physical world with the highest fidelity.&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Automate data generation&lt;/strong&gt;, reducing human effort and letting the data flywheel spin on its own.&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;Project Page: &lt;a class=&#34;link&#34; href=&#34;https://genesis-embodied-ai.github.io/&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;https://genesis-embodied-ai.github.io/&lt;/a&gt;&lt;/p&gt;
&lt;h2 id=&#34;key-features&#34;&gt;Key Features
&lt;/h2&gt;&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Speed&lt;/strong&gt;: Over 43 million FPS when simulating a Franka robotic arm with a single RTX 4090 (430,000 times faster than real-time).&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Cross-platform&lt;/strong&gt;: Runs on Linux, macOS, Windows, and supports multiple compute backends (CPU, Nvidia/AMD GPUs, Apple Metal).&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Integration of diverse physics solvers&lt;/strong&gt;: Rigid body, MPM, SPH, FEM, PBD, Stable Fluid.&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Wide range of material models&lt;/strong&gt;: Simulation and coupling of rigid bodies, liquids, gases, deformable objects, thin-shell objects, and granular materials.&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Compatibility with various robots&lt;/strong&gt;: Robotic arms, legged robots, drones, &lt;em&gt;soft robots&lt;/em&gt;, and support for loading &lt;code&gt;MJCF (.xml)&lt;/code&gt;, &lt;code&gt;URDF&lt;/code&gt;, &lt;code&gt;.obj&lt;/code&gt;, &lt;code&gt;.glb&lt;/code&gt;, &lt;code&gt;.ply&lt;/code&gt;, &lt;code&gt;.stl&lt;/code&gt;, and more.&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Photo-realistic rendering&lt;/strong&gt;: Native ray-tracing-based rendering.&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Differentiability&lt;/strong&gt;: Genesis is designed to be fully differentiable. Currently, our MPM solver and Tool Solver support differentiability, with other solvers planned for future versions (starting with rigid &amp;amp; articulated body solver).&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Physics-based tactile simulation&lt;/strong&gt;: Differentiable &lt;a class=&#34;link&#34; href=&#34;https://github.com/Genesis-Embodied-AI/DiffTactile&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;tactile sensor simulation&lt;/a&gt; coming soon (expected in version 0.3.0).&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;User-friendliness&lt;/strong&gt;: Designed for simplicity, with intuitive installation and APIs.&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;quick-installation&#34;&gt;Quick Installation
&lt;/h2&gt;&lt;p&gt;Install &lt;strong&gt;PyTorch&lt;/strong&gt; first following the &lt;a class=&#34;link&#34; href=&#34;https://pytorch.org/get-started/locally/&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;official instructions&lt;/a&gt;.&lt;/p&gt;
&lt;p&gt;Then, install Genesis via PyPI:&lt;/p&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;div class=&#34;chroma&#34;&gt;
&lt;table class=&#34;lntable&#34;&gt;&lt;tr&gt;&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code&gt;&lt;span class=&#34;lnt&#34;&gt;1
&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;
&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-bash&#34; data-lang=&#34;bash&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;pip install genesis-world  &lt;span class=&#34;c1&#34;&gt;# Requires Python&amp;gt;=3.10,&amp;lt;3.13;&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;&lt;/tr&gt;&lt;/table&gt;
&lt;/div&gt;
&lt;/div&gt;&lt;p&gt;For the latest version to date, make sure that &lt;code&gt;pip&lt;/code&gt; is up-to-date via &lt;code&gt;pip install --upgrade pip&lt;/code&gt;, then run command:&lt;/p&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;div class=&#34;chroma&#34;&gt;
&lt;table class=&#34;lntable&#34;&gt;&lt;tr&gt;&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code&gt;&lt;span class=&#34;lnt&#34;&gt;1
&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;
&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-bash&#34; data-lang=&#34;bash&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;pip install git+https://github.com/Genesis-Embodied-AI/Genesis.git
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;&lt;/tr&gt;&lt;/table&gt;
&lt;/div&gt;
&lt;/div&gt;&lt;p&gt;Note that the package must still be updated manually to sync with main branch.&lt;/p&gt;
&lt;p&gt;Users seeking to edit the source code of Genesis are encouraged to install Genesis in editable mode. First, make sure that &lt;code&gt;genesis-world&lt;/code&gt; has been uninstalled, then clone the repository and install locally:&lt;/p&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;div class=&#34;chroma&#34;&gt;
&lt;table class=&#34;lntable&#34;&gt;&lt;tr&gt;&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code&gt;&lt;span class=&#34;lnt&#34;&gt;1
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;2
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;3
&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;
&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-bash&#34; data-lang=&#34;bash&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;git clone https://github.com/Genesis-Embodied-AI/Genesis.git
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;nb&#34;&gt;cd&lt;/span&gt; Genesis
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;pip install -e &lt;span class=&#34;s2&#34;&gt;&amp;#34;.[dev]&amp;#34;&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;&lt;/tr&gt;&lt;/table&gt;
&lt;/div&gt;
&lt;/div&gt;&lt;h2 id=&#34;docker&#34;&gt;Docker
&lt;/h2&gt;&lt;p&gt;If you want to use Genesis from Docker, you can first build the Docker image as:&lt;/p&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;div class=&#34;chroma&#34;&gt;
&lt;table class=&#34;lntable&#34;&gt;&lt;tr&gt;&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code&gt;&lt;span class=&#34;lnt&#34;&gt;1
&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;
&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-bash&#34; data-lang=&#34;bash&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;docker build -t genesis -f docker/Dockerfile docker
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;&lt;/tr&gt;&lt;/table&gt;
&lt;/div&gt;
&lt;/div&gt;&lt;p&gt;Then you can run the examples inside the docker image (mounted to &lt;code&gt;/workspace/examples&lt;/code&gt;):&lt;/p&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;div class=&#34;chroma&#34;&gt;
&lt;table class=&#34;lntable&#34;&gt;&lt;tr&gt;&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code&gt;&lt;span class=&#34;lnt&#34;&gt;1
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;2
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;3
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;4
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;5
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;6
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;7
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;8
&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;
&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-bash&#34; data-lang=&#34;bash&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;xhost +local:root &lt;span class=&#34;c1&#34;&gt;# Allow the container to access the display&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;docker run --gpus all --rm -it &lt;span class=&#34;se&#34;&gt;\
&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;-e &lt;span class=&#34;nv&#34;&gt;DISPLAY&lt;/span&gt;&lt;span class=&#34;o&#34;&gt;=&lt;/span&gt;&lt;span class=&#34;nv&#34;&gt;$DISPLAY&lt;/span&gt; &lt;span class=&#34;se&#34;&gt;\
&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;-v /dev/dri:/dev/dri &lt;span class=&#34;se&#34;&gt;\
&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;-v /tmp/.X11-unix/:/tmp/.X11-unix &lt;span class=&#34;se&#34;&gt;\
&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;-v &lt;span class=&#34;nv&#34;&gt;$PWD&lt;/span&gt;:/workspace &lt;span class=&#34;se&#34;&gt;\
&lt;/span&gt;&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;genesis
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;&lt;/tr&gt;&lt;/table&gt;
&lt;/div&gt;
&lt;/div&gt;&lt;h3 id=&#34;amd-users&#34;&gt;AMD users
&lt;/h3&gt;&lt;p&gt;AMD users can use Genesis using the &lt;code&gt;docker/Dockerfile.amdgpu&lt;/code&gt; file, which is built by running:&lt;/p&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;div class=&#34;chroma&#34;&gt;
&lt;table class=&#34;lntable&#34;&gt;&lt;tr&gt;&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code&gt;&lt;span class=&#34;lnt&#34;&gt;1
&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;
&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-fallback&#34; data-lang=&#34;fallback&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;docker build -t genesis-amd -f docker/Dockerfile.amdgpu docker
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;&lt;/tr&gt;&lt;/table&gt;
&lt;/div&gt;
&lt;/div&gt;&lt;p&gt;and can then be used by running:&lt;/p&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;div class=&#34;chroma&#34;&gt;
&lt;table class=&#34;lntable&#34;&gt;&lt;tr&gt;&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code&gt;&lt;span class=&#34;lnt&#34;&gt; 1
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt; 2
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt; 3
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt; 4
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt; 5
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt; 6
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt; 7
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt; 8
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt; 9
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;10
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;11
&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;
&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-fallback&#34; data-lang=&#34;fallback&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;docker run -it --network=host \
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt; --device=/dev/kfd \
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt; --device=/dev/dri \
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt; --group-add=video \
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt; --ipc=host \
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt; --cap-add=SYS_PTRACE \
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt; --security-opt seccomp=unconfined \
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt; --shm-size 8G \
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt; -v $PWD:/workspace \
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt; -e DISPLAY=$DISPLAY \
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt; genesis-amd
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;&lt;/tr&gt;&lt;/table&gt;
&lt;/div&gt;
&lt;/div&gt;&lt;p&gt;The examples will be accessible from &lt;code&gt;/workspace/examples&lt;/code&gt;. Note: AMD users should use the vulkan backend. This means you will need to call &lt;code&gt;gs.init(backend=gs.vulkan)&lt;/code&gt; to initialise Genesis.&lt;/p&gt;
&lt;h2 id=&#34;documentation&#34;&gt;Documentation
&lt;/h2&gt;&lt;p&gt;Comprehensive documentation is available in &lt;a class=&#34;link&#34; href=&#34;https://genesis-world.readthedocs.io/en/latest/user_guide/index.html&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;English&lt;/a&gt;, &lt;a class=&#34;link&#34; href=&#34;https://genesis-world.readthedocs.io/zh-cn/latest/user_guide/index.html&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Chinese&lt;/a&gt;, and &lt;a class=&#34;link&#34; href=&#34;https://genesis-world.readthedocs.io/ja/latest/user_guide/index.html&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Japanese&lt;/a&gt;. This includes detailed installation steps, tutorials, and API references.&lt;/p&gt;
&lt;h2 id=&#34;contributing-to-genesis&#34;&gt;Contributing to Genesis
&lt;/h2&gt;&lt;p&gt;The Genesis project is an open and collaborative effort. We welcome all forms of contributions from the community, including:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Pull requests&lt;/strong&gt; for new features or bug fixes.&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Bug reports&lt;/strong&gt; through GitHub Issues.&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Suggestions&lt;/strong&gt; to improve Genesis&amp;rsquo;s usability.&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;Refer to our &lt;a class=&#34;link&#34; href=&#34;https://github.com/Genesis-Embodied-AI/Genesis/blob/main/.github/CONTRIBUTING.md&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;contribution guide&lt;/a&gt; for more details.&lt;/p&gt;
&lt;h2 id=&#34;support&#34;&gt;Support
&lt;/h2&gt;&lt;ul&gt;
&lt;li&gt;Report bugs or request features via GitHub &lt;a class=&#34;link&#34; href=&#34;https://github.com/Genesis-Embodied-AI/Genesis/issues&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Issues&lt;/a&gt;.&lt;/li&gt;
&lt;li&gt;Join discussions or ask questions on GitHub &lt;a class=&#34;link&#34; href=&#34;https://github.com/Genesis-Embodied-AI/Genesis/discussions&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Discussions&lt;/a&gt;.&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;license-and-acknowledgments&#34;&gt;License and Acknowledgments
&lt;/h2&gt;&lt;p&gt;The Genesis source code is licensed under Apache 2.0.&lt;/p&gt;
&lt;p&gt;Genesis&amp;rsquo;s development has been made possible thanks to these open-source projects:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;https://github.com/taichi-dev/taichi&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Taichi&lt;/a&gt;: High-performance cross-platform compute backend. Kudos to the Taichi team for their technical support!&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;https://github.com/zhouxian/FluidLab&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;FluidLab&lt;/a&gt;: Reference MPM solver implementation.&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;https://github.com/erizmr/SPH_Taichi&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;SPH_Taichi&lt;/a&gt;: Reference SPH solver implementation.&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;https://matthias-research.github.io/pages/tenMinutePhysics/index.html&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;Ten Minute Physics&lt;/a&gt; and &lt;a class=&#34;link&#34; href=&#34;https://github.com/WASD4959/PBF3D&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;PBF3D&lt;/a&gt;: Reference PBD solver implementations.&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;https://github.com/google-deepmind/mujoco&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;MuJoCo&lt;/a&gt;: Reference for rigid body dynamics.&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;https://github.com/danfis/libccd&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;libccd&lt;/a&gt;: Reference for collision detection.&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;https://github.com/mmatl/pyrender&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;PyRender&lt;/a&gt;: Rasterization-based renderer.&lt;/li&gt;
&lt;li&gt;&lt;a class=&#34;link&#34; href=&#34;https://github.com/LuisaGroup/LuisaCompute&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;LuisaCompute&lt;/a&gt; and &lt;a class=&#34;link&#34; href=&#34;https://github.com/LuisaGroup/LuisaRender&#34;  target=&#34;_blank&#34; rel=&#34;noopener&#34;
    &gt;LuisaRender&lt;/a&gt;: Ray-tracing DSL.&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;associated-papers&#34;&gt;Associated Papers
&lt;/h2&gt;&lt;p&gt;Genesis is a large-scale effort that integrates state-of-the-art technologies of various existing and ongoing research work into a single system. Here we include a non-exhaustive list of all the papers that contributed to the Genesis project in one way or another:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Xian, Zhou, et al. &amp;ldquo;Fluidlab: A differentiable environment for benchmarking complex fluid manipulation.&amp;rdquo; arXiv preprint arXiv:2303.02346 (2023).&lt;/li&gt;
&lt;li&gt;Xu, Zhenjia, et al. &amp;ldquo;Roboninja: Learning an adaptive cutting policy for multi-material objects.&amp;rdquo; arXiv preprint arXiv:2302.11553 (2023).&lt;/li&gt;
&lt;li&gt;Wang, Yufei, et al. &amp;ldquo;Robogen: Towards unleashing infinite data for automated robot learning via generative simulation.&amp;rdquo; arXiv preprint arXiv:2311.01455 (2023).&lt;/li&gt;
&lt;li&gt;Wang, Tsun-Hsuan, et al. &amp;ldquo;Softzoo: A soft robot co-design benchmark for locomotion in diverse environments.&amp;rdquo; arXiv preprint arXiv:2303.09555 (2023).&lt;/li&gt;
&lt;li&gt;Wang, Tsun-Hsuan Johnson, et al. &amp;ldquo;Diffusebot: Breeding soft robots with physics-augmented generative diffusion models.&amp;rdquo; Advances in Neural Information Processing Systems 36 (2023): 44398-44423.&lt;/li&gt;
&lt;li&gt;Katara, Pushkal, Zhou Xian, and Katerina Fragkiadaki. &amp;ldquo;Gen2sim: Scaling up robot learning in simulation with generative models.&amp;rdquo; 2024 IEEE International Conference on Robotics and Automation (ICRA). IEEE, 2024.&lt;/li&gt;
&lt;li&gt;Si, Zilin, et al. &amp;ldquo;DiffTactile: A Physics-based Differentiable Tactile Simulator for Contact-rich Robotic Manipulation.&amp;rdquo; arXiv preprint arXiv:2403.08716 (2024).&lt;/li&gt;
&lt;li&gt;Wang, Yian, et al. &amp;ldquo;Thin-Shell Object Manipulations With Differentiable Physics Simulations.&amp;rdquo; arXiv preprint arXiv:2404.00451 (2024).&lt;/li&gt;
&lt;li&gt;Lin, Chunru, et al. &amp;ldquo;UBSoft: A Simulation Platform for Robotic Skill Learning in Unbounded Soft Environments.&amp;rdquo; arXiv preprint arXiv:2411.12711 (2024).&lt;/li&gt;
&lt;li&gt;Zhou, Wenyang, et al. &amp;ldquo;EMDM: Efficient motion diffusion model for fast and high-quality motion generation.&amp;rdquo; European Conference on Computer Vision. Springer, Cham, 2025.&lt;/li&gt;
&lt;li&gt;Qiao, Yi-Ling, Junbang Liang, Vladlen Koltun, and Ming C. Lin. &amp;ldquo;Scalable differentiable physics for learning and control.&amp;rdquo; International Conference on Machine Learning. PMLR, 2020.&lt;/li&gt;
&lt;li&gt;Qiao, Yi-Ling, Junbang Liang, Vladlen Koltun, and Ming C. Lin. &amp;ldquo;Efficient differentiable simulation of articulated bodies.&amp;rdquo; In International Conference on Machine Learning, PMLR, 2021.&lt;/li&gt;
&lt;li&gt;Qiao, Yi-Ling, Junbang Liang, Vladlen Koltun, and Ming Lin. &amp;ldquo;Differentiable simulation of soft multi-body systems.&amp;rdquo; Advances in Neural Information Processing Systems 34 (2021).&lt;/li&gt;
&lt;li&gt;Wan, Weilin, et al. &amp;ldquo;Tlcontrol: Trajectory and language control for human motion synthesis.&amp;rdquo; arXiv preprint arXiv:2311.17135 (2023).&lt;/li&gt;
&lt;li&gt;Wang, Yian, et al. &amp;ldquo;Architect: Generating Vivid and Interactive 3D Scenes with Hierarchical 2D Inpainting.&amp;rdquo; arXiv preprint arXiv:2411.09823 (2024).&lt;/li&gt;
&lt;li&gt;Zheng, Shaokun, et al. &amp;ldquo;LuisaRender: A high-performance rendering framework with layered and unified interfaces on stream architectures.&amp;rdquo; ACM Transactions on Graphics (TOG) 41.6 (2022): 1-19.&lt;/li&gt;
&lt;li&gt;Fan, Yingruo, et al. &amp;ldquo;Faceformer: Speech-driven 3d facial animation with transformers.&amp;rdquo; Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition. 2022.&lt;/li&gt;
&lt;li&gt;Wu, Sichun, Kazi Injamamul Haque, and Zerrin Yumak. &amp;ldquo;ProbTalk3D: Non-Deterministic Emotion Controllable Speech-Driven 3D Facial Animation Synthesis Using VQ-VAE.&amp;rdquo; Proceedings of the 17th ACM SIGGRAPH Conference on Motion, Interaction, and Games. 2024.&lt;/li&gt;
&lt;li&gt;Dou, Zhiyang, et al. &amp;ldquo;C·ASE: Learning conditional adversarial skill embeddings for physics-based characters.&amp;rdquo; SIGGRAPH Asia 2023 Conference Papers. 2023.&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;&amp;hellip; and many more on-going work.&lt;/p&gt;
&lt;h2 id=&#34;citation&#34;&gt;Citation
&lt;/h2&gt;&lt;p&gt;If you use Genesis in your research, please consider citing:&lt;/p&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;div class=&#34;chroma&#34;&gt;
&lt;table class=&#34;lntable&#34;&gt;&lt;tr&gt;&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code&gt;&lt;span class=&#34;lnt&#34;&gt;1
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;2
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;3
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;4
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;5
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;6
&lt;/span&gt;&lt;span class=&#34;lnt&#34;&gt;7
&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;
&lt;td class=&#34;lntd&#34;&gt;
&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-bibtex&#34; data-lang=&#34;bibtex&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;nc&#34;&gt;@misc&lt;/span&gt;&lt;span class=&#34;p&#34;&gt;{&lt;/span&gt;&lt;span class=&#34;nl&#34;&gt;Genesis&lt;/span&gt;&lt;span class=&#34;p&#34;&gt;,&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;  &lt;span class=&#34;na&#34;&gt;author&lt;/span&gt; &lt;span class=&#34;p&#34;&gt;=&lt;/span&gt; &lt;span class=&#34;s&#34;&gt;{Genesis Authors}&lt;/span&gt;&lt;span class=&#34;p&#34;&gt;,&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;  &lt;span class=&#34;na&#34;&gt;title&lt;/span&gt; &lt;span class=&#34;p&#34;&gt;=&lt;/span&gt; &lt;span class=&#34;s&#34;&gt;{Genesis: A Generative and Universal Physics Engine for Robotics and Beyond}&lt;/span&gt;&lt;span class=&#34;p&#34;&gt;,&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;  &lt;span class=&#34;na&#34;&gt;month&lt;/span&gt; &lt;span class=&#34;p&#34;&gt;=&lt;/span&gt; &lt;span class=&#34;s&#34;&gt;{December}&lt;/span&gt;&lt;span class=&#34;p&#34;&gt;,&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;  &lt;span class=&#34;na&#34;&gt;year&lt;/span&gt; &lt;span class=&#34;p&#34;&gt;=&lt;/span&gt; &lt;span class=&#34;s&#34;&gt;{2024}&lt;/span&gt;&lt;span class=&#34;p&#34;&gt;,&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;  &lt;span class=&#34;na&#34;&gt;url&lt;/span&gt; &lt;span class=&#34;p&#34;&gt;=&lt;/span&gt; &lt;span class=&#34;s&#34;&gt;{https://github.com/Genesis-Embodied-AI/Genesis}&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;p&#34;&gt;}&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/td&gt;&lt;/tr&gt;&lt;/table&gt;
&lt;/div&gt;
&lt;/div&gt;</description>
        </item>
        
    </channel>
</rss>
